ngram
listlengths
0
67.8k
[ "\"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script,", "# pylint: disable=W0621 import random import pytest from pycoin.serialize import b2h from pycoin.tx.TxOut", "txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script,", "b\"\\xFF\") assert (a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script]", "def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0)", "script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash,", "segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.bech32_p2wpkh_address()", "a, b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list", "spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable, Spendable) assert", "bytes) assert script == 
(b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash()", "(\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert", "utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2]", "b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout,", "segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs):", "bip49 @pytest.fixture def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert isinstance(server, ServerInfo)", "ServerInfo from nowallet import bip49 @pytest.fixture def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def", "def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b =", "isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script == 
b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script):", "random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout", "ports=12345) def test_serverinfo_class(server): assert isinstance(server, ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture", "txout_small_coin_large_script, txout_large_coin_large_script): a, b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script)) == (b.coin_value,", "secret = \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script()", "connectrum.svr_info import ServerInfo from nowallet import bip49 @pytest.fixture def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\",", "= [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0] ==", "test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs):", "bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\"", "import TxOut from pycoin.tx.Spendable import Spendable from connectrum.svr_info import ServerInfo from nowallet import", 
"random import pytest from pycoin.serialize import b2h from pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable", "txout_list[1] == txout_small_coin_large_script assert txout_list[2] == txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script def test_txout(txout_small_coin_small_script):", "TxOut) assert txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return", "+ \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert address", "spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout assert spendable_list[1] ==", "address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address", "ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def txout_small_coin_small_script(): return TxOut(0, b\"\\x00\")", "TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script):", "spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout assert", "b\"\\x00\" assert spendable.tx_out_index == 0 
@pytest.fixture def segwitbip32node_from_chbs(): secret = \"CORRECT HORSE BATTERY", "spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"],", "script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash,", "0 @pytest.fixture def segwitbip32node_from_chbs(): secret = \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def", "== 0 @pytest.fixture def segwitbip32node_from_chbs(): secret = \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret)", "isinstance(script_hash, str) assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address =", "== (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str)", "Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script):", "assert spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout def", "pycoin.serialize 
import b2h from pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable import Spendable from connectrum.svr_info", "assert isinstance(script, bytes) assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash", "= [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] ==", "b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list =", "Spendable) assert spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index == 0 @pytest.fixture def segwitbip32node_from_chbs(): secret", "b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return", "txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b = txout_large_coin_large_script,", "test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\", "(\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return", "(b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, 
txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script)))", "== txout_small_coin_large_script assert txout_list[2] == txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert", "spendable_list[0] == spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3]", "\\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert address ==", "Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout,", "return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def", "assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert", "def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10)", "def segwitbip32node_from_chbs(): secret = \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return 
bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script", "= segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def", "return TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script,", "test_serverinfo_class(server): assert isinstance(server, ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def txout_small_coin_small_script():", "== b\"\\x00\" assert spendable.tx_out_index == 0 @pytest.fixture def segwitbip32node_from_chbs(): secret = \"CORRECT HORSE", "script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address,", "False) @pytest.fixture def txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\")", "STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert script", "\"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\"", "spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, 
spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo:", "assert isinstance(server, ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def txout_small_coin_small_script(): return", "assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.bech32_p2wpkh_address() assert isinstance(address, str) assert", "spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"),", "return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert isinstance(server, ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\",", "b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert", "return TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return", "assert spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0,", "assert isinstance(script_hash, bytes) assert script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash", "isinstance(server, ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def txout_small_coin_small_script(): return 
TxOut(0,", "spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout", "txout_list[2] == txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert", "spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0)", "= segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def", "== spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3] ==", "spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable,", "@pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\",", "segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs):", "@pytest.fixture def txout_small_coin_large_script(): return TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def", "bytes) assert 
script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash()", "isinstance(spendable, Spendable) assert spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index == 0 @pytest.fixture def segwitbip32node_from_chbs():", "spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0]", "assert txout_list[1] == txout_small_coin_large_script assert txout_list[2] == txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script def", "@pytest.fixture def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert isinstance(server, ServerInfo) assert", "== spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable", "random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script", "b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script,", "= Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable, Spendable) assert spendable.tx_hash == b\"\\x00\" assert", "def txout_small_coin_large_script(): 
return TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script,", "isinstance(script, bytes) assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash =", "pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable import Spendable from connectrum.svr_info import ServerInfo from nowallet", "== (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes)", "spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout):", "assert script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert", "isinstance(address, str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.bech32_p2wpkh_address() assert isinstance(address,", "txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\",", "TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script,", "from connectrum.svr_info import 
ServerInfo from nowallet import bip49 @pytest.fixture def server(): return ServerInfo(\"onion\",", "b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\"", "hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert isinstance(server, ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False)", "spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def", "b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\")", "assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert", "txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script assert txout_list[1] ==", "b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return", "0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): 
return Spendable.from_tx_out(txout_small_coin_large_script,", "server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert isinstance(server, ServerInfo) assert server.get_port(\"t\") ==", "txout_small_coin_large_script(): return TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script,", "(txo.coin_value, b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script assert txout_list[2] ==", "== b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return", "== spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable, Spendable)", "spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable, Spendable) assert spendable.tx_hash == b\"\\x00\"", "assert spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert", "txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script assert txout_list[1]", "assert spendable.tx_out_index == 0 @pytest.fixture def segwitbip32node_from_chbs(): secret = \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\")", "@pytest.fixture def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return 
TxOut(0, b\"\\xFF\") @pytest.fixture", "@pytest.fixture def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b", "== 0 assert txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0)", "from pycoin.serialize import b2h from pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable import Spendable from", "assert txout_list[2] == txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut)", "0 assert txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture", "def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return TxOut(0, b\"\\xFF\") @pytest.fixture def", "spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"]))", "= txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script,", "from pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable import Spendable from connectrum.svr_info import ServerInfo from", "assert txout_list[0] == txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script 
assert txout_list[2] == txout_large_coin_small_script assert", "\"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.bech32_p2wpkh_address() assert isinstance(address, str) assert address == \"bc1pqq2wtwkpv674h8mzqjcmg0anccsejutycllqmc65qs\"", "(utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2] ==", "return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout,", "from nowallet import bip49 @pytest.fixture def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server):", "txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script assert txout_list[2]", "b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable, Spendable) assert spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index ==", "spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert", "spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture", 
"test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable, Spendable) assert spendable.tx_hash ==", "Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script):", "b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script assert txout_list[2] == txout_large_coin_small_script", "[txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script", "return TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return", "assert txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def", "txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script):", "0) assert isinstance(spendable, Spendable) assert spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index == 0 @pytest.fixture", "= segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def 
test_bech32_segwitkey_address(segwitbip32node_from_chbs): address =", "(b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert", "== (b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value,", "import bip49 @pytest.fixture def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert isinstance(server,", "== (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str)", "def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable, Spendable) assert spendable.tx_hash", "TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return TxOut(0,", "txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script)) ==", "\\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert script_hash ==", "import Spendable from connectrum.svr_info import ServerInfo from nowallet import bip49 @pytest.fixture def server():", "def 
test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value,", "return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script,", "assert txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture def spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script,", "== (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script():", "Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout,", "assert isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture def", "def txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture def", "assert isinstance(spendable, Spendable) assert spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index == 0 @pytest.fixture def", "Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\", 0) assert isinstance(spendable, Spendable) assert spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index", "assert spendable_list[0] == 
spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout assert", "pylint: disable=W0621 import random import pytest from pycoin.serialize import b2h from pycoin.tx.TxOut import", "def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list", "== txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script assert txout_list[2] == txout_large_coin_small_script assert txout_list[3] ==", "b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return TxOut(0, b\"\\xFF\")", "txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script ==", "utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout", "ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert isinstance(server, ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345,", "@pytest.fixture def segwitbip32node_from_chbs(): secret = \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs):", "def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert 
isinstance(server, ServerInfo) assert server.get_port(\"t\")", "spendable_small_hex_large_vout assert spendable_list[2] == spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable =", "\\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert script_hash ==", "assert (a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list)", "spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list =", "address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.bech32_p2wpkh_address() assert isinstance(address, str) assert address", "(b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert", "assert isinstance(address, str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.bech32_p2wpkh_address() assert", "assert txout_list[3] == txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert 
txout_small_coin_small_script.coin_value == 0", "return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def", "str) assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address()", "txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script():", "spendable.tx_out_index == 0 @pytest.fixture def segwitbip32node_from_chbs(): secret = \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return", "def test_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.p2sh_p2wpkh_address() assert isinstance(address, str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def", "def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" +", "import ServerInfo from nowallet import bip49 @pytest.fixture def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345)", "@pytest.fixture def txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture", "== txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script", "== txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script def 
test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value", "txout_list[0] == txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script assert txout_list[2] == txout_large_coin_small_script assert txout_list[3]", "server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture def", "b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a,", "b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo:", "def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list)", "HORSE BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes)", "nowallet import bip49 @pytest.fixture def server(): return ServerInfo(\"onion\", hostname=\"fdkhv2bb7hqel2e7.onion\", ports=12345) def test_serverinfo_class(server): assert", "TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script(): return TxOut(0, b\"\\xFF\") @pytest.fixture def txout_large_coin_large_script(): return TxOut(10,", "12345, False) @pytest.fixture def 
txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return TxOut(10,", "txout_large_coin_large_script): a, b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script))", "txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0]", "txout_list[3] == txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value == 0 assert", "+ \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert script_hash", "10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout]", "TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\")", "txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script assert", "@pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def 
spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\",", "(a.coin_value, b2h(a.script)) == (b.coin_value, b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda", "b\"\\x00\", 10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout,", "0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script,", "b\"\\x00\", 0) assert isinstance(spendable, Spendable) assert spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index == 0", "return TxOut(10, b\"\\xFF\") def test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b = txout_large_coin_large_script, TxOut(10,", "BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert", "def test_serverinfo_class(server): assert isinstance(server, ServerInfo) assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def", "def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert script_hash == 
(\"41d8dc340e750287f1ef920956e1f9ae\" +", "= \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert", "def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\", 10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10)", "txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value ==", "test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\", "spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout assert spendable_list[1] == spendable_small_hex_large_vout assert", "b2h from pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable import Spendable from connectrum.svr_info import ServerInfo", "txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture def txout_large_coin_small_script(): return TxOut(10, b\"\\x00\") @pytest.fixture def txout_small_coin_large_script():", "test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout): spendable_list = [spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda", 
"spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout assert spendable_list[1]", "b2h(b.script)) txout_list = [txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert", "10) @pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout,", "@pytest.fixture def spendable_large_hex_large_vout(txout_large_coin_large_script): return Spendable.from_tx_out(txout_large_coin_large_script, b\"\\xFF\", 10) def test_spendable_ordering(txout_large_coin_large_script, spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout):", "import b2h from pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable import Spendable from connectrum.svr_info import", "def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script == b\"\\x00\"", "script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\")", "b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert script_hash == 
(b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\"", "script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\")", "assert isinstance(script_hash, str) assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs): address", "== \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.bech32_p2wpkh_address() assert isinstance(address, str) assert address ==", "[spendable_small_hex_small_vout, spendable_large_hex_small_vout, spendable_small_hex_large_vout, spendable_large_hex_large_vout] random.shuffle(spendable_list) spendable_list.sort(key=lambda utxo: (utxo.as_dict()[\"tx_hash_hex\"], utxo.as_dict()[\"tx_out_index\"])) assert spendable_list[0] == spendable_small_hex_small_vout", "disable=W0621 import random import pytest from pycoin.serialize import b2h from pycoin.tx.TxOut import TxOut", "test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script, TxOut) assert txout_small_coin_small_script.coin_value == 0 assert txout_small_coin_small_script.script == b\"\\x00\" @pytest.fixture", "import random import pytest from pycoin.serialize import b2h from pycoin.tx.TxOut import TxOut from", "spendable_small_hex_small_vout(txout_small_coin_small_script): return Spendable.from_tx_out(txout_small_coin_small_script, b\"\\x00\", 0) @pytest.fixture def spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture", "isinstance(script_hash, bytes) assert script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\ b\"\\xe3\\x15\\x99<\\x96\\x08\\xaea\\xac%\") def test_segwitkey_electrumx_spkhash(segwitbip32node_from_chbs): 
script_hash =", "str) assert address == \"38G7CQfoej3fZQbHHey7Z1XPUGpVpJv4em\" def test_bech32_segwitkey_address(segwitbip32node_from_chbs): address = segwitbip32node_from_chbs.bech32_p2wpkh_address() assert isinstance(address, str)", "script_hash = segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\")", "assert spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index == 0 @pytest.fixture def segwitbip32node_from_chbs(): secret =", "test_txout_ordering(txout_small_coin_small_script, txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script): a, b = txout_large_coin_large_script, TxOut(10, b\"\\xFF\") assert (a.coin_value, b2h(a.script))", "def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" +", "assert server.get_port(\"t\") == (\"fdkhv2bb7hqel2e7.onion\", 12345, False) @pytest.fixture def txout_small_coin_small_script(): return TxOut(0, b\"\\x00\") @pytest.fixture", "= segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert script == (b\"\\x00\\x14\\xe5\\xba\\xc1f\\xbd[\\x9fb\\x04\" + \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def", "segwitbip32node_from_chbs.electrumx_script_hash() assert isinstance(script_hash, str) assert script_hash == (\"41d8dc340e750287f1ef920956e1f9ae\" + \\ \"8a724efa9bb3772352118fe26372be97\") def test_segwitkey_address(segwitbip32node_from_chbs):", "TxOut from pycoin.tx.Spendable import Spendable from connectrum.svr_info import ServerInfo from nowallet import bip49", "pytest from pycoin.serialize import b2h from pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable import Spendable", "Spendable from connectrum.svr_info import ServerInfo from nowallet import bip49 @pytest.fixture def 
server(): return", "+ \\ b\"\\xb1\\xb4?\\xb3\\xc6!\\x99qd\\xc7\\xfe\") def test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert script_hash", "import pytest from pycoin.serialize import b2h from pycoin.tx.TxOut import TxOut from pycoin.tx.Spendable import", "pycoin.tx.Spendable import Spendable from connectrum.svr_info import ServerInfo from nowallet import bip49 @pytest.fixture def", "txout_small_coin_small_script assert txout_list[1] == txout_small_coin_large_script assert txout_list[2] == txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script", "txout_small_coin_large_script assert txout_list[2] == txout_large_coin_small_script assert txout_list[3] == txout_large_coin_large_script def test_txout(txout_small_coin_small_script): assert isinstance(txout_small_coin_small_script,", "txout_large_coin_small_script, txout_small_coin_large_script, txout_large_coin_large_script] random.shuffle(txout_list) txout_list.sort(key=lambda txo: (txo.coin_value, b2h(txo.script))) assert txout_list[0] == txout_small_coin_small_script assert", "segwitbip32node_from_chbs(): secret = \"CORRECT HORSE BATTERY STAPLE\".encode(\"utf-8\") return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script =", "return bip49.SegwitBIP32Node.from_master_secret(secret) def test_segwitkey_script(segwitbip32node_from_chbs): script = segwitbip32node_from_chbs.p2wpkh_script() assert isinstance(script, bytes) assert script ==", "test_segwitkey_script_hash(segwitbip32node_from_chbs): script_hash = segwitbip32node_from_chbs.p2wpkh_script_hash() assert isinstance(script_hash, bytes) assert script_hash == (b\"H\\x12\\xe21\\x90\\x00:\\xc2\\xd2\\xd7\" + \\", "from pycoin.tx.Spendable import Spendable from connectrum.svr_info import ServerInfo from nowallet import bip49 @pytest.fixture", "@pytest.fixture def 
spendable_large_hex_small_vout(txout_large_coin_small_script): return Spendable.from_tx_out(txout_large_coin_small_script, b\"\\xFF\", 0) @pytest.fixture def spendable_small_hex_large_vout(txout_small_coin_large_script): return Spendable.from_tx_out(txout_small_coin_large_script, b\"\\x00\",", "<reponame>hkrugersa/nowallet<filename>tests/test_bip69_bip49.py # pylint: disable=W0621 import random import pytest from pycoin.serialize import b2h from", "== spendable_large_hex_small_vout assert spendable_list[3] == spendable_large_hex_large_vout def test_spendable(spendable_small_hex_small_vout): spendable = Spendable.from_tx_out(TxOut(0, b\"\\x00\"), b\"\\x00\",", "spendable.tx_hash == b\"\\x00\" assert spendable.tx_out_index == 0 @pytest.fixture def segwitbip32node_from_chbs(): secret = \"CORRECT" ]
[ "train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229,", "oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [ torchvision.transforms.RandomResizedCrop(224),", "import os import pytest import torch import torchvision from flower_classifier.datasets.csv import CSVDataset from", "TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456,", "OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import RandomDataModule from tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset()", "@pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,", "] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset): dataloader =", "oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def random_datamodule(): data_module =", "os import pytest import torch import torchvision from flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers", "= torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def 
oxford_datamodule(): transforms = [ torchvision.transforms.RandomResizedCrop(224),", "oxford_datamodule(): transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ]", "@pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def random_datamodule():", "return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\")", "data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms", "torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = CSVDataset(filename=train_filename, transforms=transforms) return dataset", "0.224, 0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset):", "dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229,", "0.224, 0.225]), ] dataset = CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader", "] dataset = CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def 
oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset,", "torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms)", "pytest import torch import torchvision from flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset,", "from flower_classifier.datasets.random import RandomDataModule from tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset:", "from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import RandomDataModule from tests.datasets import", "transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset", "torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return", "@pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406],", "= OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR)", "-> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [ 
torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(),", "dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset,", "torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def random_datamodule(): data_module = RandomDataModule(batch_size=32) return data_module", "= [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset =", "import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import RandomDataModule from tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\")", "[ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = CSVDataset(filename=train_filename,", "0.224, 0.225]), ] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset()", "train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR,", "tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(),", "OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def 
oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False)", "return dataset @pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\")", "def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [", "dataset = CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8,", "transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return dataloader", "RandomDataModule from tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset: transforms = [", "def oxford_datamodule(): transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),", "def oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def random_datamodule(): data_module", "= os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,", "import torchvision from flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, 
OxfordFlowersDataModule, split_dataset from", "[ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR,", "= CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False)", "[0.229, 0.224, 0.225]), ] dataset = CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset):", "torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return", "flower_classifier.datasets.random import RandomDataModule from tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset: transforms", "os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),", "download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return", "torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(),", "OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import RandomDataModule from tests.datasets import TEST_CACHE_DIR 
@pytest.fixture(scope=\"module\") def", "dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def", "] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset:", "torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = CSVDataset(filename=train_filename, transforms=transforms) return", "data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR,", "import pytest import torch import torchvision from flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers import", "@pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule():", "= OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8,", "transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] data_module", "import CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import RandomDataModule from", 
"0.406], [0.229, 0.224, 0.225]), ] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\")", "0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\")", "torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True,", "torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32,", "batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485,", "flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import RandomDataModule", "oxford_dataset() -> torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,", "dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms = [", "[0.229, 0.224, 0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def", "return dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms = [ 
torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406],", "target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406],", "torch import torchvision from flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset", "flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import RandomDataModule from tests.datasets import TEST_CACHE_DIR", "[ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR,", "OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename", "transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader", "torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485,", "def oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return 
dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms", "= torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def random_datamodule(): data_module = RandomDataModule(batch_size=32) return", "oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms =", "\"train_split.csv\") transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ]", "split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456,", "0.406], [0.229, 0.224, 0.225]), ] dataset = CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def", "dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def random_datamodule(): data_module = RandomDataModule(batch_size=32)", "@pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename = os.path.join(TEST_CACHE_DIR, \"train_split.csv\") transforms =", "torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ]", "0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def 
oxford_dataloader(oxford_dataset): dataloader", "0.456, 0.406], [0.229, 0.224, 0.225]), ] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module", "dataset @pytest.fixture(scope=\"module\") def oxford_dataloader(oxford_dataset): dataloader = torch.utils.data.DataLoader(oxford_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def", "batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() -> torch.utils.data.Dataset: split_dataset(root_dir=TEST_CACHE_DIR, target_dir=TEST_CACHE_DIR) train_filename =", "0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset", "import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485,", "torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] dataset = CSVDataset(filename=train_filename, transforms=transforms)", "torchvision from flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random", "from tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224),", "shuffle=False) return dataloader @pytest.fixture(scope=\"module\") def oxford_datamodule(): transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456,", "[0.229, 0.224, 
0.225]), ] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def", "from flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import", "CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule, split_dataset from flower_classifier.datasets.random import RandomDataModule from tests.datasets", "import RandomDataModule from tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() -> torch.utils.data.Dataset: transforms =", "split_dataset from flower_classifier.datasets.random import RandomDataModule from tests.datasets import TEST_CACHE_DIR @pytest.fixture(scope=\"module\") def oxford_dataset() ->", "0.225]), ] dataset = CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader =", "import torch import torchvision from flower_classifier.datasets.csv import CSVDataset from flower_classifier.datasets.oxford_flowers import OxfordFlowers102Dataset, OxfordFlowersDataModule,", "def oxford_dataset() -> torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229,", "= [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] data_module =", "torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] data_module = OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms)", "0.225]), ] data_module = 
OxfordFlowersDataModule(data_dir=TEST_CACHE_DIR, batch_size=32, train_transforms=transforms) return data_module @pytest.fixture(scope=\"module\") def oxford_csv_dataset() ->", "0.406], [0.229, 0.224, 0.225]), ] dataset = OxfordFlowers102Dataset(root_dir=TEST_CACHE_DIR, download=True, transforms=transforms) return dataset @pytest.fixture(scope=\"module\")", "return dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return dataloader @pytest.fixture(scope=\"module\")", "CSVDataset(filename=train_filename, transforms=transforms) return dataset @pytest.fixture(scope=\"module\") def oxford_csv_dataloader(oxford_csv_dataset): dataloader = torch.utils.data.DataLoader(oxford_csv_dataset, batch_size=8, shuffle=False) return", "-> torch.utils.data.Dataset: transforms = [ torchvision.transforms.RandomResizedCrop(224), torchvision.transforms.ToTensor(), torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])," ]
[ "conn.execute( \"\"\"insert into subs values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item ) ) await", "session.finish(\"查询路由地址不能为空哦!\") return if argv.add != None: await session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(), str(session.event.user_id))", "[ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def _(): bot = nonebot.get_bot() async", ") ) if resp[len(\"delete \") :] == \"0\": fail.append(doc[dl]) else: success.append(doc[dl]) if len(fail)", ")⌒★,早上好。\" ) except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def bk(): bot =", "await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except:", "conn: values = await conn.fetch(\"select gid from mg where rss = true\") values", "0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != [] else \"订阅\") ) session.finish( AutoReply(", "== \"group\": async with db.pool.acquire() as conn: values = await conn.fetch( \"select safe", "await call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name: str): if", "not in NOBROADCAST, key in FULLTEXT, values, ), loop, ) @on_command(\"rss\", only_to_me=False) async", "key in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls) > 0 and \" \".join(ls).strip()", "def rss(session: CommandSession): if \"subs\" in session.state: async with db.pool.acquire() as conn: for", "\" + i[\"rss\"] for i in values]) ) ) else: loop = asyncio.get_event_loop()", "session.bot, nm, None, item, feedBack=session.event.group_id if session.event.detail_type != \"private\" else False, ), 
loop,", ") else: loop = asyncio.get_event_loop() for item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id,", "if resp[len(\"delete \") :] == \"0\": fail.append(doc[dl]) else: success.append(doc[dl]) if len(fail) > 0:", "in i], ) ) @on_command(\"订阅\", only_to_me=False, shell_like=True) async def subs(session: CommandSession): ls =", "as conn: ls = await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete from backup\"\"\")", "import Command, call_command from nonebot.message import unescape, escape import asyncio import asyncpg import", "as CQHttpError from nonebot.argparse import ArgumentParser import sys from nonebot.log import logger from", "[\"pprice\"] BROADCASTGROUP = [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def _(): bot", "getrss, (1, 1), route=rt, ) if resp and session.event.detail_type != \"private\": await session.send(", ") if len(values) == 0: session.finish(\"貌似你没有订阅任何 rss 源\") await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format(", "\"以下是 rss 指令支持的源\", [(i, j) for i, j in doc.items() if \"r18\" not", "len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add != None: await session.send(str(session.event.user_id)) result =", "await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" - \" + i[\"rss\"] for", "for key in doc: if key in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls)", "== 0: session.finish(\"貌似你没有订阅任何 rss 源\") await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \"", "\" \".join(ls).strip() != \"\": await session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) + \"添加订阅!\"", "asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm, None, 
item, feedBack=session.event.group_id if session.event.detail_type != \"private\" else", ") logger.error(\"some rss issue\", exc_info=True) elif \"route\" in session.state: for rt in session.state[\"ls\"]:", "\"group\" event[\"sub_type\"] = None event[\"group_id\"] = session.event.group_id await call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18", "nm, None, item, feedBack=session.event.group_id if session.event.detail_type != \"private\" else False, ), loop, )", "key in doc: if key in NOUPDATE or \"pixiv\" in key: continue asyncio.run_coroutine_threadsafe(", "= await conn.fetch(\"select gid from mg where rss = true\") values = [int(item[\"gid\"])", "message id 为:{session.event.message_id}\" ) logger.error(\"some rss issue\", exc_info=True) elif \"route\" in session.state: for", "\"rss\", current_arg=\"-d \" + ls, disable_interaction=True, ) if flg == False: session.finish(\"取消订阅失败\") @on_command(\"up\",", "bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire() as conn: values =", "[] for _, dl in session.state[\"ls\"]: resp = await conn.execute( \"delete from subs", "\"route\" in session.state: for rt in session.state[\"ls\"]: resp = await sendrss( session.event.user_id, session.bot,", "hour=\"0,6,12,18\", minute=\"0\") async def bk(): bot = nonebot.get_bot() async with db.pool.acquire() as conn:", ") @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __(): bot = nonebot.get_bot() loop = asyncio.get_event_loop() async", "= argv.route if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add != None: await", "bot = nonebot.get_bot() async with db.pool.acquire() as conn: ls = await bot.get_group_member_list( group_id=bot.config.QGROUP,", "for item in values: item = item[\"gid\"] try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\"", "from .pork_price import pprice from .bh3 import bh3 from .hpoi import hpoi from", "!= None: 
session.state[\"subs\"] = argv.subs ls = argv.subs if argv.delete != None: session.state[\"del\"]", "for item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm, None, item, feedBack=session.event.group_id", "pprice from .bh3 import bh3 from .hpoi import hpoi from .xlOfficial import xl", "values[0][\"safe\"]: ls = [i for i in ls if \"r18\" not in i]", "sendrss, getrss, handlerss, AutoReply from .bcr import bcr from .mrfz import mrfz from", "or \"pixiv\" in key: continue asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key), key not in", ") if len(session.state[\"ls\"]) == 0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != [] else", ") ) if len(session.state[\"ls\"]) == 0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != []", "\"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\", }", "_, dl in session.state[\"ls\"]: resp = await conn.execute( \"delete from subs where qid", "为:{session.event.message_id}\" ) logger.error(\"some rss issue\", exc_info=True) elif \"route\" in session.state: for rt in", "session.state[\"ls\"]: try: await conn.execute( \"\"\"insert into subs values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item", "in session.state: async with db.pool.acquire() as conn: for _, item in session.state[\"ls\"]: try:", "into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __(): bot = nonebot.get_bot() loop", "minutes=20) async def __(): bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire()", "> 0 and values[0][\"safe\"]: ls = [i for i in ls if \"r18\"", "in FULLTEXT, values, ), loop, ) print(f\"finished at {time.strftime('%X')}\") 
@on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER)", "from mg where rss = true\") values = [int(item[\"gid\"]) for item in values]", ") subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \"))", ") ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await", "async with db.pool.acquire() as conn: values = await conn.fetch( \"select safe from mg", "nonebot.plugin import perm from nonebot.command import Command, call_command from nonebot.message import unescape, escape", "from mg where gid = {}\".format(session.event.group_id) ) if len(values) > 0 and values[0][\"safe\"]:", "\"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i, j) for i, j in doc.items() if", "perm from nonebot.command import Command, call_command from nonebot.message import unescape, escape import asyncio", "\"del\" in session.state: async with db.pool.acquire() as conn: fail = [] success =", "time __plugin_name__ = \"rss 订阅\" NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT", "route=rt, ) if resp and session.event.detail_type != \"private\": await session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\")", "\"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"] = None event[\"group_id\"] = session.event.group_id await call_command( session.bot,", "from db import db import cq from utils import doc import feedparser as", "session.state[\"list\"] = argv.list if argv.list: return if argv.subs != None: session.state[\"subs\"] = argv.subs", "return if argv.add != None: await session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result)", 
"async def _(session: CommandSession): event = { \"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\",", "\"0\": fail.append(doc[dl]) else: success.append(doc[dl]) if len(fail) > 0: await session.send( cq.at(session.event.user_id) + f\"这{'个'", "FULLTEXT = [\"pprice\"] BROADCASTGROUP = [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def", "and \" \".join(ls).strip() != \"\": await session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) +", "fail = [] success = [] for _, dl in session.state[\"ls\"]: resp =", "len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) > 0: await session.send( cq.at(session.event.user_id) + f\"", "feedparser as fp import re from .utils import sendrss, getrss, handlerss, AutoReply from", "Information\", item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except", "ls = argv.subs if argv.delete != None: session.state[\"del\"] = argv.delete ls = argv.delete", "= argv.route session.state[\"ls\"] = argv.route if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add", "session.bot, session.event, \"rss\", current_arg=\"-d \" + ls, disable_interaction=True, ) if flg == False:", "as conn: values = await conn.fetch( \"select safe from mg where gid =", "key in FULLTEXT, values, ), loop, ) print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False, shell_like=True,", "+ \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" - \" + i[\"rss\"] for i in values])", "return if argv.subs != None: session.state[\"subs\"] = argv.subs ls = argv.subs if argv.delete", "async def bk(): bot = nonebot.get_bot() async with db.pool.acquire() as conn: ls =", "async def rss(session: CommandSession): if \"subs\" in 
session.state: async with db.pool.acquire() as conn:", "\") :] == \"0\": fail.append(doc[dl]) else: success.append(doc[dl]) if len(fail) > 0: await session.send(", "only_to_me=False, shell_like=True) async def subs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await", "parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\")", "i] for key in doc: if key in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if", "= await call_command( session.bot, session.event, \"rss\", current_arg=\"-d \" + ls, disable_interaction=True, ) if", "Command, call_command from nonebot.message import unescape, escape import asyncio import asyncpg import nonebot", "def subs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event,", "pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def bk(): bot = nonebot.get_bot() async with db.pool.acquire()", "session.state[\"list\"]: async with db.pool.acquire() as conn: values = await conn.fetch( \"select * from", "订阅\" NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP =", "gid = {}\".format(session.event.group_id) ) if len(values) > 0 and values[0][\"safe\"]: ls = [i", "AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i, j) for i, j in doc.items()", "only_to_me=False) async def rss(session: CommandSession): if \"subs\" in session.state: async with db.pool.acquire() as", "await conn.execute( \"\"\"insert into subs values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item ) )", "False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def unsubs(session: CommandSession): ls = session.current_arg_text.strip(\" \")", "subparser.add_argument( \"-l\", 
\"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的", ") if flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def unsubs(session: CommandSession):", "except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\" ) logger.error(\"some", "item in ls: await conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async", "logger from db import db import cq from utils import doc import feedparser", "elif \"del\" in session.state: async with db.pool.acquire() as conn: fail = [] success", "feedBack=session.event.group_id if session.event.detail_type != \"private\" else False, ), loop, ) @rss.args_parser async def", "event[\"message_type\"] = \"group\" event[\"sub_type\"] = None event[\"group_id\"] = session.event.group_id await call_command( session.bot, session.event,", ".loli import loli from .pork_price import pprice from .bh3 import bh3 from .hpoi", "except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def bk(): bot = nonebot.get_bot() async", "\"\"\"insert into subs values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\")", "conn: values = await conn.fetch(\"select gid from mg where morningcall = true\") for", "if len(values) > 0 and values[0][\"safe\"]: ls = [i for i in ls", "in NOBROADCAST, key in FULLTEXT, values, ), loop, ) @on_command(\"rss\", only_to_me=False) async def", "\"订阅\") ) session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i, j) for i,", "await session.send( 
cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async with db.pool.acquire() as", "len(fail) > 0: await session.send( cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" )", "session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\", } if", "hour=\"5\", minute=\"0\") async def _(): bot = nonebot.get_bot() async with db.pool.acquire() as conn:", "await conn.fetch(\"select gid from mg where rss = true\") values = [int(item[\"gid\"]) for", "escape import asyncio import asyncpg import nonebot from aiocqhttp.exceptions import Error as CQHttpError", "session.event.message, \"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\", } if session.event.detail_type !=", "== 0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != [] else \"订阅\") ) session.finish(", "cq.at(545870222) + \"添加订阅!\" ) ) if len(session.state[\"ls\"]) == 0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if", "_(session: CommandSession): event = { \"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\", \"message_type\": \"private\",", "import db import cq from utils import doc import feedparser as fp import", "current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name: str): if \"pixiv\" in name: name =", ") except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def bk(): bot = nonebot.get_bot()", "CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-s", "@nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def bk(): bot = nonebot.get_bot() 
async with db.pool.acquire() as", "aiocqhttp.exceptions import Error as CQHttpError from nonebot.argparse import ArgumentParser import sys from nonebot.log", "doc.items() if \"r18\" not in i], ) ) @on_command(\"订阅\", only_to_me=False, shell_like=True) async def", "await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message", "argv.subs ls = argv.subs if argv.delete != None: session.state[\"del\"] = argv.delete ls =", "session.state[\"ls\"] = argv.route if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add != None:", "rss(session: CommandSession): if \"subs\" in session.state: async with db.pool.acquire() as conn: for _,", "for i in ls if \"r18\" not in i] for key in doc:", "\"rss 订阅\" NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP", "\"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\", } if session.event.detail_type != \"private\": event[\"message_type\"] = \"group\"", "values] for key in doc: if key in NOUPDATE or \"pixiv\" in key:", "sendrss( session.event.user_id, session.bot, \"自定义路由\", None, getrss, (1, 1), route=rt, ) if resp and", "try: await conn.execute( \"\"\"insert into subs values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item )", "f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) > 0: await session.send( cq.at(session.event.user_id)", "\"sub_type\": \"friend\", } if session.event.detail_type != \"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"] = None", "AutoReply from .bcr import bcr from .mrfz import mrfz from .loli import loli", "in ls: await conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) 
@nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def", "on_startup from nonebot.plugin import perm from nonebot.command import Command, call_command from nonebot.message import", "sys from nonebot.log import logger from db import db import cq from utils", "list(set(ls)) if session.event.detail_type == \"group\": async with db.pool.acquire() as conn: values = await", "session.state[\"rss\"] != [] else \"订阅\") ) session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\",", "import ArgumentParser import sys from nonebot.log import logger from db import db import", "!= \"private\": await session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in session.state: async", "session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != [] else \"订阅\") ) session.finish( AutoReply( \"Rss 指令帮助菜单\",", "+ ls, disable_interaction=True, ) if flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async", "only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def up(x): print(f\"started at {time.strftime('%X')}\") bot = nonebot.get_bot() loop", "len(values) == 0: session.finish(\"貌似你没有订阅任何 rss 源\") await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] +", "if session.is_first_run: parser = ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss", "\"r18\" not in i] for key in doc: if key in ls[:]: session.state[\"ls\"].append((gtfun(key),", "== 0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add != None: await session.send(str(session.event.user_id)) result = await", "handlerss( bot, key, gtfun(key), key not in NOBROADCAST, key in FULLTEXT, values, ),", "rt in session.state[\"ls\"]: resp = await sendrss( session.event.user_id, session.bot, \"自定义路由\", None, getrss, (1,", ") @rss.args_parser async def _(session: 
CommandSession): if session.is_first_run: parser = ArgumentParser(session=session) subparser =", "= nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire() as conn: values = await", "item in session.state[\"ls\"]: try: await conn.execute( \"\"\"insert into subs values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No", "else: success.append(doc[dl]) if len(fail) > 0: await session.send( cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1", "subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的", "@on_command(\"rss\", only_to_me=False) async def rss(session: CommandSession): if \"subs\" in session.state: async with db.pool.acquire()", "源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument( \"-l\", \"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\"", "morningcall = true\") for item in values: item = item[\"gid\"] try: await bot.send_group_msg(", "session.state: for rt in session.state[\"ls\"]: resp = await sendrss( session.event.user_id, session.bot, \"自定义路由\", None,", "session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id", "!= []: session.state[\"rss\"] = argv.rss ls = argv.rss if argv.route != None: session.state[\"route\"]", "_(): bot = nonebot.get_bot() async with db.pool.acquire() as conn: values = await conn.fetch(\"select", "= [] success = [] for _, dl in session.state[\"ls\"]: resp = await", "async with db.pool.acquire() as conn: ls = await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await", "add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls = list(set(ls)) if 
session.event.detail_type == \"group\": async with db.pool.acquire()", ".hpoi import hpoi from .xlOfficial import xl from .pixiv import pixiv import time", "if argv.subs != None: session.state[\"subs\"] = argv.subs ls = argv.subs if argv.delete !=", "!= None: session.state[\"del\"] = argv.delete ls = argv.delete if argv.rss != []: session.state[\"rss\"]", "values = await conn.fetch( \"select safe from mg where gid = {}\".format(session.event.group_id) )", "session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await", "for rt in session.state[\"ls\"]: resp = await sendrss( session.event.user_id, session.bot, \"自定义路由\", None, getrss,", "+ i[\"rss\"] for i in values]) ) ) else: loop = asyncio.get_event_loop() for", "async def _(session: CommandSession): if session.is_first_run: parser = ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\",", ") await conn.execute(\"\"\"delete from backup\"\"\") for item in ls: await conn.execute( f\"\"\"insert into", "from .utils import sendrss, getrss, handlerss, AutoReply from .bcr import bcr from .mrfz", "= await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls = list(set(ls)) if session.event.detail_type == \"group\": async", "> 0 and \" \".join(ls).strip() != \"\": await session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) +", ") if len(success) > 0: await session.send( cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif", "permission=perm.SUPERUSER) async def up(x): print(f\"started at {time.strftime('%X')}\") bot = nonebot.get_bot() loop = asyncio.get_event_loop()", "safe from mg where gid = {}\".format(session.event.group_id) ) if len(values) > 0 and", 
"self_id=3418961367 ) await conn.execute(\"\"\"delete from backup\"\"\") for item in ls: await conn.execute( f\"\"\"insert", "session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-s \" + ls,", "resp and session.event.detail_type != \"private\": await session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\"", "at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def _(session: CommandSession): event = {", "where gid = {}\".format(session.event.group_id) ) if len(values) > 0 and values[0][\"safe\"]: ls =", "if argv.list: return if argv.subs != None: session.state[\"subs\"] = argv.subs ls = argv.subs", "= true\") values = [int(item[\"gid\"]) for item in values] for key in doc:", "in values]) ) ) else: loop = asyncio.get_event_loop() for item, nm in session.state[\"ls\"]:", "= \"rss 订阅\" NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT = [\"pprice\"]", "if len(values) == 0: session.finish(\"貌似你没有订阅任何 rss 源\") await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]]", "await sendrss( session.event.user_id, session.bot, \"自定义路由\", None, getrss, (1, 1), route=rt, ) if resp", "= [int(item[\"gid\"]) for item in values] for key in doc: if key in", "loli from .pork_price import pprice from .bh3 import bh3 from .hpoi import hpoi", "session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls) > 0 and \" \".join(ls).strip() != \"\": await", "asyncpg import nonebot from aiocqhttp.exceptions import Error as CQHttpError from nonebot.argparse import ArgumentParser", "= [\"pprice\"] BROADCASTGROUP = [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def _():", "len(values) > 0 and values[0][\"safe\"]: ls = [i for i in ls if", "\".join(ls).strip() != \"\": await session.send( unescape( 
\"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) + \"添加订阅!\" )", "!= [] else \"订阅\") ) session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i,", "ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-s \"", "mg where rss = true\") values = [int(item[\"gid\"]) for item in values] for", "in doc.items() if \"r18\" not in i], ) ) @on_command(\"订阅\", only_to_me=False, shell_like=True) async", "= {}\".format(session.event.group_id) ) if len(values) > 0 and values[0][\"safe\"]: ls = [i for", "if len(fail) > 0: await session.send( cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\"", "if flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def unsubs(session: CommandSession): ls", "CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-d", "\"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name: str): if \"pixiv\" in name: name", "session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def unsubs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg", ") await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\")", "sendrss( session.event.user_id, session.bot, nm, None, item, feedBack=session.event.group_id if session.event.detail_type != \"private\" else False,", "@on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def up(x): print(f\"started at {time.strftime('%X')}\") bot = nonebot.get_bot()", "0 and \" \".join(ls).strip() != \"\": await session.send( 
unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222)", "{} and rss = '{}'\".format( session.event.user_id, dl ) ) if resp[len(\"delete \") :]", "] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def _(): bot = nonebot.get_bot() async with db.pool.acquire()", "not in i] for key in doc: if key in ls[:]: session.state[\"ls\"].append((gtfun(key), key))", "def bk(): bot = nonebot.get_bot() async with db.pool.acquire() as conn: ls = await", "@on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def unsubs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg =", "for i, j in doc.items() if \"r18\" not in i], ) ) @on_command(\"订阅\",", "订阅\") subparser.add_argument( \"-l\", \"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\",", "subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"]", "nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm, None, item, feedBack=session.event.group_id if session.event.detail_type", "item[\"gid\"] try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\",", "= argv.delete if argv.rss != []: session.state[\"rss\"] = argv.rss ls = argv.rss if", "session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls = list(set(ls)) if session.event.detail_type ==", "), loop, ) print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def _(session:", "only_to_me=False, shell_like=True, 
permission=perm.SUPERUSER) async def _(session: CommandSession): event = { \"user_id\": session.event.user_id, \"message\":", "doc import feedparser as fp import re from .utils import sendrss, getrss, handlerss,", "session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in session.state: async with db.pool.acquire() as", "{}\".format(session.event.group_id) ) if len(values) > 0 and values[0][\"safe\"]: ls = [i for i", "ls if \"r18\" not in i] for key in doc: if key in", "rss 指令支持的源\", [(i, j) for i, j in doc.items() if \"r18\" not in", ".bcr import bcr from .mrfz import mrfz from .loli import loli from .pork_price", "db.pool.acquire() as conn: ls = await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete from", "import sendrss, getrss, handlerss, AutoReply from .bcr import bcr from .mrfz import mrfz", "async with db.pool.acquire() as conn: fail = [] success = [] for _,", "conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __(): bot =", "\"group\": async with db.pool.acquire() as conn: values = await conn.fetch( \"select safe from", "key, gtfun(key), key not in NOBROADCAST, key in FULLTEXT, values, ), loop, )", "call_command from nonebot.message import unescape, escape import asyncio import asyncpg import nonebot from", ".xlOfficial import xl from .pixiv import pixiv import time __plugin_name__ = \"rss 订阅\"", "subs where qid = {}\".format(session.event.user_id) ) if len(values) == 0: session.finish(\"貌似你没有订阅任何 rss 源\")", "= await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete from backup\"\"\") for item in", "conn.fetch( \"select safe from mg where gid = {}\".format(session.event.group_id) ) if len(values) >", "success.append(doc[dl]) if len(fail) > 0: await session.send( 
cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1 else", "pixiv import time __plugin_name__ = \"rss 订阅\" NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST =", "argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = [] session.state[\"list\"] = argv.list if argv.list: return", "* from subs where qid = {}\".format(session.event.user_id) ) if len(values) == 0: session.finish(\"貌似你没有订阅任何", "i in ls if \"r18\" not in i] for key in doc: if", "where morningcall = true\") for item in values: item = item[\"gid\"] try: await", "loop, ) @rss.args_parser async def _(session: CommandSession): if session.is_first_run: parser = ArgumentParser(session=session) subparser", "bk(): bot = nonebot.get_bot() async with db.pool.acquire() as conn: ls = await bot.get_group_member_list(", "), loop, ) @on_command(\"rss\", only_to_me=False) async def rss(session: CommandSession): if \"subs\" in session.state:", "fail.append(doc[dl]) else: success.append(doc[dl]) if len(fail) > 0: await session.send( cq.at(session.event.user_id) + f\"这{'个' if", "values, ), loop, ) @on_command(\"rss\", only_to_me=False) async def rss(session: CommandSession): if \"subs\" in", "= argv.subs ls = argv.subs if argv.delete != None: session.state[\"del\"] = argv.delete ls", "rss = '{}'\".format( session.event.user_id, dl ) ) if resp[len(\"delete \") :] == \"0\":", "unsubs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\",", "session.state[\"rss\"] = argv.rss ls = argv.rss if argv.route != None: session.state[\"route\"] = argv.route", ") elif \"del\" in session.state: async with db.pool.acquire() as conn: fail = []", "None: await session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls = list(set(ls)) if", "from .mrfz import mrfz from .loli import loli from .pork_price import pprice from", "= argv.delete ls = argv.delete 
if argv.rss != []: session.state[\"rss\"] = argv.rss ls", "if \"subs\" in session.state: async with db.pool.acquire() as conn: for _, item in", "145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def _(): bot = nonebot.get_bot() async with", "argv.list: return if argv.subs != None: session.state[\"subs\"] = argv.subs ls = argv.subs if", "in session.state[\"ls\"]: resp = await conn.execute( \"delete from subs where qid = {}", "f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async with db.pool.acquire() as conn: values = await", "in session.state: for rt in session.state[\"ls\"]: resp = await sendrss( session.event.user_id, session.bot, \"自定义路由\",", "loop = asyncio.get_event_loop() async with db.pool.acquire() as conn: values = await conn.fetch(\"select gid", "session.event.user_id, session.bot, nm, None, item, feedBack=session.event.group_id if session.event.detail_type != \"private\" else False, ),", "= true\") for item in values: item = item[\"gid\"] try: await bot.send_group_msg( group_id=int(item),", "if len(session.state[\"ls\"]) == 0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != [] else \"订阅\")", "\"自定义路由\", None, getrss, (1, 1), route=rt, ) if resp and session.event.detail_type != \"private\":", "> 0: await session.send( cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async with", "call_command( session.bot, session.event, \"rss\", current_arg=\"-d \" + ls, disable_interaction=True, ) if flg ==", "\"\\n\".join([doc[i[\"rss\"]] + \" - \" + i[\"rss\"] for i in values]) ) )", "= { \"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\":", "conn: ls = await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete from 
backup\"\"\") for", "await session.send( cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) >", "ls: await conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __():", "j in doc.items() if \"r18\" not in i], ) ) @on_command(\"订阅\", only_to_me=False, shell_like=True)", "session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-d \" + ls,", "resp = await conn.execute( \"delete from subs where qid = {} and rss", "if key in NOUPDATE or \"pixiv\" in key: continue asyncio.run_coroutine_threadsafe( handlerss( bot, key,", "conn: values = await conn.fetch( \"select safe from mg where gid = {}\".format(session.event.group_id)", "argv.rss if argv.route != None: session.state[\"route\"] = argv.route session.state[\"ls\"] = argv.route if len(session.state[\"ls\"])", "if argv.add != None: await session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls", "{time.strftime('%X')}\") bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire() as conn: values", "values: item = item[\"gid\"] try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError:", "action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv", "= list(set(ls)) if session.event.detail_type == \"group\": async with db.pool.acquire() as conn: values =", "[]: session.state[\"rss\"] = argv.rss ls = argv.rss if argv.route != None: session.state[\"route\"] =", "conn.execute( \"delete from subs where qid = {} and rss = '{}'\".format( session.event.user_id,", "doc: if key in NOUPDATE or \"pixiv\" in key: 
continue asyncio.run_coroutine_threadsafe( handlerss( bot,", "from nonebot import on_command, CommandSession, on_startup from nonebot.plugin import perm from nonebot.command import", "1), route=rt, ) if resp and session.event.detail_type != \"private\": await session.send( unescape(cq.at(session.event.user_id) +", "await conn.fetch( \"select safe from mg where gid = {}\".format(session.event.group_id) ) if len(values)", "session.event.detail_type != \"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"] = None event[\"group_id\"] = session.event.group_id await", "await conn.execute(\"\"\"delete from backup\"\"\") for item in ls: await conn.execute( f\"\"\"insert into backup", "conn.fetch( \"select * from subs where qid = {}\".format(session.event.user_id) ) if len(values) ==", "item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError:", "values = await conn.fetch(\"select gid from mg where rss = true\") values =", "ArgumentParser import sys from nonebot.log import logger from db import db import cq", "import asyncio import asyncpg import nonebot from aiocqhttp.exceptions import Error as CQHttpError from", "\"delete from subs where qid = {} and rss = '{}'\".format( session.event.user_id, dl", "import loli from .pork_price import pprice from .bh3 import bh3 from .hpoi import", "{ \"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\",", "None: session.state[\"subs\"] = argv.subs ls = argv.subs if argv.delete != None: session.state[\"del\"] =", "argv.delete != None: session.state[\"del\"] = argv.delete ls = argv.delete if argv.rss != []:", "resp[len(\"delete \") :] == \"0\": fail.append(doc[dl]) else: 
success.append(doc[dl]) if len(fail) > 0: await", "session.send( cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async with db.pool.acquire() as conn:", "if argv.route != None: session.state[\"route\"] = argv.route session.state[\"ls\"] = argv.route if len(session.state[\"ls\"]) ==", "= argv.rss if argv.route != None: session.state[\"route\"] = argv.route session.state[\"ls\"] = argv.route if", "CommandSession, on_startup from nonebot.plugin import perm from nonebot.command import Command, call_command from nonebot.message", "@on_command(\"订阅\", only_to_me=False, shell_like=True) async def subs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg =", "session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\" ) logger.error(\"some rss issue\", exc_info=True)", "group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete from backup\"\"\") for item in ls: await conn.execute(", "if session.event.detail_type != \"private\" else False, ), loop, ) @rss.args_parser async def _(session:", "from .hpoi import hpoi from .xlOfficial import xl from .pixiv import pixiv import", "conn: fail = [] success = [] for _, dl in session.state[\"ls\"]: resp", "指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i, j) for i, j in doc.items() if \"r18\"", "for _, dl in session.state[\"ls\"]: resp = await conn.execute( \"delete from subs where", "in values] for key in doc: if key in NOUPDATE or \"pixiv\" in", "key in NOUPDATE or \"pixiv\" in key: continue asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key),", "logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\"", "session.event.detail_type != \"private\": await session.send( 
unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in session.state:", "minute=\"0\") async def bk(): bot = nonebot.get_bot() async with db.pool.acquire() as conn: ls", "CQHttpError from nonebot.argparse import ArgumentParser import sys from nonebot.log import logger from db", "mrfz from .loli import loli from .pork_price import pprice from .bh3 import bh3", "\"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument( \"-l\",", ") if resp[len(\"delete \") :] == \"0\": fail.append(doc[dl]) else: success.append(doc[dl]) if len(fail) >", "CommandSession): if session.is_first_run: parser = ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的", "db.pool.acquire() as conn: values = await conn.fetch( \"select * from subs where qid", "getrss, handlerss, AutoReply from .bcr import bcr from .mrfz import mrfz from .loli", "if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add != None: await session.send(str(session.event.user_id)) result", "key not in NOBROADCAST, key in FULLTEXT, values, ), loop, ) print(f\"finished at", "loop = asyncio.get_event_loop() for item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm,", "import feedparser as fp import re from .utils import sendrss, getrss, handlerss, AutoReply", "argv.delete if argv.rss != []: session.state[\"rss\"] = argv.rss ls = argv.rss if argv.route", "backup\"\"\") for item in ls: await conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\",", "call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name: str): if \"pixiv\"", 
"\"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" - \" + i[\"rss\"] for i in values]) )", "= argv.list if argv.list: return if argv.subs != None: session.state[\"subs\"] = argv.subs ls", "dl in session.state[\"ls\"]: resp = await conn.execute( \"delete from subs where qid =", "values, ), loop, ) print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def", "\"添加订阅!\" ) ) if len(session.state[\"ls\"]) == 0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] !=", "where qid = {} and rss = '{}'\".format( session.event.user_id, dl ) ) if", "asyncio.get_event_loop() for item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm, None, item,", "session.event, \"rss\", current_arg=\"-s \" + ls, disable_interaction=True, ) if flg == False: session.finish(\"订阅失败\")", "False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def up(x): print(f\"started at {time.strftime('%X')}\") bot", "item in values: item = item[\"gid\"] try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" )", "re from .utils import sendrss, getrss, handlerss, AutoReply from .bcr import bcr from", "NOBROADCAST, key in FULLTEXT, values, ), loop, ) @on_command(\"rss\", only_to_me=False) async def rss(session:", "db.pool.acquire() as conn: for _, item in session.state[\"ls\"]: try: await conn.execute( \"\"\"insert into", "\"rss\", current_arg=\"-s \" + ls, disable_interaction=True, ) if flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\",", "'{}'\".format( session.event.user_id, dl ) ) if resp[len(\"delete \") :] == \"0\": fail.append(doc[dl]) else:", "loop, ) @on_command(\"rss\", only_to_me=False) async def rss(session: CommandSession): if \"subs\" in session.state: async", "\"private\" else False, ), loop, ) @rss.args_parser async 
def _(session: CommandSession): if session.is_first_run:", "from nonebot.plugin import perm from nonebot.command import Command, call_command from nonebot.message import unescape,", "session.state: async with db.pool.acquire() as conn: for _, item in session.state[\"ls\"]: try: await", "+ ls, disable_interaction=True, ) if flg == False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER)", "session.bot, \"自定义路由\", None, getrss, (1, 1), route=rt, ) if resp and session.event.detail_type !=", "ls = argv.delete if argv.rss != []: session.state[\"rss\"] = argv.rss ls = argv.rss", "({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\")", "bcr from .mrfz import mrfz from .loli import loli from .pork_price import pprice", "else \"订阅\") ) session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i, j) for", "as fp import re from .utils import sendrss, getrss, handlerss, AutoReply from .bcr", "pixiv_day_male_r18\", ) def gtfun(name: str): if \"pixiv\" in name: name = \"pixiv\" return", "conn: values = await conn.fetch( \"select * from subs where qid = {}\".format(session.event.user_id)", "rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss", "len(success) > 0: await session.send( cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async", "[] session.state[\"list\"] = argv.list if argv.list: return if argv.subs != None: session.state[\"subs\"] =", "async def unsubs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot,", "import cq from utils import doc import feedparser as fp import re from", "+ 
f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in session.state: async with db.pool.acquire() as conn: fail", "0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add != None: await session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(),", "values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await", "from subs where qid = {} and rss = '{}'\".format( session.event.user_id, dl )", "+ cq.at(545870222) + \"添加订阅!\" ) ) if len(session.state[\"ls\"]) == 0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\"", "in doc: if key in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls) > 0", "argv.rss != []: session.state[\"rss\"] = argv.rss ls = argv.rss if argv.route != None:", "nonebot.command import Command, call_command from nonebot.message import unescape, escape import asyncio import asyncpg", "= [\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP = [ 145029700,", "\"pixiv\" in key: continue asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key), key not in NOBROADCAST,", "parser = ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\",", "shell_like=True) async def subs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command(", "\" - \" + i[\"rss\"] for i in values]) ) ) else: loop", "= None event[\"group_id\"] = session.event.group_id await call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\",", "where qid = {}\".format(session.event.user_id) ) if len(values) == 0: session.finish(\"貌似你没有订阅任何 rss 源\") await", "cq from utils import doc import feedparser as fp import re from .utils", "subs(session: 
CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\",", "nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument( \"-l\", \"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\",", "= session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-s \" +", "async def up(x): print(f\"started at {time.strftime('%X')}\") bot = nonebot.get_bot() loop = asyncio.get_event_loop() async", "import xl from .pixiv import pixiv import time __plugin_name__ = \"rss 订阅\" NOUPDATE", "= await sendrss( session.event.user_id, session.bot, \"自定义路由\", None, getrss, (1, 1), route=rt, ) if", "= {} and rss = '{}'\".format( session.event.user_id, dl ) ) if resp[len(\"delete \")", "await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async", "import Error as CQHttpError from nonebot.argparse import ArgumentParser import sys from nonebot.log import", "for item in ls: await conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20)", "conn.fetch(\"select gid from mg where morningcall = true\") for item in values: item", "import nonebot from aiocqhttp.exceptions import Error as CQHttpError from nonebot.argparse import ArgumentParser import", "session.state: async with db.pool.acquire() as conn: fail = [] success = [] for", "FULLTEXT, values, ), loop, ) @on_command(\"rss\", only_to_me=False) async def rss(session: CommandSession): if \"subs\"", "from .loli import loli from .pork_price import pprice from .bh3 import bh3 from", "session.event.raw_message, \"sub_type\": \"friend\", } if session.event.detail_type != \"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"] =", 
"current_arg=\"-s \" + ls, disable_interaction=True, ) if flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False,", "argv.subs != None: session.state[\"subs\"] = argv.subs ls = argv.subs if argv.delete != None:", "bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def", "cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async with db.pool.acquire() as conn: values", "unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) + \"添加订阅!\" ) ) if len(session.state[\"ls\"]) == 0:", "for _, item in session.state[\"ls\"]: try: await conn.execute( \"\"\"insert into subs values ({},'{}','{}')\"\"\".format(", "asyncio.get_event_loop() async with db.pool.acquire() as conn: values = await conn.fetch(\"select gid from mg", "\"\": await session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) + \"添加订阅!\" ) ) if", "\"r18\" not in i], ) ) @on_command(\"订阅\", only_to_me=False, shell_like=True) async def subs(session: CommandSession):", "\") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-s \" + ls, disable_interaction=True,", "cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) > 0: await", "async def __(): bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire() as", "\"No Information\", item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True)", "+ f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async with db.pool.acquire() as conn: values =", "await call_command( session.bot, session.event, \"rss\", current_arg=\"-s \" + ls, 
disable_interaction=True, ) if flg", "help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除", "loop, ) print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def _(session: CommandSession):", "j) for i, j in doc.items() if \"r18\" not in i], ) )", "session.bot, session.event, \"rss\", current_arg=\"-s \" + ls, disable_interaction=True, ) if flg == False:", "import sys from nonebot.log import logger from db import db import cq from", "elif \"route\" in session.state: for rt in session.state[\"ls\"]: resp = await sendrss( session.event.user_id,", "= parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = [] session.state[\"list\"] = argv.list if argv.list: return if", "result = await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls = list(set(ls)) if session.event.detail_type == \"group\":", "\"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP = [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\",", "shell_like=True) async def unsubs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command(", "if argv.rss != []: session.state[\"rss\"] = argv.rss ls = argv.rss if argv.route !=", "f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in session.state: async with db.pool.acquire() as conn: fail =", "from utils import doc import feedparser as fp import re from .utils import", "session.event.user_id, session.bot, \"自定义路由\", None, getrss, (1, 1), route=rt, ) if resp and session.event.detail_type", "+ f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) > 0: await session.send(", "\"select * from subs where qid = {}\".format(session.event.user_id) ) if len(values) == 0:", "{time.strftime('%X')}\") @on_command(\"带礼包\", 
only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def _(session: CommandSession): event = { \"user_id\":", ") elif session.state[\"list\"]: async with db.pool.acquire() as conn: values = await conn.fetch( \"select", "None: session.state[\"route\"] = argv.route session.state[\"ls\"] = argv.route if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return", "CommandSession): if \"subs\" in session.state: async with db.pool.acquire() as conn: for _, item", "from backup\"\"\") for item in ls: await conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" )", "fp import re from .utils import sendrss, getrss, handlerss, AutoReply from .bcr import", "i[\"rss\"] for i in values]) ) ) else: loop = asyncio.get_event_loop() for item,", "!= \"private\" else False, ), loop, ) @rss.args_parser async def _(session: CommandSession): if", "in values: item = item[\"gid\"] try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except", "session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name: str): if \"pixiv\" in name:", "import on_command, CommandSession, on_startup from nonebot.plugin import perm from nonebot.command import Command, call_command", "session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\" ) logger.error(\"some rss issue\", exc_info=True) elif \"route\" in", "= asyncio.get_event_loop() for item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm, None,", "(1, 1), route=rt, ) if resp and session.event.detail_type != \"private\": await session.send( unescape(cq.at(session.event.user_id)", "with db.pool.acquire() as conn: values = await conn.fetch( \"select safe from mg where", "subs where qid = {} and rss = '{}'\".format( session.event.user_id, dl ) )", "len(session.state[\"ls\"]) == 0: await session.send( 
\"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != [] else \"订阅\") )", "values = [int(item[\"gid\"]) for item in values] for key in doc: if key", "None, getrss, (1, 1), route=rt, ) if resp and session.event.detail_type != \"private\": await", "@on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def _(session: CommandSession): event = { \"user_id\": session.event.user_id,", "\"subs\" in session.state: async with db.pool.acquire() as conn: for _, item in session.state[\"ls\"]:", "= await conn.fetch( \"select * from subs where qid = {}\".format(session.event.user_id) ) if", "help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\"", "= [\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP = [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\")", "try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\")", "= \"group\" event[\"sub_type\"] = None event[\"group_id\"] = session.event.group_id await call_command( session.bot, session.event, \"rss\",", "None event[\"group_id\"] = session.event.group_id await call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", )", "if session.event.detail_type == \"group\": async with db.pool.acquire() as conn: values = await conn.fetch(", "if len(ls) > 0 and \" \".join(ls).strip() != \"\": await session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\"", "rss 源\") await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" - \" +", "import bcr from .mrfz import mrfz from .loli import loli from .pork_price import", "if session.state[\"rss\"] != [] else \"订阅\") ) 
session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss", "else: loop = asyncio.get_event_loop() for item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot,", ") if resp and session.event.detail_type != \"private\": await session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") )", "ls.remove(key) if len(ls) > 0 and \" \".join(ls).strip() != \"\": await session.send( unescape(", "await session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in session.state: async with db.pool.acquire()", "0 and values[0][\"safe\"]: ls = [i for i in ls if \"r18\" not", "subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\",", "argv.route != None: session.state[\"route\"] = argv.route session.state[\"ls\"] = argv.route if len(session.state[\"ls\"]) == 0:", "ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\",", "else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) > 0: await session.send( cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\"", "await session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls = list(set(ls)) if session.event.detail_type", "elif session.state[\"list\"]: async with db.pool.acquire() as conn: values = await conn.fetch( \"select *", "in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm, None, item, feedBack=session.event.group_id if session.event.detail_type !=", "with db.pool.acquire() as conn: values = await conn.fetch( \"select * from subs where", ") @on_command(\"rss\", 
only_to_me=False) async def rss(session: CommandSession): if \"subs\" in session.state: async with", "= {}\".format(session.event.user_id) ) if len(values) == 0: session.finish(\"貌似你没有订阅任何 rss 源\") await session.send( cq.at(session.event.user_id)", "flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-s \" + ls, disable_interaction=True, )", "gtfun(key), key not in NOBROADCAST, key in FULLTEXT, values, ), loop, ) print(f\"finished", "qid = {} and rss = '{}'\".format( session.event.user_id, dl ) ) if resp[len(\"delete", "unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in session.state: async with db.pool.acquire() as conn:", "= await conn.fetch( \"select safe from mg where gid = {}\".format(session.event.group_id) ) if", "), loop, ) @rss.args_parser async def _(session: CommandSession): if session.is_first_run: parser = ArgumentParser(session=session)", "await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\" ) logger.error(\"some rss issue\", exc_info=True) elif \"route\"", "import pixiv import time __plugin_name__ = \"rss 订阅\" NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST", "values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __(): bot = nonebot.get_bot() loop = asyncio.get_event_loop()", "session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i, j) for i, j in", "help=\"获取已存在的 rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = [] session.state[\"list\"] = argv.list", "\")) session.state[\"ls\"] = [] session.state[\"list\"] = argv.list if argv.list: return if argv.subs !=", "default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv =", "gtfun(key), key not in NOBROADCAST, key in FULLTEXT, values, ), loop, ) 
@on_command(\"rss\",", "gid from mg where morningcall = true\") for item in values: item =", "and session.event.detail_type != \"private\": await session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in", "argv.rss ls = argv.rss if argv.route != None: session.state[\"route\"] = argv.route session.state[\"ls\"] =", "from .pixiv import pixiv import time __plugin_name__ = \"rss 订阅\" NOUPDATE = [\"loli\",", "success = [] for _, dl in session.state[\"ls\"]: resp = await conn.execute( \"delete", "from subs where qid = {}\".format(session.event.user_id) ) if len(values) == 0: session.finish(\"貌似你没有订阅任何 rss", "item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm, None, item, feedBack=session.event.group_id if", ") session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i, j) for i, j", "i, j in doc.items() if \"r18\" not in i], ) ) @on_command(\"订阅\", only_to_me=False,", "cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" - \" + i[\"rss\"] for i in", "in session.state: async with db.pool.acquire() as conn: fail = [] success = []", "as conn: values = await conn.fetch( \"select * from subs where qid =", "!= None: await session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls = list(set(ls))", "@nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def _(): bot = nonebot.get_bot() async with db.pool.acquire() as", "源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = [] session.state[\"list\"] = argv.list if argv.list:", "rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = [] session.state[\"list\"] = argv.list if", "for key in doc: if key in NOUPDATE or \"pixiv\" in key: continue", "\"raw_message\": 
session.event.raw_message, \"sub_type\": \"friend\", } if session.event.detail_type != \"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"]", "= await conn.fetch(\"select gid from mg where morningcall = true\") for item in", "session.event.detail_type != \"private\" else False, ), loop, ) @rss.args_parser async def _(session: CommandSession):", "resp = await sendrss( session.event.user_id, session.bot, \"自定义路由\", None, getrss, (1, 1), route=rt, )", ".mrfz import mrfz from .loli import loli from .pork_price import pprice from .bh3", "def _(session: CommandSession): event = { \"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\", \"message_type\":", "} if session.event.detail_type != \"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"] = None event[\"group_id\"] =", "parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = [] session.state[\"list\"]", "event[\"group_id\"] = session.event.group_id await call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def", "values]) ) ) else: loop = asyncio.get_event_loop() for item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe(", "__plugin_name__ = \"rss 订阅\" NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT =", "rss = true\") values = [int(item[\"gid\"]) for item in values] for key in", "qid = {}\".format(session.event.user_id) ) if len(values) == 0: session.finish(\"貌似你没有订阅任何 rss 源\") await session.send(", "backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __(): bot = nonebot.get_bot() loop =", "= ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\") 
subparser.add_argument(\"-r\", \"--route\",", "import hpoi from .xlOfficial import xl from .pixiv import pixiv import time __plugin_name__", "async with db.pool.acquire() as conn: values = await conn.fetch(\"select gid from mg where", "Error as CQHttpError from nonebot.argparse import ArgumentParser import sys from nonebot.log import logger", "else False, ), loop, ) @rss.args_parser async def _(session: CommandSession): if session.is_first_run: parser", "= [i for i in ls if \"r18\" not in i] for key", "if key in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls) > 0 and \"", "true\") for item in values: item = item[\"gid\"] try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω<", "bh3 from .hpoi import hpoi from .xlOfficial import xl from .pixiv import pixiv", "async def subs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot,", "@nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __(): bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with", "session.is_first_run: parser = ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\")", "item in values] for key in doc: if key in NOUPDATE or \"pixiv\"", "item, feedBack=session.event.group_id if session.event.detail_type != \"private\" else False, ), loop, ) @rss.args_parser async", "in NOBROADCAST, key in FULLTEXT, values, ), loop, ) print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\",", "!= None: session.state[\"route\"] = argv.route session.state[\"ls\"] = argv.route if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\")", "await conn.fetch(\"select gid from mg where morningcall = true\") for item in values:", "item = item[\"gid\"] try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError: pass", "into subs 
values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except", "db.pool.acquire() as conn: values = await conn.fetch(\"select gid from mg where morningcall =", "nonebot.get_bot() async with db.pool.acquire() as conn: values = await conn.fetch(\"select gid from mg", "\") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-d \" + ls, disable_interaction=True,", ") def gtfun(name: str): if \"pixiv\" in name: name = \"pixiv\" return getattr(sys.modules[__name__],", "await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete from backup\"\"\") for item in ls:", "key not in NOBROADCAST, key in FULLTEXT, values, ), loop, ) @on_command(\"rss\", only_to_me=False)", "hpoi from .xlOfficial import xl from .pixiv import pixiv import time __plugin_name__ =", "import mrfz from .loli import loli from .pork_price import pprice from .bh3 import", "= [] for _, dl in session.state[\"ls\"]: resp = await conn.execute( \"delete from", "help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument( \"-l\", \"--list\", action=\"store_true\",", "{}\".format(session.event.user_id) ) if len(values) == 0: session.finish(\"貌似你没有订阅任何 rss 源\") await session.send( cq.at(session.event.user_id) +", "subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument(", "argv.route session.state[\"ls\"] = argv.route if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add !=", "exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\" )", "help=\"删除 rss 订阅\") 
subparser.add_argument( \"-l\", \"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\")", "true\") values = [int(item[\"gid\"]) for item in values] for key in doc: if", "\"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != [] else \"订阅\") ) session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是", "continue asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key), key not in NOBROADCAST, key in FULLTEXT,", "\"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] =", "0: await session.send( cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success)", "shell_like=True, permission=perm.SUPERUSER) async def _(session: CommandSession): event = { \"user_id\": session.event.user_id, \"message\": session.event.message,", "= session.event.group_id await call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name:", "from mg where morningcall = true\") for item in values: item = item[\"gid\"]", "NOBROADCAST = [\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP = [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\",", "print(f\"started at {time.strftime('%X')}\") bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire() as", "0: await session.send( cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async with db.pool.acquire()", ".utils import sendrss, getrss, handlerss, AutoReply from .bcr import bcr from .mrfz import", "session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name: str): if \"pixiv\" in", "async def _(): bot = nonebot.get_bot() async with db.pool.acquire() as conn: 
values =", "conn.execute(\"\"\"delete from backup\"\"\") for item in ls: await conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\"", "= argv.subs if argv.delete != None: session.state[\"del\"] = argv.delete ls = argv.delete if", "with db.pool.acquire() as conn: ls = await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete", "except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send(", "ls = list(set(ls)) if session.event.detail_type == \"group\": async with db.pool.acquire() as conn: values", "session.state[\"route\"] = argv.route session.state[\"ls\"] = argv.route if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return if", "nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire() as conn: values = await conn.fetch(\"select", "await conn.fetch( \"select * from subs where qid = {}\".format(session.event.user_id) ) if len(values)", "values = await conn.fetch(\"select gid from mg where morningcall = true\") for item", "f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __(): bot = nonebot.get_bot()", "= asyncio.get_event_loop() async with db.pool.acquire() as conn: values = await conn.fetch(\"select gid from", "session.state[\"subs\"] = argv.subs ls = argv.subs if argv.delete != None: session.state[\"del\"] = argv.delete", "CommandSession): event = { \"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\":", "session.state[\"ls\"] = [] session.state[\"list\"] = argv.list if argv.list: return if argv.subs != None:", 
"nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument( \"-l\", \"--list\",", "session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss( session.event.user_id, session.bot, nm, None, item, feedBack=session.event.group_id if session.event.detail_type != \"private\"", "bot, key, gtfun(key), key not in NOBROADCAST, key in FULLTEXT, values, ), loop,", "for item in values] for key in doc: if key in NOUPDATE or", "as conn: values = await conn.fetch(\"select gid from mg where morningcall = true\")", "doc: if key in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls) > 0 and", "minute=\"0\") async def _(): bot = nonebot.get_bot() async with db.pool.acquire() as conn: values", "in NOUPDATE or \"pixiv\" in key: continue asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key), key", "event[\"sub_type\"] = None event[\"group_id\"] = session.event.group_id await call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18", "\" + ls, disable_interaction=True, ) if flg == False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True,", "= parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss", "group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def bk():", "[(i, j) for i, j in doc.items() if \"r18\" not in i], )", "current_arg=\"-d \" + ls, disable_interaction=True, ) if flg == False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False,", "import perm from nonebot.command import Command, call_command from nonebot.message import unescape, escape import", "__(): bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with 
db.pool.acquire() as conn: values", "NOBROADCAST, key in FULLTEXT, values, ), loop, ) print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False,", "nargs=\"+\", help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\",", "in key: continue asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key), key not in NOBROADCAST, key", "with db.pool.acquire() as conn: fail = [] success = [] for _, dl", "argv.list if argv.list: return if argv.subs != None: session.state[\"subs\"] = argv.subs ls =", "pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name: str): if \"pixiv\" in name: name = \"pixiv\"", "nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = [] session.state[\"list\"] =", "db import cq from utils import doc import feedparser as fp import re", "db.pool.acquire() as conn: values = await conn.fetch( \"select safe from mg where gid", "\"friend\", } if session.event.detail_type != \"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"] = None event[\"group_id\"]", "nonebot.message import unescape, escape import asyncio import asyncpg import nonebot from aiocqhttp.exceptions import", "ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-d \"", "await conn.execute( f\"\"\"insert into backup values({item['user_id']},'{item['card']}','{item['role']}')\"\"\" ) @nonebot.scheduler.scheduled_job(\"interval\", minutes=20) async def __(): bot", ":] == \"0\": fail.append(doc[dl]) else: success.append(doc[dl]) if len(fail) > 0: await session.send( cq.at(session.event.user_id)", "> 0: await session.send( cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if", "not in NOBROADCAST, key in FULLTEXT, values, ), loop, ) print(f\"finished at 
{time.strftime('%X')}\")", "import pprice from .bh3 import bh3 from .hpoi import hpoi from .xlOfficial import", "取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]: async with db.pool.acquire() as conn: values = await conn.fetch(", "nonebot.get_bot() async with db.pool.acquire() as conn: ls = await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 )", "ls = await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete from backup\"\"\") for item", "\"-l\", \"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss", ") print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def _(session: CommandSession): event", "mg where morningcall = true\") for item in values: item = item[\"gid\"] try:", "i in values]) ) ) else: loop = asyncio.get_event_loop() for item, nm in", "message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def bk(): bot", "only_to_me=False, shell_like=True) async def unsubs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await", "def _(): bot = nonebot.get_bot() async with db.pool.acquire() as conn: values = await", "import bh3 from .hpoi import hpoi from .xlOfficial import xl from .pixiv import", "with db.pool.acquire() as conn: for _, item in session.state[\"ls\"]: try: await conn.execute( \"\"\"insert", "in doc: if key in NOUPDATE or \"pixiv\" in key: continue asyncio.run_coroutine_threadsafe( handlerss(", "handlerss, AutoReply from .bcr import bcr from .mrfz import mrfz from .loli import", ".bh3 import bh3 from .hpoi import hpoi from .xlOfficial import xl from .pixiv", "argv.subs if argv.delete != None: session.state[\"del\"] = 
argv.delete ls = argv.delete if argv.rss", "where rss = true\") values = [int(item[\"gid\"]) for item in values] for key", "str(session.event.user_id)) session.finish(result) ls = list(set(ls)) if session.event.detail_type == \"group\": async with db.pool.acquire() as", "except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\" ) logger.error(\"some rss issue\", exc_info=True) elif", "flg == False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def up(x): print(f\"started at", "flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-d \" + ls, disable_interaction=True, )", "key in FULLTEXT, values, ), loop, ) @on_command(\"rss\", only_to_me=False) async def rss(session: CommandSession):", "from .xlOfficial import xl from .pixiv import pixiv import time __plugin_name__ = \"rss", "xl from .pixiv import pixiv import time __plugin_name__ = \"rss 订阅\" NOUPDATE =", "as conn: for _, item in session.state[\"ls\"]: try: await conn.execute( \"\"\"insert into subs", "if \"r18\" not in i] for key in doc: if key in ls[:]:", "指令支持的源\", [(i, j) for i, j in doc.items() if \"r18\" not in i],", "asyncio import asyncpg import nonebot from aiocqhttp.exceptions import Error as CQHttpError from nonebot.argparse", "id 为:{session.event.message_id}\" ) logger.error(\"some rss issue\", exc_info=True) elif \"route\" in session.state: for rt", "key)) ls.remove(key) if len(ls) > 0 and \" \".join(ls).strip() != \"\": await session.send(", "mg where gid = {}\".format(session.event.group_id) ) if len(values) > 0 and values[0][\"safe\"]: ls", "= nonebot.get_bot() async with db.pool.acquire() as conn: ls = await bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367", "asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 
为:{session.event.message_id}\" ) logger.error(\"some rss", "subs values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\", item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError:", "= await call_command( session.bot, session.event, \"rss\", current_arg=\"-s \" + ls, disable_interaction=True, ) if", "\"private\": await session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif \"del\" in session.state: async with", "= item[\"gid\"] try: await bot.send_group_msg( group_id=int(item), message=f\"Ciallo~(∠・ω< )⌒★,早上好。\" ) except CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\",", "dl ) ) if resp[len(\"delete \") :] == \"0\": fail.append(doc[dl]) else: success.append(doc[dl]) if", "ls, disable_interaction=True, ) if flg == False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async", "= nonebot.get_bot() async with db.pool.acquire() as conn: values = await conn.fetch(\"select gid from", "gid from mg where rss = true\") values = [int(item[\"gid\"]) for item in", "await conn.execute( \"delete from subs where qid = {} and rss = '{}'\".format(", ") @on_command(\"订阅\", only_to_me=False, shell_like=True) async def subs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg", "print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def _(session: CommandSession): event =", "= await conn.execute( \"delete from subs where qid = {} and rss =", "in session.state[\"ls\"]: try: await conn.execute( \"\"\"insert into subs values ({},'{}','{}')\"\"\".format( session.event.user_id, \"No Information\",", "== False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def unsubs(session: CommandSession): ls = session.current_arg_text.strip(\"", 
"asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key), key not in NOBROADCAST, key in FULLTEXT, values,", ") if flg == False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def up(x):", "len(ls) > 0 and \" \".join(ls).strip() != \"\": await session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls))", "CQHttpError: pass @nonebot.scheduler.scheduled_job(\"cron\", hour=\"0,6,12,18\", minute=\"0\") async def bk(): bot = nonebot.get_bot() async with", "import doc import feedparser as fp import re from .utils import sendrss, getrss,", "\"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\", } if session.event.detail_type != \"private\":", "NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP = [", "False, ), loop, ) @rss.args_parser async def _(session: CommandSession): if session.is_first_run: parser =", "!= \"\": await session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) + \"添加订阅!\" ) )", "\"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\", } if session.event.detail_type != \"private\": event[\"message_type\"] =", "= '{}'\".format( session.event.user_id, dl ) ) if resp[len(\"delete \") :] == \"0\": fail.append(doc[dl])", "argv.add != None: await session.send(str(session.event.user_id)) result = await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls =", "values = await conn.fetch( \"select * from subs where qid = {}\".format(session.event.user_id) )", "NOUPDATE or \"pixiv\" in key: continue asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key), key not", "源\") await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" - \" + i[\"rss\"]", "= argv.rss ls = argv.rss if argv.route != None: 
session.state[\"route\"] = argv.route session.state[\"ls\"]", "- \" + i[\"rss\"] for i in values]) ) ) else: loop =", "BROADCASTGROUP = [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def _(): bot =", "key: continue asyncio.run_coroutine_threadsafe( handlerss( bot, key, gtfun(key), key not in NOBROADCAST, key in", "with db.pool.acquire() as conn: values = await conn.fetch(\"select gid from mg where morningcall", "nonebot.log import logger from db import db import cq from utils import doc", "if resp and session.event.detail_type != \"private\": await session.send( unescape(cq.at(session.event.user_id) + f\"「{rt}」的资讯已私信,请查收。\") ) elif", "rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument( \"-l\", \"--list\", action=\"store_true\", default=False,", "[] success = [] for _, dl in session.state[\"ls\"]: resp = await conn.execute(", "+ \"添加订阅!\" ) ) if len(session.state[\"ls\"]) == 0: await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"]", "import time __plugin_name__ = \"rss 订阅\" NOUPDATE = [\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"]", "session.finish(\"貌似你没有订阅任何 rss 源\") await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" - \"", "nonebot import on_command, CommandSession, on_startup from nonebot.plugin import perm from nonebot.command import Command,", "[] else \"订阅\") ) session.finish( AutoReply( \"Rss 指令帮助菜单\", \"以下是 rss 指令支持的源\", [(i, j)", "def __(): bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire() as conn:", "ls, disable_interaction=True, ) if flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def", "await add_rss(argv.add.strip(), str(session.event.user_id)) session.finish(result) ls = list(set(ls)) if session.event.detail_type == \"group\": async with", "def gtfun(name: str): if \"pixiv\" in 
name: name = \"pixiv\" return getattr(sys.modules[__name__], name)", "\"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\")", "session.event, \"rss\", current_arg=\"-d \" + ls, disable_interaction=True, ) if flg == False: session.finish(\"取消订阅失败\")", "conn.fetch(\"select gid from mg where rss = true\") values = [int(item[\"gid\"]) for item", "0: session.finish(\"貌似你没有订阅任何 rss 源\") await session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" -", "ls = argv.rss if argv.route != None: session.state[\"route\"] = argv.route session.state[\"ls\"] = argv.route", "rss issue\", exc_info=True) elif \"route\" in session.state: for rt in session.state[\"ls\"]: resp =", "db import db import cq from utils import doc import feedparser as fp", "argv.delete ls = argv.delete if argv.rss != []: session.state[\"rss\"] = argv.rss ls =", "key in doc: if key in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls) >", "async with db.pool.acquire() as conn: for _, item in session.state[\"ls\"]: try: await conn.execute(", "from .bcr import bcr from .mrfz import mrfz from .loli import loli from", "if len(success) > 0: await session.send( cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" ) elif session.state[\"list\"]:", "_(session: CommandSession): if session.is_first_run: parser = ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\", nargs=\"+\",", "db.pool.acquire() as conn: values = await conn.fetch(\"select gid from mg where rss =", "async with db.pool.acquire() as conn: values = await conn.fetch( \"select * from subs", "f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\" ) logger.error(\"some rss issue\", exc_info=True) elif \"route\" in 
session.state:", "conn: for _, item in session.state[\"ls\"]: try: await conn.execute( \"\"\"insert into subs values", "def _(session: CommandSession): if session.is_first_run: parser = ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group() subparser.add_argument(\"-s\", \"--subs\",", "\"--subs\", nargs=\"+\", help=\"订阅指定的 rss 源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\",", "session.finish(result) ls = list(set(ls)) if session.event.detail_type == \"group\": async with db.pool.acquire() as conn:", "[i for i in ls if \"r18\" not in i] for key in", "\".join(ls)) + cq.at(545870222) + \"添加订阅!\" ) ) if len(session.state[\"ls\"]) == 0: await session.send(", "on_command, CommandSession, on_startup from nonebot.plugin import perm from nonebot.command import Command, call_command from", "from aiocqhttp.exceptions import Error as CQHttpError from nonebot.argparse import ArgumentParser import sys from", "session.state[\"ls\"]: resp = await conn.execute( \"delete from subs where qid = {} and", "await session.send( \"本次资讯{}为空哦!\".format(\"查看\" if session.state[\"rss\"] != [] else \"订阅\") ) session.finish( AutoReply( \"Rss", "session.send( cq.at(session.event.user_id) + f\"这{'个' if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) > 0:", "session.send( cq.at(session.event.user_id) + \"以下是你已订阅的源:\\n{}\".format( \"\\n\".join([doc[i[\"rss\"]] + \" - \" + i[\"rss\"] for i", ".pork_price import pprice from .bh3 import bh3 from .hpoi import hpoi from .xlOfficial", "in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls) > 0 and \" \".join(ls).strip() !=", "asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\", exc_info=True) except asyncpg.exceptions.UniqueViolationError: await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( 
f\"发生未知错误!错误详细信息已记录了在log中!\\n定位", "logger.error(\"some rss issue\", exc_info=True) elif \"route\" in session.state: for rt in session.state[\"ls\"]: resp", "_, item in session.state[\"ls\"]: try: await conn.execute( \"\"\"insert into subs values ({},'{}','{}')\"\"\".format( session.event.user_id,", "argv.route if len(session.state[\"ls\"]) == 0: session.finish(\"查询路由地址不能为空哦!\") return if argv.add != None: await session.send(str(session.event.user_id))", "\"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) + \"添加订阅!\" ) ) if len(session.state[\"ls\"]) == 0: await", "from nonebot.command import Command, call_command from nonebot.message import unescape, escape import asyncio import", "from nonebot.log import logger from db import db import cq from utils import", "session.event.user_id, \"No Information\", item ) ) await session.send(f\"「{doc[item]}」的资讯已添加订阅了!有新资讯发布时,会私信你哦!\") except asyncpg.exceptions.ForeignKeyViolationError: await session.send(f\"貌似系统并没有支持该订阅源的订阅!\") logger.error(\"no\",", "= [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async def _(): bot = nonebot.get_bot()", "if session.event.detail_type != \"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"] = None event[\"group_id\"] = session.event.group_id", "as conn: values = await conn.fetch(\"select gid from mg where rss = true\")", ".pixiv import pixiv import time __plugin_name__ = \"rss 订阅\" NOUPDATE = [\"loli\", \"hpoi\"]", "with db.pool.acquire() as conn: values = await conn.fetch(\"select gid from mg where rss", "session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) + \"添加订阅!\" ) ) if len(session.state[\"ls\"]) ==", "+ \" - \" + i[\"rss\"] for i in values]) ) ) else:", "nonebot.argparse import ArgumentParser import sys from nonebot.log import logger from db import db", "session.event.user_id, dl ) ) if resp[len(\"delete \") :] == \"0\": fail.append(doc[dl]) else: success.append(doc[dl])", "issue\", 
exc_info=True) elif \"route\" in session.state: for rt in session.state[\"ls\"]: resp = await", "up(x): print(f\"started at {time.strftime('%X')}\") bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire()", "@rss.args_parser async def _(session: CommandSession): if session.is_first_run: parser = ArgumentParser(session=session) subparser = parser.add_mutually_exclusive_group()", "as conn: fail = [] success = [] for _, dl in session.state[\"ls\"]:", "permission=perm.SUPERUSER) async def _(session: CommandSession): event = { \"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\":", "utils import doc import feedparser as fp import re from .utils import sendrss,", "session.event.group_id await call_command( session.bot, session.event, \"rss\", current_arg=\"pixiv_day_r18 pixiv_week_r18 pixiv_day_male_r18\", ) def gtfun(name: str):", "ls = [i for i in ls if \"r18\" not in i] for", "session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def up(x): print(f\"started at {time.strftime('%X')}\") bot =", "[int(item[\"gid\"]) for item in values] for key in doc: if key in NOUPDATE", "== \"0\": fail.append(doc[dl]) else: success.append(doc[dl]) if len(fail) > 0: await session.send( cq.at(session.event.user_id) +", "not in i], ) ) @on_command(\"订阅\", only_to_me=False, shell_like=True) async def subs(session: CommandSession): ls", "shell_like=True, permission=perm.SUPERUSER) async def up(x): print(f\"started at {time.strftime('%X')}\") bot = nonebot.get_bot() loop =", "\"message\": session.event.message, \"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\", } if session.event.detail_type", "import re from .utils import sendrss, getrss, handlerss, AutoReply from .bcr import bcr", "exc_info=True) elif \"route\" in session.state: for rt in session.state[\"ls\"]: resp = await sendrss(", 
"session.event.detail_type == \"group\": async with db.pool.acquire() as conn: values = await conn.fetch( \"select", "call_command( session.bot, session.event, \"rss\", current_arg=\"-s \" + ls, disable_interaction=True, ) if flg ==", "from .bh3 import bh3 from .hpoi import hpoi from .xlOfficial import xl from", "== False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def up(x): print(f\"started at {time.strftime('%X')}\")", "import unescape, escape import asyncio import asyncpg import nonebot from aiocqhttp.exceptions import Error", "= session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event, \"rss\", current_arg=\"-d \" +", "[\"loli\", \"hpoi\"] NOBROADCAST = [\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP = [ 145029700, ]", "nonebot from aiocqhttp.exceptions import Error as CQHttpError from nonebot.argparse import ArgumentParser import sys", "session.state[\"ls\"]: resp = await sendrss( session.event.user_id, session.bot, \"自定义路由\", None, getrss, (1, 1), route=rt,", "if len(fail)==1 else '些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) > 0: await session.send( cq.at(session.event.user_id) +", ") ) @on_command(\"订阅\", only_to_me=False, shell_like=True) async def subs(session: CommandSession): ls = session.current_arg_text.strip(\" \")", "disable_interaction=True, ) if flg == False: session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def", "from nonebot.message import unescape, escape import asyncio import asyncpg import nonebot from aiocqhttp.exceptions", "!= \"private\": event[\"message_type\"] = \"group\" event[\"sub_type\"] = None event[\"group_id\"] = session.event.group_id await call_command(", "'些'}源「{'、'.join(fail)}」不在你的订阅列表里面哦~\" ) if len(success) > 0: await session.send( cq.at(session.event.user_id) + f\" 取消订阅「{'、'.join(success)}」成功!可喜可贺,可喜可贺!\" )", "\"select safe from mg where 
gid = {}\".format(session.event.group_id) ) if len(values) > 0", "def up(x): print(f\"started at {time.strftime('%X')}\") bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with", "session.state[\"del\"] = argv.delete ls = argv.delete if argv.rss != []: session.state[\"rss\"] = argv.rss", "bot.get_group_member_list( group_id=bot.config.QGROUP, self_id=3418961367 ) await conn.execute(\"\"\"delete from backup\"\"\") for item in ls: await", "from nonebot.argparse import ArgumentParser import sys from nonebot.log import logger from db import", "\" + ls, disable_interaction=True, ) if flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True)", "if \"r18\" not in i], ) ) @on_command(\"订阅\", only_to_me=False, shell_like=True) async def subs(session:", "None, item, feedBack=session.event.group_id if session.event.detail_type != \"private\" else False, ), loop, ) @rss.args_parser", "import logger from db import db import cq from utils import doc import", "and rss = '{}'\".format( session.event.user_id, dl ) ) if resp[len(\"delete \") :] ==", "import asyncpg import nonebot from aiocqhttp.exceptions import Error as CQHttpError from nonebot.argparse import", "subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument( \"-l\", \"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" )", "\"--delete\", nargs=\"+\", help=\"删除 rss 订阅\") subparser.add_argument( \"-l\", \"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\",", "parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = [] session.state[\"list\"] = argv.list if argv.list: return if argv.subs", "= [] session.state[\"list\"] = argv.list if argv.list: return if argv.subs != None: session.state[\"subs\"]", ") ) else: loop = asyncio.get_event_loop() for item, nm in session.state[\"ls\"]: asyncio.run_coroutine_threadsafe( sendrss(", "FULLTEXT, values, 
), loop, ) print(f\"finished at {time.strftime('%X')}\") @on_command(\"带礼包\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async", "and values[0][\"safe\"]: ls = [i for i in ls if \"r18\" not in", "unescape, escape import asyncio import asyncpg import nonebot from aiocqhttp.exceptions import Error as", "源\") subparser.add_argument(\"-r\", \"--route\", nargs=\"+\", help=\"获取自定路由的 rss 源的资讯\") subparser.add_argument(\"-d\", \"--delete\", nargs=\"+\", help=\"删除 rss 订阅\")", "rss 订阅\") subparser.add_argument( \"-l\", \"--list\", action=\"store_true\", default=False, help=\"列出已订阅的源\" ) subparser.add_argument(\"-a\", \"--add\", help=\"开通rss源\") parser.add_argument(\"rss\",", ") if len(values) > 0 and values[0][\"safe\"]: ls = [i for i in", "at {time.strftime('%X')}\") bot = nonebot.get_bot() loop = asyncio.get_event_loop() async with db.pool.acquire() as conn:", "None: session.state[\"del\"] = argv.delete ls = argv.delete if argv.rss != []: session.state[\"rss\"] =", "ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key) if len(ls) > 0 and \" \".join(ls).strip() != \"\":", "def unsubs(session: CommandSession): ls = session.current_arg_text.strip(\" \") flg = await call_command( session.bot, session.event,", "bot = nonebot.get_bot() async with db.pool.acquire() as conn: values = await conn.fetch(\"select gid", "in FULLTEXT, values, ), loop, ) @on_command(\"rss\", only_to_me=False) async def rss(session: CommandSession): if", "for i in values]) ) ) else: loop = asyncio.get_event_loop() for item, nm", "disable_interaction=True, ) if flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def unsubs(session:", "await call_command( session.bot, session.event, \"rss\", current_arg=\"-d \" + ls, disable_interaction=True, ) if flg", "flg == False: session.finish(\"订阅失败\") @on_command(\"取消订阅\", only_to_me=False, shell_like=True) async def unsubs(session: CommandSession): ls =", "if flg == False: 
session.finish(\"取消订阅失败\") @on_command(\"up\", only_to_me=False, shell_like=True, permission=perm.SUPERUSER) async def up(x): print(f\"started", "\"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message, \"sub_type\": \"friend\", } if session.event.detail_type != \"private\": event[\"message_type\"]", "db.pool.acquire() as conn: fail = [] success = [] for _, dl in", "help=\"开通rss源\") parser.add_argument(\"rss\", nargs=\"*\", help=\"获取已存在的 rss 源资讯\") argv = parser.parse_args(session.current_arg_text.strip().split(\" \")) session.state[\"ls\"] = []", "[\"gcores\"] FULLTEXT = [\"pprice\"] BROADCASTGROUP = [ 145029700, ] @nonebot.scheduler.scheduled_job(\"cron\", hour=\"5\", minute=\"0\") async", "if argv.delete != None: session.state[\"del\"] = argv.delete ls = argv.delete if argv.rss !=", "event = { \"user_id\": session.event.user_id, \"message\": session.event.message, \"post_type\": \"message\", \"message_type\": \"private\", \"raw_message\": session.event.raw_message,", "<reponame>Pzzzzz5142/animal-forest-QQ-group-bot from nonebot import on_command, CommandSession, on_startup from nonebot.plugin import perm from nonebot.command", "i], ) ) @on_command(\"订阅\", only_to_me=False, shell_like=True) async def subs(session: CommandSession): ls = session.current_arg_text.strip(\"", "await session.send( unescape( \"没有添加「{}」的订阅源!请联系\".format(\" \".join(ls)) + cq.at(545870222) + \"添加订阅!\" ) ) if len(session.state[\"ls\"])", "in ls if \"r18\" not in i] for key in doc: if key", "in i] for key in doc: if key in ls[:]: session.state[\"ls\"].append((gtfun(key), key)) ls.remove(key)", "await session.send(f\"你已经添加过「{doc[item]}」的资讯订阅啦!\") except: await session.send( f\"发生未知错误!错误详细信息已记录了在log中!\\n定位 message id 为:{session.event.message_id}\" ) logger.error(\"some rss issue\",", "in session.state[\"ls\"]: resp = await sendrss( session.event.user_id, session.bot, \"自定义路由\", None, getrss, (1, 1)," ]
[ "variable X is equal to 5. \"\"\" from math import exp, factorial def", "\"\"\" Objective In this challenge, we learn about Poisson distributions. Task A random", "In this challenge, we learn about Poisson distributions. Task A random variable, X,", "poisson(lam=2.5, k=5): \"\"\" Return the probability of X=k with possion distribution with mean", "exp, factorial def poisson(lam=2.5, k=5): \"\"\" Return the probability of X=k with possion", "def poisson(lam=2.5, k=5): \"\"\" Return the probability of X=k with possion distribution with", "the probability of X=k with possion distribution with mean lam \"\"\" return lam**k*exp(-lam)/factorial(k)", "k=5): \"\"\" Return the probability of X=k with possion distribution with mean lam", "2.5. Find the probability with which the random variable X is equal to", "5. \"\"\" from math import exp, factorial def poisson(lam=2.5, k=5): \"\"\" Return the", "distributions. Task A random variable, X, follows Poisson distribution with mean of 2.5.", "which the random variable X is equal to 5. \"\"\" from math import", "factorial def poisson(lam=2.5, k=5): \"\"\" Return the probability of X=k with possion distribution", "Objective In this challenge, we learn about Poisson distributions. Task A random variable,", "to 5. \"\"\" from math import exp, factorial def poisson(lam=2.5, k=5): \"\"\" Return", "X, follows Poisson distribution with mean of 2.5. Find the probability with which", "from math import exp, factorial def poisson(lam=2.5, k=5): \"\"\" Return the probability of", "math import exp, factorial def poisson(lam=2.5, k=5): \"\"\" Return the probability of X=k", "of X=k with possion distribution with mean lam \"\"\" return lam**k*exp(-lam)/factorial(k) print(round(poisson(), 3))", "import exp, factorial def poisson(lam=2.5, k=5): \"\"\" Return the probability of X=k with", "Task A random variable, X, follows Poisson distribution with mean of 2.5. Find", "of 2.5. 
Find the probability with which the random variable X is equal", "follows Poisson distribution with mean of 2.5. Find the probability with which the", "distribution with mean of 2.5. Find the probability with which the random variable", "variable, X, follows Poisson distribution with mean of 2.5. Find the probability with", "is equal to 5. \"\"\" from math import exp, factorial def poisson(lam=2.5, k=5):", "equal to 5. \"\"\" from math import exp, factorial def poisson(lam=2.5, k=5): \"\"\"", "random variable X is equal to 5. \"\"\" from math import exp, factorial", "the probability with which the random variable X is equal to 5. \"\"\"", "Poisson distributions. Task A random variable, X, follows Poisson distribution with mean of", "learn about Poisson distributions. Task A random variable, X, follows Poisson distribution with", "Return the probability of X=k with possion distribution with mean lam \"\"\" return", "mean of 2.5. Find the probability with which the random variable X is", "the random variable X is equal to 5. \"\"\" from math import exp,", "probability of X=k with possion distribution with mean lam \"\"\" return lam**k*exp(-lam)/factorial(k) print(round(poisson(),", "X is equal to 5. \"\"\" from math import exp, factorial def poisson(lam=2.5,", "random variable, X, follows Poisson distribution with mean of 2.5. Find the probability", "Find the probability with which the random variable X is equal to 5.", "\"\"\" Return the probability of X=k with possion distribution with mean lam \"\"\"", "this challenge, we learn about Poisson distributions. Task A random variable, X, follows", "Poisson distribution with mean of 2.5. Find the probability with which the random", "probability with which the random variable X is equal to 5. \"\"\" from", "\"\"\" from math import exp, factorial def poisson(lam=2.5, k=5): \"\"\" Return the probability", "with mean of 2.5. Find the probability with which the random variable X", "we learn about Poisson distributions. 
Task A random variable, X, follows Poisson distribution", "A random variable, X, follows Poisson distribution with mean of 2.5. Find the", "about Poisson distributions. Task A random variable, X, follows Poisson distribution with mean", "with which the random variable X is equal to 5. \"\"\" from math", "challenge, we learn about Poisson distributions. Task A random variable, X, follows Poisson" ]
[ ": pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk = vk def dispense(self, com): try: command", "instrate import history.history as history import pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing as images", ": aliases.setAlias, 'рейтинг' : instrate.getRating, 'история' : history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать' :", "as images from time import * import logging def defaultModule(com, vk): return {'message'", "images.getShakalized} self.vk = vk def dispense(self, com): try: command = com.args[0] if command", "aliases.setAlias, 'рейтинг' : instrate.getRating, 'история' : history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized}", "aliases.getAlias(com.id, command) if newargs is None: return defaultModule(com, self.vk) args = (newargs +", "com = com._replace(args=args) command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk)", "{'message' : 'Nemo omnia potest scire'} class Dispenser: def __init__(self, vk): self.modules =", "history.history as history import pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing as images from time", "from time import * import logging def defaultModule(com, vk): return {'message' : 'Nemo", "return defaultModule(com, self.vk) args = (newargs + com.args[1:]) com = com._replace(args=args) command =", "import pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing as images from time import * import", "{'погода' : weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг' : instrate.getRating, 'история'", "import logging def defaultModule(com, vk): return {'message' : 'Nemo omnia potest scire'} class", "try: command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) newargs =", "command = com.args[0] if command in self.modules: return 
self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id,", ": audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг' : instrate.getRating, 'история' : history.getHistory, 'опрос' :", "utf-8 -*- import weather.weather as weather import audio.audio as audio import aliases.aliases as", "import instrate.instrate as instrate import history.history as history import pollanalyse.pollanalyse as pollanalyse import", ": instrate.getRating, 'история' : history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk =", "logging def defaultModule(com, vk): return {'message' : 'Nemo omnia potest scire'} class Dispenser:", "history import pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing as images from time import *", "(newargs + com.args[1:]) com = com._replace(args=args) command = com.args[0] if command in self.modules:", "'Nemo omnia potest scire'} class Dispenser: def __init__(self, vk): self.modules = {'погода' :", "-*- coding: utf-8 -*- import weather.weather as weather import audio.audio as audio import", "coding: utf-8 -*- import weather.weather as weather import audio.audio as audio import aliases.aliases", "aliases.aliases as aliases import instrate.instrate as instrate import history.history as history import pollanalyse.pollanalyse", "self.modules: return self.modules[com.args[0]](com, self.vk) return defaultModule(com, self.vk) except Exception as e: logging.exception(e) return", "in self.modules: return self.modules[com.args[0]](com, self.vk) return defaultModule(com, self.vk) except Exception as e: logging.exception(e)", "= aliases.getAlias(com.id, command) if newargs is None: return defaultModule(com, self.vk) args = (newargs", "= com._replace(args=args) command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) return", "import history.history as history import pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing as 
images from", "self.vk) newargs = aliases.getAlias(com.id, command) if newargs is None: return defaultModule(com, self.vk) args", "'иначе' : aliases.setAlias, 'рейтинг' : instrate.getRating, 'история' : history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать'", "weather.weather as weather import audio.audio as audio import aliases.aliases as aliases import instrate.instrate", "weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг' : instrate.getRating, 'история' : history.getHistory,", "as weather import audio.audio as audio import aliases.aliases as aliases import instrate.instrate as", "as pollanalyse import imageprocessing.imageprocessing as images from time import * import logging def", "return {'message' : 'Nemo omnia potest scire'} class Dispenser: def __init__(self, vk): self.modules", ": weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг' : instrate.getRating, 'история' :", "omnia potest scire'} class Dispenser: def __init__(self, vk): self.modules = {'погода' : weather.makeWeatherReport,", "'шакализировать' : images.getShakalized} self.vk = vk def dispense(self, com): try: command = com.args[0]", "is None: return defaultModule(com, self.vk) args = (newargs + com.args[1:]) com = com._replace(args=args)", "as instrate import history.history as history import pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing as", "'рейтинг' : instrate.getRating, 'история' : history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk", "self.modules: return self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id, command) if newargs is None: return", "if command in self.modules: return self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id, command) if newargs", "* import logging def defaultModule(com, vk): return {'message' : 'Nemo omnia potest scire'}", "vk): self.modules = 
{'погода' : weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг'", "def dispense(self, com): try: command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com,", "import audio.audio as audio import aliases.aliases as aliases import instrate.instrate as instrate import", "command in self.modules: return self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id, command) if newargs is", "defaultModule(com, vk): return {'message' : 'Nemo omnia potest scire'} class Dispenser: def __init__(self,", "<gh_stars>1-10 # -*- coding: utf-8 -*- import weather.weather as weather import audio.audio as", "audio import aliases.aliases as aliases import instrate.instrate as instrate import history.history as history", "import imageprocessing.imageprocessing as images from time import * import logging def defaultModule(com, vk):", "as aliases import instrate.instrate as instrate import history.history as history import pollanalyse.pollanalyse as", "com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id, command) if", "instrate.instrate as instrate import history.history as history import pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing", ": 'Nemo omnia potest scire'} class Dispenser: def __init__(self, vk): self.modules = {'погода'", "self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id, command) if newargs is None: return defaultModule(com, self.vk)", "time import * import logging def defaultModule(com, vk): return {'message' : 'Nemo omnia", "def defaultModule(com, vk): return {'message' : 'Nemo omnia potest scire'} class Dispenser: def", "dispense(self, com): try: command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk)", "potest scire'} class Dispenser: def __init__(self, vk): self.modules = {'погода' : weather.makeWeatherReport, 'аудио'", 
"history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk = vk def dispense(self, com):", "import aliases.aliases as aliases import instrate.instrate as instrate import history.history as history import", "-*- import weather.weather as weather import audio.audio as audio import aliases.aliases as aliases", "= com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) return defaultModule(com, self.vk) except", "self.vk) args = (newargs + com.args[1:]) com = com._replace(args=args) command = com.args[0] if", "images from time import * import logging def defaultModule(com, vk): return {'message' :", "com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) return defaultModule(com, self.vk) except Exception", "'история' : history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk = vk def", "Dispenser: def __init__(self, vk): self.modules = {'погода' : weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе'", "None: return defaultModule(com, self.vk) args = (newargs + com.args[1:]) com = com._replace(args=args) command", "weather import audio.audio as audio import aliases.aliases as aliases import instrate.instrate as instrate", "newargs = aliases.getAlias(com.id, command) if newargs is None: return defaultModule(com, self.vk) args =", "if command in self.modules: return self.modules[com.args[0]](com, self.vk) return defaultModule(com, self.vk) except Exception as", "imageprocessing.imageprocessing as images from time import * import logging def defaultModule(com, vk): return", "args = (newargs + com.args[1:]) com = com._replace(args=args) command = com.args[0] if command", "= com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id, command)", "'опрос' : pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk = vk def dispense(self, 
com): try:", "in self.modules: return self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id, command) if newargs is None:", "audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг' : instrate.getRating, 'история' : history.getHistory, 'опрос' : pollanalyse.getPollInfo,", ": history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk = vk def dispense(self,", "+ com.args[1:]) com = com._replace(args=args) command = com.args[0] if command in self.modules: return", "instrate.getRating, 'история' : history.getHistory, 'опрос' : pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk = vk", "if newargs is None: return defaultModule(com, self.vk) args = (newargs + com.args[1:]) com", "self.vk = vk def dispense(self, com): try: command = com.args[0] if command in", "= vk def dispense(self, com): try: command = com.args[0] if command in self.modules:", "pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing as images from time import * import logging", "def __init__(self, vk): self.modules = {'погода' : weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе' :", "return self.modules[com.args[0]](com, self.vk) newargs = aliases.getAlias(com.id, command) if newargs is None: return defaultModule(com,", "vk): return {'message' : 'Nemo omnia potest scire'} class Dispenser: def __init__(self, vk):", "as history import pollanalyse.pollanalyse as pollanalyse import imageprocessing.imageprocessing as images from time import", "defaultModule(com, self.vk) args = (newargs + com.args[1:]) com = com._replace(args=args) command = com.args[0]", "# -*- coding: utf-8 -*- import weather.weather as weather import audio.audio as audio", "command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) return defaultModule(com, self.vk)", "self.modules = {'погода' : weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг' 
:", "aliases import instrate.instrate as instrate import history.history as history import pollanalyse.pollanalyse as pollanalyse", "com._replace(args=args) command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) return defaultModule(com,", "'аудио' : audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг' : instrate.getRating, 'история' : history.getHistory, 'опрос'", "vk def dispense(self, com): try: command = com.args[0] if command in self.modules: return", "pollanalyse.getPollInfo, 'шакализировать' : images.getShakalized} self.vk = vk def dispense(self, com): try: command =", "return self.modules[com.args[0]](com, self.vk) return defaultModule(com, self.vk) except Exception as e: logging.exception(e) return 'Error'", "class Dispenser: def __init__(self, vk): self.modules = {'погода' : weather.makeWeatherReport, 'аудио' : audio.getAudio,", ": images.getShakalized} self.vk = vk def dispense(self, com): try: command = com.args[0] if", "pollanalyse import imageprocessing.imageprocessing as images from time import * import logging def defaultModule(com,", "com): try: command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com, self.vk) newargs", "command in self.modules: return self.modules[com.args[0]](com, self.vk) return defaultModule(com, self.vk) except Exception as e:", "scire'} class Dispenser: def __init__(self, vk): self.modules = {'погода' : weather.makeWeatherReport, 'аудио' :", "as audio import aliases.aliases as aliases import instrate.instrate as instrate import history.history as", "import weather.weather as weather import audio.audio as audio import aliases.aliases as aliases import", "audio.audio as audio import aliases.aliases as aliases import instrate.instrate as instrate import history.history", "com.args[1:]) com = com._replace(args=args) command = com.args[0] if command in self.modules: return self.modules[com.args[0]](com,", "__init__(self, vk): self.modules = {'погода' : 
weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе' : aliases.setAlias,", "= {'погода' : weather.makeWeatherReport, 'аудио' : audio.getAudio, 'иначе' : aliases.setAlias, 'рейтинг' : instrate.getRating,", "= (newargs + com.args[1:]) com = com._replace(args=args) command = com.args[0] if command in", "newargs is None: return defaultModule(com, self.vk) args = (newargs + com.args[1:]) com =", "import * import logging def defaultModule(com, vk): return {'message' : 'Nemo omnia potest", "command) if newargs is None: return defaultModule(com, self.vk) args = (newargs + com.args[1:])" ]
[ "names as values \"\"\" namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)} revisions_dict =", "\"\"\"Common utils for parsing and handling InferenceServices.\"\"\" import os from kubeflow.kubeflow.crud_backend import api,", "the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions for accessing the logs", "for the web app's custom resource, replaces the variables and returns it as", "= {} for pod in pods: for revision in revisions_dict: if KNATIVE_REVISION_LABEL not", "service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the logic of this function", "component = revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys())", "names for the different isvc components. Return a dictionary with (endpoint, component) keys,", "to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\" status = svc[\"status\"]", "svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the logic of this function according to", "status = svc[\"status\"] revisions_dict = {} for component in components: if \"components\" not", "revisions_dict = {} for component in components: if \"components\" not in status: log.info(\"Component", "component_pods_dict = {} for pod in pods: for revision in revisions_dict: if KNATIVE_REVISION_LABEL", "helpers, logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML =", "'%s'\", component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in status[\"components\"][component]: revision = 
status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] =", "(endpoint, component)} revisions_dict = get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) == 0: return {} pods", "= \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs):", "import os from kubeflow.kubeflow.crud_backend import api, helpers, logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL =", "and returns it as a python dict. kwargs: the parameters to be replaced", "svc) if len(revisions_dict.keys()) == 0: return {} pods = api.list_pods(namespace, auth=False).items component_pods_dict =", "keys, i.e. (\"default\", \"predictor\") and a list of pod names as values \"\"\"", "dictionary{revisionId: component}.\"\"\" status = svc[\"status\"] revisions_dict = {} for component in components: if", "according to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\" status =", "= curr_pod_names if len(component_pods_dict.keys()) == 0: log.info(\"No pods are found for inference service:", "if \"latestReadyRevision\" in status[\"components\"][component]: revision = status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if len(revisions_dict.keys()) ==", "of pod names as values \"\"\" namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)}", "\"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\"", "replaces the variables and returns it as a python dict. 
kwargs: the parameters", "the different isvc components. Return a dictionary with (endpoint, component) keys, i.e. (\"default\",", "different isvc components. Return a dictionary with (endpoint, component) keys, i.e. (\"default\", \"predictor\")", "isvc components. Return a dictionary with (endpoint, component) keys, i.e. (\"default\", \"predictor\") and", "for inference service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the logic of", "component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys()) == 0: log.info(\"No pods are found for inference", "not in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component = revisions_dict[revision] curr_pod_names", "inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if component not in status[\"components\"]: log.info(\"Component '%s'", "if KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component =", "continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component = revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, [])", "service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in status[\"components\"][component]: revision = status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision]", "component if len(revisions_dict.keys()) == 0: log.info( \"No revisions found for the inference service's", "\"No revisions found for the inference service's components: %s\", svc[\"metadata\"][\"name\"], ) return revisions_dict", "revisions_dict = get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) == 0: return {} pods = api.list_pods(namespace,", "os from kubeflow.kubeflow.crud_backend import api, helpers, logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL = 
\"serving.knative.dev/revision\"", "# https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\" status = svc[\"status\"] revisions_dict", "def get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\" status = svc[\"status\"] revisions_dict = {}", "as values \"\"\" namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)} revisions_dict = get_components_revisions_dict(components,", "= get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) == 0: return {} pods = api.list_pods(namespace, auth=False).items", "component, svc[\"metadata\"][\"name\"]) continue if component not in status[\"components\"]: log.info(\"Component '%s' not in inference", "load_inference_service_template(**kwargs): \"\"\" Return an InferenceService dict, with defaults from the local yaml. Reads", "i.e. (\"default\", \"predictor\") and a list of pod names as values \"\"\" namespace", "yaml. 
Reads the yaml for the web app's custom resource, replaces the variables", "https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\" status = svc[\"status\"] revisions_dict =", "a list of pod names as values \"\"\" namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName:", "get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) == 0: return {} pods = api.list_pods(namespace, auth=False).items component_pods_dict", "%s\", svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the logic of this function according", "os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return an InferenceService dict, with defaults", "return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the logic of this function according to #", "len(component_pods_dict.keys()) == 0: log.info(\"No pods are found for inference service: %s\", svc[\"metadata\"][\"name\"]) return", "a python dict. 
kwargs: the parameters to be replaced in the yaml \"\"\"", "import api, helpers, logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__))", "0: log.info( \"No revisions found for the inference service's components: %s\", svc[\"metadata\"][\"name\"], )", "Reads the yaml for the web app's custom resource, replaces the variables and", "from kubeflow.kubeflow.crud_backend import api, helpers, logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH", "\"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions for accessing the logs of an", "of an InferenceService def get_inference_service_pods(svc, components=[]): \"\"\" Return the Pod names for the", "== 0: log.info( \"No revisions found for the inference service's components: %s\", svc[\"metadata\"][\"name\"],", "logs of an InferenceService def get_inference_service_pods(svc, components=[]): \"\"\" Return the Pod names for", "pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component = revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component]", "the web app's custom resource, replaces the variables and returns it as a", "in status: log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if", "revision: continue component = revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names", "revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys()) == 0:", "if len(component_pods_dict.keys()) == 0: log.info(\"No pods 
are found for inference service: %s\", svc[\"metadata\"][\"name\"])", "revision = status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if len(revisions_dict.keys()) == 0: log.info( \"No revisions", "component}.\"\"\" status = svc[\"status\"] revisions_dict = {} for component in components: if \"components\"", "for parsing and handling InferenceServices.\"\"\" import os from kubeflow.kubeflow.crud_backend import api, helpers, logging", "'%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in status[\"components\"][component]:", "log.info(\"No pods are found for inference service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd):", "= svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)} revisions_dict = get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) ==", "not in status: log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue", "if len(revisions_dict.keys()) == 0: log.info( \"No revisions found for the inference service's components:", "a dictionary{revisionId: component}.\"\"\" status = svc[\"status\"] revisions_dict = {} for component in components:", "revision in revisions_dict: if KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision:", "this function according to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\"", "pod in pods: for revision in revisions_dict: if KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue", "log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH,", "\"yaml\", 
\"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return an InferenceService dict, with defaults from the", "component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys()) == 0: log.info(\"No pods are", "the variables and returns it as a python dict. kwargs: the parameters to", "def load_inference_service_template(**kwargs): \"\"\" Return an InferenceService dict, with defaults from the local yaml.", "in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in status[\"components\"][component]: revision =", "not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in status[\"components\"][component]: revision", "# helper functions for accessing the logs of an InferenceService def get_inference_service_pods(svc, components=[]):", "status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if len(revisions_dict.keys()) == 0: log.info( \"No revisions found for", "pods are found for inference service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change", "component) keys, i.e. 
(\"default\", \"predictor\") and a list of pod names as values", "curr_pod_names if len(component_pods_dict.keys()) == 0: log.info(\"No pods are found for inference service: %s\",", "**kwargs) # helper functions for accessing the logs of an InferenceService def get_inference_service_pods(svc,", "inference service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the logic of this", "INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return an InferenceService dict,", "'%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if component not in", "helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions for accessing the logs of an InferenceService def", "logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\")", "from the local yaml. Reads the yaml for the web app's custom resource,", "for accessing the logs of an InferenceService def get_inference_service_pods(svc, components=[]): \"\"\" Return the", "0: return {} pods = api.list_pods(namespace, auth=False).items component_pods_dict = {} for pod in", "\"components\" not in status: log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"])", "len(revisions_dict.keys()) == 0: log.info( \"No revisions found for the inference service's components: %s\",", "utils for parsing and handling InferenceServices.\"\"\" import os from kubeflow.kubeflow.crud_backend import api, helpers,", "svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\" status = svc[\"status\"] revisions_dict = {} for component", "Return a dictionary with (endpoint, component) keys, i.e. 
(\"default\", \"predictor\") and a list", "FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return", "function according to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\" status", "{} for component in components: if \"components\" not in status: log.info(\"Component '%s' not", "component)} revisions_dict = get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) == 0: return {} pods =", "if component not in status[\"components\"]: log.info(\"Component '%s' not in inference service '%s'\", component,", "dict, with defaults from the local yaml. Reads the yaml for the web", "defaults from the local yaml. Reads the yaml for the web app's custom", "svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in status[\"components\"][component]: revision = status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if", "if len(revisions_dict.keys()) == 0: return {} pods = api.list_pods(namespace, auth=False).items component_pods_dict = {}", "replaced in the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions for accessing", "log.info( \"No revisions found for the inference service's components: %s\", svc[\"metadata\"][\"name\"], ) return", "revisions_dict: if KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component", "dictionary{revisionName: (endpoint, component)} revisions_dict = get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) == 0: return {}", "service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if component not in status[\"components\"]: log.info(\"Component 
'%s' not", "found for inference service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the logic", "= component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys()) == 0: log.info(\"No pods", "svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)} revisions_dict = get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) == 0:", "== 0: return {} pods = api.list_pods(namespace, auth=False).items component_pods_dict = {} for pod", "= {} for component in components: if \"components\" not in status: log.info(\"Component '%s'", "log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in", "yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions for accessing the logs of", "Pod names for the different isvc components. Return a dictionary with (endpoint, component)", "InferenceService dict, with defaults from the local yaml. Reads the yaml for the", "in components: if \"components\" not in status: log.info(\"Component '%s' not in inference service", "yaml for the web app's custom resource, replaces the variables and returns it", "= os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return an InferenceService dict, with", "\"\"\" Return an InferenceService dict, with defaults from the local yaml. Reads the", "list of pod names as values \"\"\" namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint,", "InferenceService def get_inference_service_pods(svc, components=[]): \"\"\" Return the Pod names for the different isvc", "\"\"\" Return the Pod names for the different isvc components. Return a dictionary", "returns it as a python dict. 
kwargs: the parameters to be replaced in", "svc[\"status\"] revisions_dict = {} for component in components: if \"components\" not in status:", "components: if \"components\" not in status: log.info(\"Component '%s' not in inference service '%s'\",", "= status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if len(revisions_dict.keys()) == 0: log.info( \"No revisions found", "components=[]): \"\"\" Return the Pod names for the different isvc components. Return a", "\"predictor\") and a list of pod names as values \"\"\" namespace = svc[\"metadata\"][\"namespace\"]", "pods = api.list_pods(namespace, auth=False).items component_pods_dict = {} for pod in pods: for revision", "values \"\"\" namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)} revisions_dict = get_components_revisions_dict(components, svc)", "KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component = revisions_dict[revision]", "revisions_dict[revision] = component if len(revisions_dict.keys()) == 0: log.info( \"No revisions found for the", "component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in status[\"components\"][component]: revision = status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component", "parsing and handling InferenceServices.\"\"\" import os from kubeflow.kubeflow.crud_backend import api, helpers, logging log", "= api.list_pods(namespace, auth=False).items component_pods_dict = {} for pod in pods: for revision in", "not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if component not in status[\"components\"]:", "svc[\"metadata\"][\"name\"]) continue if component not in status[\"components\"]: log.info(\"Component '%s' not in inference service", "continue if \"latestReadyRevision\" in status[\"components\"][component]: revision = 
status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if len(revisions_dict.keys())", "InferenceServices.\"\"\" import os from kubeflow.kubeflow.crud_backend import api, helpers, logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL", "app's custom resource, replaces the variables and returns it as a python dict.", "= os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return an", "dictionary with (endpoint, component) keys, i.e. (\"default\", \"predictor\") and a list of pod", "the local yaml. Reads the yaml for the web app's custom resource, replaces", "dict. kwargs: the parameters to be replaced in the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML,", "in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component = revisions_dict[revision] curr_pod_names =", "an InferenceService dict, with defaults from the local yaml. Reads the yaml for", "= revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys()) ==", "pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component = revisions_dict[revision] curr_pod_names = component_pods_dict.get(component,", "in status[\"components\"][component]: revision = status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if len(revisions_dict.keys()) == 0: log.info(", "components. Return a dictionary with (endpoint, component) keys, i.e. 
(\"default\", \"predictor\") and a", "kubeflow.kubeflow.crud_backend import api, helpers, logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH =", "for revision in revisions_dict: if KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] !=", "namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)} revisions_dict = get_components_revisions_dict(components, svc) if len(revisions_dict.keys())", "os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return an InferenceService", "the logs of an InferenceService def get_inference_service_pods(svc, components=[]): \"\"\" Return the Pod names", "FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return an InferenceService dict, with defaults from", "{} for pod in pods: for revision in revisions_dict: if KNATIVE_REVISION_LABEL not in", "\"\"\"Return a dictionary{revisionId: component}.\"\"\" status = svc[\"status\"] revisions_dict = {} for component in", "web app's custom resource, replaces the variables and returns it as a python", "return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions for accessing the logs of an InferenceService", "the Pod names for the different isvc components. 
Return a dictionary with (endpoint,", "KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\", \"inference_service_template.yaml\") def", "for component in components: if \"components\" not in status: log.info(\"Component '%s' not in", "handling InferenceServices.\"\"\" import os from kubeflow.kubeflow.crud_backend import api, helpers, logging log = logging.getLogger(__name__)", "0: log.info(\"No pods are found for inference service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict #", "and handling InferenceServices.\"\"\" import os from kubeflow.kubeflow.crud_backend import api, helpers, logging log =", "continue component = revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if", "an InferenceService def get_inference_service_pods(svc, components=[]): \"\"\" Return the Pod names for the different", "\"\"\" namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)} revisions_dict = get_components_revisions_dict(components, svc) if", "in status[\"components\"]: log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if", "status[\"components\"]: log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\"", "the logic of this function according to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return", "resource, replaces the variables and returns it as a python dict. 
kwargs: the", "logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join(", "as a python dict. kwargs: the parameters to be replaced in the yaml", "(\"default\", \"predictor\") and a list of pod names as values \"\"\" namespace =", "return {} pods = api.list_pods(namespace, auth=False).items component_pods_dict = {} for pod in pods:", "pods: for revision in revisions_dict: if KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL]", "[]) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys()) == 0: log.info(\"No pods are found", "log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if component not", "kwargs: the parameters to be replaced in the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs)", "to be replaced in the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions", "component not in status[\"components\"]: log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"])", "component in components: if \"components\" not in status: log.info(\"Component '%s' not in inference", "in pods: for revision in revisions_dict: if KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue if", "the parameters to be replaced in the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) #", "len(revisions_dict.keys()) == 0: return {} pods = api.list_pods(namespace, auth=False).items component_pods_dict = {} for", "custom resource, replaces the variables and returns it as a python dict. 
kwargs:", "in the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions for accessing the", "if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue component = revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name)", "Return the Pod names for the different isvc components. Return a dictionary with", "== 0: log.info(\"No pods are found for inference service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict", "python dict. kwargs: the parameters to be replaced in the yaml \"\"\" return", "for the different isvc components. Return a dictionary with (endpoint, component) keys, i.e.", "auth=False).items component_pods_dict = {} for pod in pods: for revision in revisions_dict: if", "it as a python dict. kwargs: the parameters to be replaced in the", "for pod in pods: for revision in revisions_dict: if KNATIVE_REVISION_LABEL not in pod.metadata.labels:", "if \"components\" not in status: log.info(\"Component '%s' not in inference service '%s'\", component,", "be replaced in the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper functions for", "accessing the logs of an InferenceService def get_inference_service_pods(svc, components=[]): \"\"\" Return the Pod", "api.list_pods(namespace, auth=False).items component_pods_dict = {} for pod in pods: for revision in revisions_dict:", "status: log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if component", "local yaml. Reads the yaml for the web app's custom resource, replaces the", "in revisions_dict: if KNATIVE_REVISION_LABEL not in pod.metadata.labels: continue if pod.metadata.labels[KNATIVE_REVISION_LABEL] != revision: continue", "get_inference_service_pods(svc, components=[]): \"\"\" Return the Pod names for the different isvc components. 
Return", "the yaml for the web app's custom resource, replaces the variables and returns", "= logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML = os.path.join( FILE_ABS_PATH, \"yaml\",", "and a list of pod names as values \"\"\" namespace = svc[\"metadata\"][\"namespace\"] #", "# dictionary{revisionName: (endpoint, component)} revisions_dict = get_components_revisions_dict(components, svc) if len(revisions_dict.keys()) == 0: return", "# FIXME(elikatsis,kimwnasptd): Change the logic of this function according to # https://github.com/arrikto/dev/issues/867 def", "pod names as values \"\"\" namespace = svc[\"metadata\"][\"namespace\"] # dictionary{revisionName: (endpoint, component)} revisions_dict", "def get_inference_service_pods(svc, components=[]): \"\"\" Return the Pod names for the different isvc components.", "\"inference_service_template.yaml\") def load_inference_service_template(**kwargs): \"\"\" Return an InferenceService dict, with defaults from the local", "!= revision: continue component = revisions_dict[revision] curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] =", "api, helpers, logging log = logging.getLogger(__name__) KNATIVE_REVISION_LABEL = \"serving.knative.dev/revision\" FILE_ABS_PATH = os.path.abspath(os.path.dirname(__file__)) INFERENCESERVICE_TEMPLATE_YAML", "Change the logic of this function according to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc):", "logic of this function according to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return a", "inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if \"latestReadyRevision\" in status[\"components\"][component]: revision = 
status[\"components\"][component][\"latestReadyRevision\"]", "(endpoint, component) keys, i.e. (\"default\", \"predictor\") and a list of pod names as", "variables and returns it as a python dict. kwargs: the parameters to be", "'%s'\", component, svc[\"metadata\"][\"name\"]) continue if component not in status[\"components\"]: log.info(\"Component '%s' not in", "are found for inference service: %s\", svc[\"metadata\"][\"name\"]) return component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the", "with defaults from the local yaml. Reads the yaml for the web app's", "a dictionary with (endpoint, component) keys, i.e. (\"default\", \"predictor\") and a list of", "curr_pod_names = component_pods_dict.get(component, []) curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys()) == 0: log.info(\"No", "functions for accessing the logs of an InferenceService def get_inference_service_pods(svc, components=[]): \"\"\" Return", "Return an InferenceService dict, with defaults from the local yaml. Reads the yaml", "= svc[\"status\"] revisions_dict = {} for component in components: if \"components\" not in", "continue if component not in status[\"components\"]: log.info(\"Component '%s' not in inference service '%s'\",", "status[\"components\"][component]: revision = status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if len(revisions_dict.keys()) == 0: log.info( \"No", "{} pods = api.list_pods(namespace, auth=False).items component_pods_dict = {} for pod in pods: for", "FIXME(elikatsis,kimwnasptd): Change the logic of this function according to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components,", "of this function according to # https://github.com/arrikto/dev/issues/867 def get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId:", "with (endpoint, component) keys, i.e. 
(\"default\", \"predictor\") and a list of pod names", "\"latestReadyRevision\" in status[\"components\"][component]: revision = status[\"components\"][component][\"latestReadyRevision\"] revisions_dict[revision] = component if len(revisions_dict.keys()) == 0:", "parameters to be replaced in the yaml \"\"\" return helpers.load_param_yaml(INFERENCESERVICE_TEMPLATE_YAML, **kwargs) # helper", "= component if len(revisions_dict.keys()) == 0: log.info( \"No revisions found for the inference", "component_pods_dict # FIXME(elikatsis,kimwnasptd): Change the logic of this function according to # https://github.com/arrikto/dev/issues/867", "in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue if component not in status[\"components\"]: log.info(\"Component", "not in status[\"components\"]: log.info(\"Component '%s' not in inference service '%s'\", component, svc[\"metadata\"][\"name\"]) continue", "curr_pod_names.append(pod.metadata.name) component_pods_dict[component] = curr_pod_names if len(component_pods_dict.keys()) == 0: log.info(\"No pods are found for", "helper functions for accessing the logs of an InferenceService def get_inference_service_pods(svc, components=[]): \"\"\"", "get_components_revisions_dict(components, svc): \"\"\"Return a dictionary{revisionId: component}.\"\"\" status = svc[\"status\"] revisions_dict = {} for" ]
[ "models class Migration(migrations.Migration): dependencies = [ ('trade', '0041_auto_20190313_1418'), ] operations = [ migrations.AddField(", "class Migration(migrations.Migration): dependencies = [ ('trade', '0041_auto_20190313_1418'), ] operations = [ migrations.AddField( model_name='orders',", "operations = [ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders',", "), migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl', '积分换购订单'), ('qrpay', '线下支付订单')], default='ord',", "06:24 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('trade', '0041_auto_20190313_1418'),", "# Generated by Django 2.1.7 on 2019-03-26 06:24 from django.db import migrations, models", "= [ ('trade', '0041_auto_20190313_1418'), ] operations = [ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20,", "verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl', '积分换购订单'), ('qrpay', '线下支付订单')],", "field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl',", "name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'),", "migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl', '积分换购订单'), ('qrpay', '线下支付订单')], default='ord', max_length=128,", "2.1.7 on 
2019-03-26 06:24 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "dependencies = [ ('trade', '0041_auto_20190313_1418'), ] operations = [ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True,", "] operations = [ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField(", "= [ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders', name='model_type',", "('trade', '0041_auto_20190313_1418'), ] operations = [ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'),", "Generated by Django 2.1.7 on 2019-03-26 06:24 from django.db import migrations, models class", "migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'),", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('trade', '0041_auto_20190313_1418'), ] operations =", "model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl', '积分换购订单'), ('qrpay', '线下支付订单')], default='ord', max_length=128, verbose_name='订单类型'),", "max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl', '积分换购订单'),", "[ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField( 
model_name='orders', name='model_type', field=models.CharField(choices=[('ord',", "name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl', '积分换购订单'), ('qrpay', '线下支付订单')], default='ord', max_length=128, verbose_name='订单类型'), ),", "by Django 2.1.7 on 2019-03-26 06:24 from django.db import migrations, models class Migration(migrations.Migration):", "2019-03-26 06:24 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('trade',", "model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub',", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('trade', '0041_auto_20190313_1418'), ]", "<reponame>lianxiaopang/camel-store-api<filename>apps/trade/migrations/0042_auto_20190326_1424.py # Generated by Django 2.1.7 on 2019-03-26 06:24 from django.db import migrations,", "'0041_auto_20190313_1418'), ] operations = [ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True, verbose_name='支付终端号'), ),", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('trade', '0041_auto_20190313_1418'), ] operations", "Migration(migrations.Migration): dependencies = [ ('trade', '0041_auto_20190313_1418'), ] operations = [ migrations.AddField( model_name='orders', name='machine_code',", "[ ('trade', '0041_auto_20190313_1418'), ] operations = [ migrations.AddField( model_name='orders', name='machine_code', field=models.CharField(blank=True, max_length=20, null=True,", "null=True, verbose_name='支付终端号'), ), migrations.AlterField( model_name='orders', name='model_type', field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl', '积分换购订单'), ('qrpay',", 
"field=models.CharField(choices=[('ord', '普通订单'), ('sub', '订阅订单'), ('repl', '积分换购订单'), ('qrpay', '线下支付订单')], default='ord', max_length=128, verbose_name='订单类型'), ), ]", "on 2019-03-26 06:24 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "migrations, models class Migration(migrations.Migration): dependencies = [ ('trade', '0041_auto_20190313_1418'), ] operations = [", "Django 2.1.7 on 2019-03-26 06:24 from django.db import migrations, models class Migration(migrations.Migration): dependencies" ]
[ "or minor optn = input(\"\\nChoose an option: \") if optn == \"1\": scale", "# major scale if isinstance(scale, MajorScale): print(scale) menu = (\"1. Get parallel minor\\n\"", "import MajorScale from minorScale import MinorScale print(\"Hi welcome to my app.\\n\") note =", "\"3\": note = None elif optn == \"4\": break else: print(\"Invalid option. Try", "== \"1\": scale = MajorScale(scale,1) elif optn == \"2\": scale = MajorScale(scale,2) elif", "minorScale import MinorScale print(\"Hi welcome to my app.\\n\") note = None scale =", "print(\"Hi welcome to my app.\\n\") note = None scale = None while(True): #", "print(menu) optn = input(\"\\nChoose an option: \") if optn == \"1\": scale =", "(\"1. {0} major scale\\n\" \"2. {0} minor scale\\n\" \"3. Choose another note\\n\" \"4.", "note\\n\" \"4. Exit\").format(note) print(menu) # choose major or minor optn = input(\"\\nChoose an", "\"4\": break else: print(\"Invalid option. Try again.\\n\") # minor scale if isinstance(scale, MinorScale):", "\"1\": scale = MajorScale(scale,1) elif optn == \"2\": scale = MajorScale(scale,2) elif optn", "break else: print(\"Invalid option. Try again.\\n\") # major scale if isinstance(scale, MajorScale): print(scale)", "None scale = None elif optn == \"4\": break else: print(\"Invalid option. Try", "== \"4\": break else: print(\"Invalid option. Try again.\\n\") # minor scale if isinstance(scale,", "welcome to my app.\\n\") note = None scale = None while(True): # if", "chosen if scale is None: # choose a note if note is None:", "break else: print(\"Invalid option. 
Try again.\\n\") # minor scale if isinstance(scale, MinorScale): print(scale)", "MinorScale print(\"Hi welcome to my app.\\n\") note = None scale = None while(True):", "optn == \"3\": note = None elif optn == \"4\": break else: print(\"Invalid", "\"1\": scale = MajorScale(note) elif optn == \"2\": scale = MinorScale(note) elif optn", "note import Note from majorScale import MajorScale from minorScale import MinorScale print(\"Hi welcome", "== \"1\": scale = MinorScale(scale,1) elif optn == \"2\": scale = MinorScale(scale,2) elif", "major or minor optn = input(\"\\nChoose an option: \") if optn == \"1\":", "Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose an option: \") if", "MinorScale(scale,2) elif optn == \"3\": note = None scale = None elif optn", "MajorScale(scale,1) elif optn == \"2\": scale = MajorScale(scale,2) elif optn == \"3\": note", "relative major\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose an", "None: # choose a note if note is None: note = input(\"Choose a", "\") if optn == \"1\": scale = MinorScale(scale,1) elif optn == \"2\": scale", "scale if isinstance(scale, MajorScale): print(scale) menu = (\"1. Get parallel minor\\n\" \"2. Get", "optn == \"1\": scale = MajorScale(note) elif optn == \"2\": scale = MinorScale(note)", "elif optn == \"2\": scale = MinorScale(scale,2) elif optn == \"3\": note =", "\"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose an option: \")", "menu = (\"1. Get parallel minor\\n\" \"2. Get relative minor\\n\" \"3. Choose another", "= input(\"\\nChoose an option: \") if optn == \"1\": scale = MinorScale(scale,1) elif", "\"4. Exit\").format(note) print(menu) # choose major or minor optn = input(\"\\nChoose an option:", "optn == \"2\": scale = MajorScale(scale,2) elif optn == \"3\": note = None", "choose major or minor optn = input(\"\\nChoose an option: \") if optn ==", "= (\"1. 
{0} major scale\\n\" \"2. {0} minor scale\\n\" \"3. Choose another note\\n\"", "scale = MinorScale(scale,2) elif optn == \"3\": note = None scale = None", "Get relative major\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose", "major\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose an option:", "minor optn = input(\"\\nChoose an option: \") if optn == \"1\": scale =", "== \"3\": note = None scale = None elif optn == \"4\": break", "else: print(\"Invalid option. Try again.\\n\") # minor scale if isinstance(scale, MinorScale): print(scale) menu", "parallel major\\n\" \"2. Get relative major\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu)", "{0} major scale\\n\" \"2. {0} minor scale\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note)", "another note\\n\" \"4. Exit\").format(note) print(menu) # choose major or minor optn = input(\"\\nChoose", "import MinorScale print(\"Hi welcome to my app.\\n\") note = None scale = None", "# if no scale is chosen if scale is None: # choose a", "note is None: note = input(\"Choose a note: \") menu = (\"1. {0}", "again.\\n\") # minor scale if isinstance(scale, MinorScale): print(scale) menu = (\"1. Get parallel", "Exit\").format(note) print(menu) # choose major or minor optn = input(\"\\nChoose an option: \")", "an option: \") if optn == \"1\": scale = MajorScale(note) elif optn ==", "from note import Note from majorScale import MajorScale from minorScale import MinorScale print(\"Hi", "to my app.\\n\") note = None scale = None while(True): # if no", "Get relative minor\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose", "{0} minor scale\\n\" \"3. Choose another note\\n\" \"4. 
Exit\").format(note) print(menu) # choose major", "note if note is None: note = input(\"Choose a note: \") menu =", "= MajorScale(scale,1) elif optn == \"2\": scale = MajorScale(scale,2) elif optn == \"3\":", "optn = input(\"\\nChoose an option: \") if optn == \"1\": scale = MajorScale(scale,1)", "my app.\\n\") note = None scale = None while(True): # if no scale", "print(scale) menu = (\"1. Get parallel minor\\n\" \"2. Get relative minor\\n\" \"3. Choose", "option: \") if optn == \"1\": scale = MinorScale(scale,1) elif optn == \"2\":", "= MinorScale(scale,2) elif optn == \"3\": note = None scale = None elif", "optn == \"1\": scale = MinorScale(scale,1) elif optn == \"2\": scale = MinorScale(scale,2)", "scale if isinstance(scale, MinorScale): print(scale) menu = (\"1. Get parallel major\\n\" \"2. Get", "print(menu) # choose major or minor optn = input(\"\\nChoose an option: \") if", "major\\n\" \"2. Get relative major\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn", "Get parallel major\\n\" \"2. Get relative major\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note)", "else: print(\"Invalid option. Try again.\\n\") # major scale if isinstance(scale, MajorScale): print(scale) menu", "option. Try again.\\n\") # major scale if isinstance(scale, MajorScale): print(scale) menu = (\"1.", "= None while(True): # if no scale is chosen if scale is None:", "MajorScale): print(scale) menu = (\"1. Get parallel minor\\n\" \"2. Get relative minor\\n\" \"3.", "option. Try again.\\n\") # minor scale if isinstance(scale, MinorScale): print(scale) menu = (\"1.", "\") if optn == \"1\": scale = MajorScale(note) elif optn == \"2\": scale", "is chosen if scale is None: # choose a note if note is", "print(scale) menu = (\"1. Get parallel major\\n\" \"2. Get relative major\\n\" \"3. Choose", "elif optn == \"2\": scale = MinorScale(note) elif optn == \"3\": note =", "print(\"Invalid option. 
Try again.\\n\") # major scale if isinstance(scale, MajorScale): print(scale) menu =", "\"2. Get relative major\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn =", "optn == \"4\": break else: print(\"Invalid option. Try again.\\n\") # minor scale if", "optn = input(\"\\nChoose an option: \") if optn == \"1\": scale = MinorScale(scale,1)", "major scale if isinstance(scale, MajorScale): print(scale) menu = (\"1. Get parallel minor\\n\" \"2.", "if note is None: note = input(\"Choose a note: \") menu = (\"1.", "None while(True): # if no scale is chosen if scale is None: #", "\"2\": scale = MinorScale(note) elif optn == \"3\": note = None elif optn", "an option: \") if optn == \"1\": scale = MinorScale(scale,1) elif optn ==", "\"1\": scale = MinorScale(scale,1) elif optn == \"2\": scale = MinorScale(scale,2) elif optn", "isinstance(scale, MinorScale): print(scale) menu = (\"1. Get parallel major\\n\" \"2. Get relative major\\n\"", "Choose another note\\n\" \"4. Exit\").format(note) print(menu) # choose major or minor optn =", "= MinorScale(scale,1) elif optn == \"2\": scale = MinorScale(scale,2) elif optn == \"3\":", "= None scale = None elif optn == \"4\": break else: print(\"Invalid option.", "from majorScale import MajorScale from minorScale import MinorScale print(\"Hi welcome to my app.\\n\")", "== \"1\": scale = MajorScale(note) elif optn == \"2\": scale = MinorScale(note) elif", "Exit\").format(note) print(menu) optn = input(\"\\nChoose an option: \") if optn == \"1\": scale", "optn == \"1\": scale = MajorScale(scale,1) elif optn == \"2\": scale = MajorScale(scale,2)", "= None elif optn == \"4\": break else: print(\"Invalid option. Try again.\\n\") print(\"Bye!\")", "scale = MinorScale(scale,1) elif optn == \"2\": scale = MinorScale(scale,2) elif optn ==", "\"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) # choose major or minor optn", "relative minor\\n\" \"3. Choose another note\\n\" \"4. 
Exit\").format(note) print(menu) optn = input(\"\\nChoose an", "while(True): # if no scale is chosen if scale is None: # choose", "if optn == \"1\": scale = MajorScale(note) elif optn == \"2\": scale =", "scale\\n\" \"2. {0} minor scale\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) #", "note = None elif optn == \"4\": break else: print(\"Invalid option. Try again.\\n\")", "= input(\"\\nChoose an option: \") if optn == \"1\": scale = MajorScale(note) elif", "scale is chosen if scale is None: # choose a note if note", "minor scale\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) # choose major or", "elif optn == \"3\": note = None elif optn == \"4\": break else:", "Get parallel minor\\n\" \"2. Get relative minor\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note)", "no scale is chosen if scale is None: # choose a note if", "MajorScale from minorScale import MinorScale print(\"Hi welcome to my app.\\n\") note = None", "Try again.\\n\") # minor scale if isinstance(scale, MinorScale): print(scale) menu = (\"1. Get", "\"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose an option: \") if optn == \"1\":", "MinorScale(note) elif optn == \"3\": note = None elif optn == \"4\": break", "note = input(\"Choose a note: \") menu = (\"1. {0} major scale\\n\" \"2.", "== \"2\": scale = MajorScale(scale,2) elif optn == \"3\": note = None scale", "scale\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) # choose major or minor", "# choose a note if note is None: note = input(\"Choose a note:", "scale = None elif optn == \"4\": break else: print(\"Invalid option. Try again.\\n\")", "\"2. Get relative minor\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn =", "minor scale if isinstance(scale, MinorScale): print(scale) menu = (\"1. Get parallel major\\n\" \"2.", "input(\"Choose a note: \") menu = (\"1. {0} major scale\\n\" \"2. 
{0} minor", "\"3\": note = None scale = None elif optn == \"4\": break else:", "= input(\"\\nChoose an option: \") if optn == \"1\": scale = MajorScale(scale,1) elif", "\") if optn == \"1\": scale = MajorScale(scale,1) elif optn == \"2\": scale", "= (\"1. Get parallel minor\\n\" \"2. Get relative minor\\n\" \"3. Choose another note\\n\"", "note: \") menu = (\"1. {0} major scale\\n\" \"2. {0} minor scale\\n\" \"3.", "option: \") if optn == \"1\": scale = MajorScale(scale,1) elif optn == \"2\":", "MinorScale(scale,1) elif optn == \"2\": scale = MinorScale(scale,2) elif optn == \"3\": note", "if isinstance(scale, MinorScale): print(scale) menu = (\"1. Get parallel major\\n\" \"2. Get relative", "scale = None while(True): # if no scale is chosen if scale is", "input(\"\\nChoose an option: \") if optn == \"1\": scale = MajorScale(scale,1) elif optn", "= MajorScale(scale,2) elif optn == \"3\": note = None scale = None elif", "MajorScale(scale,2) elif optn == \"3\": note = None scale = None elif optn", "an option: \") if optn == \"1\": scale = MajorScale(scale,1) elif optn ==", "None scale = None while(True): # if no scale is chosen if scale", "\"2\": scale = MajorScale(scale,2) elif optn == \"3\": note = None scale =", "MinorScale): print(scale) menu = (\"1. Get parallel major\\n\" \"2. Get relative major\\n\" \"3.", "= input(\"Choose a note: \") menu = (\"1. {0} major scale\\n\" \"2. {0}", "= None elif optn == \"4\": break else: print(\"Invalid option. Try again.\\n\") #", "scale = MajorScale(scale,2) elif optn == \"3\": note = None scale = None", "elif optn == \"4\": break else: print(\"Invalid option. Try again.\\n\") # minor scale", "None elif optn == \"4\": break else: print(\"Invalid option. Try again.\\n\") # major", "scale is None: # choose a note if note is None: note =", "majorScale import MajorScale from minorScale import MinorScale print(\"Hi welcome to my app.\\n\") note", "a note: \") menu = (\"1. {0} major scale\\n\" \"2. 
{0} minor scale\\n\"", "if optn == \"1\": scale = MajorScale(scale,1) elif optn == \"2\": scale =", "if isinstance(scale, MajorScale): print(scale) menu = (\"1. Get parallel minor\\n\" \"2. Get relative", "= MajorScale(note) elif optn == \"2\": scale = MinorScale(note) elif optn == \"3\":", "== \"4\": break else: print(\"Invalid option. Try again.\\n\") # major scale if isinstance(scale,", "if no scale is chosen if scale is None: # choose a note", "(\"1. Get parallel minor\\n\" \"2. Get relative minor\\n\" \"3. Choose another note\\n\" \"4.", "note = None scale = None while(True): # if no scale is chosen", "parallel minor\\n\" \"2. Get relative minor\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu)", "\"4\": break else: print(\"Invalid option. Try again.\\n\") # major scale if isinstance(scale, MajorScale):", "app.\\n\") note = None scale = None while(True): # if no scale is", "elif optn == \"4\": break else: print(\"Invalid option. Try again.\\n\") # major scale", "None elif optn == \"4\": break else: print(\"Invalid option. Try again.\\n\") # minor", "isinstance(scale, MajorScale): print(scale) menu = (\"1. Get parallel minor\\n\" \"2. Get relative minor\\n\"", "if optn == \"1\": scale = MinorScale(scale,1) elif optn == \"2\": scale =", "== \"2\": scale = MinorScale(note) elif optn == \"3\": note = None elif", "\"2\": scale = MinorScale(scale,2) elif optn == \"3\": note = None scale =", "elif optn == \"2\": scale = MajorScale(scale,2) elif optn == \"3\": note =", "elif optn == \"3\": note = None scale = None elif optn ==", "optn == \"4\": break else: print(\"Invalid option. Try again.\\n\") # major scale if", "print(\"Invalid option. Try again.\\n\") # minor scale if isinstance(scale, MinorScale): print(scale) menu =", "\") menu = (\"1. {0} major scale\\n\" \"2. {0} minor scale\\n\" \"3. Choose", "major scale\\n\" \"2. {0} minor scale\\n\" \"3. Choose another note\\n\" \"4. 
Exit\").format(note) print(menu)", "= MinorScale(note) elif optn == \"3\": note = None elif optn == \"4\":", "Note from majorScale import MajorScale from minorScale import MinorScale print(\"Hi welcome to my", "None: note = input(\"Choose a note: \") menu = (\"1. {0} major scale\\n\"", "scale = MajorScale(note) elif optn == \"2\": scale = MinorScale(note) elif optn ==", "= (\"1. Get parallel major\\n\" \"2. Get relative major\\n\" \"3. Choose another note\\n\"", "(\"1. Get parallel major\\n\" \"2. Get relative major\\n\" \"3. Choose another note\\n\" \"4.", "\"2. {0} minor scale\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) # choose", "= None scale = None while(True): # if no scale is chosen if", "input(\"\\nChoose an option: \") if optn == \"1\": scale = MajorScale(note) elif optn", "note = None scale = None elif optn == \"4\": break else: print(\"Invalid", "scale = MajorScale(scale,1) elif optn == \"2\": scale = MajorScale(scale,2) elif optn ==", "another note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose an option: \") if optn", "note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose an option: \") if optn ==", "optn = input(\"\\nChoose an option: \") if optn == \"1\": scale = MajorScale(note)", "menu = (\"1. Get parallel major\\n\" \"2. Get relative major\\n\" \"3. Choose another", "MajorScale(note) elif optn == \"2\": scale = MinorScale(note) elif optn == \"3\": note", "optn == \"2\": scale = MinorScale(note) elif optn == \"3\": note = None", "is None: # choose a note if note is None: note = input(\"Choose", "from minorScale import MinorScale print(\"Hi welcome to my app.\\n\") note = None scale", "minor\\n\" \"2. Get relative minor\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn", "== \"3\": note = None elif optn == \"4\": break else: print(\"Invalid option.", "choose a note if note is None: note = input(\"Choose a note: \")", "menu = (\"1. 
{0} major scale\\n\" \"2. {0} minor scale\\n\" \"3. Choose another", "# minor scale if isinstance(scale, MinorScale): print(scale) menu = (\"1. Get parallel major\\n\"", "again.\\n\") # major scale if isinstance(scale, MajorScale): print(scale) menu = (\"1. Get parallel", "# choose major or minor optn = input(\"\\nChoose an option: \") if optn", "is None: note = input(\"Choose a note: \") menu = (\"1. {0} major", "Try again.\\n\") # major scale if isinstance(scale, MajorScale): print(scale) menu = (\"1. Get", "minor\\n\" \"3. Choose another note\\n\" \"4. Exit\").format(note) print(menu) optn = input(\"\\nChoose an option:", "option: \") if optn == \"1\": scale = MajorScale(note) elif optn == \"2\":", "input(\"\\nChoose an option: \") if optn == \"1\": scale = MinorScale(scale,1) elif optn", "scale = MinorScale(note) elif optn == \"3\": note = None elif optn ==", "== \"2\": scale = MinorScale(scale,2) elif optn == \"3\": note = None scale", "if scale is None: # choose a note if note is None: note", "import Note from majorScale import MajorScale from minorScale import MinorScale print(\"Hi welcome to", "a note if note is None: note = input(\"Choose a note: \") menu", "optn == \"2\": scale = MinorScale(scale,2) elif optn == \"3\": note = None", "optn == \"3\": note = None scale = None elif optn == \"4\":" ]
[ "of displays\"\"\" val=10; pass class ApplicationLimit(LimitError): \"\"\"a limit to the number of applications", "if #rapid requests if tries>10: return add(portgetter,block) #not nice sleep(.1); continue #registrations running_displays[display].append(p)", "#on multiple gedits only the first one is alive def app(cmd,display,**kwargs): \"\"\"runs a", "self).__delitem__(display) self.removemapping(display) def pop(self, display): super(displaydict, self).pop(display) self.removemapping(display) #procs assoc with each display", "return True # else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this with", "def display_is_port(port): display=port return display #functions need to be one to one mappings", "import psutil # optionally used port2display={} display2port={} class LimitError(Exception): val=None; pass class DisplayLimit(LimitError):", "pass #should program onappstart onappclose #todo capture stdio on procs def get_openport(): s", ") for adisplay,an in delthese: #in case empty list try: running_displays[adisplay].pop(an) #the process...", "if an==0:continue #skip the broadway proc if aproc.poll() is None: continue# running else:", "#..hack classes dc=0 @staticmethod def getdisplay(self): self.dc+=1 ; return self.dc @staticmethod def __call__(self):", "specify a port def add(portgetter=get_openport ,block=True):#don't see a reason to not block remove_zombie_apps();", "class LimitError(Exception): val=None; pass class DisplayLimit(LimitError): \"\"\"a limit to the number of displays\"\"\"", "def isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can bind then not", "numbers are not 1-to-1') return display, port #what happens when the app spawns", "running_displays[adisplay].pop(an) #the process... 
# ..will be removed by the garbage collector eventually except:", "kill_all(): \"\"\"kills all display apps on the server forcefully ...that it knows about", "does not exit if it #cant get the port. it gives back: #\"Can't", "stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this with the server.. can't rely on being nice", "the number of displays\"\"\" val=10; pass class ApplicationLimit(LimitError): \"\"\"a limit to the number", "#don't use the port2dispaly_func ... #... in the code #display_is_port#friendly_display# # class keydefaultdict(defaultdict):", "#def add(port,block=True) not a good idea to specify a port def add(portgetter=get_openport ,block=True):#don't", ") except: #todo: problem: broadwayd does not exit if it #cant get the", "#lesson learned: #def add(port,block=True) not a good idea to specify a port def", "#if can bind then not busy s.close() return False except: return True #", "# optionally used port2display={} display2port={} class LimitError(Exception): val=None; pass class DisplayLimit(LimitError): \"\"\"a limit", ">display.DisplayLimit=10 \"\"\" import signal import os import atexit import subprocess from collections import", "not in running_displays: raise KeyError('no display #'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display]", "display in running_displays: raise KeyError('display server already running') else: if isport_openable(port) is True:", "< 0 or port<0: raise ValueError('neg values') return ret def display_is_port(port): display=port return", "ValueError('display does not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp)", "the html5 part of the app returning the display number blocks until the", "problem: broadwayd does not exit if it #cant get the 
port. it gives", "port. it gives back: #\"Can't listen: Error binding to address: Address already in", "Address already in use\" #dont' p.wait raise Exception(\"couldn't start display\") #block until 'app'", "key): # if self.default_factory is None: # raise KeyError( key ) # else:", "for an,aproc in enumerate(running_displays[adisplay]): if an==0:continue #skip the broadway proc if aproc.poll() is", "if it become a problem if really is not True: return for ap", "proc if aproc.poll() is None: continue# running else: delthese.append( (adisplay,an) ) for adisplay,an", "should be 1 to 1 mapping if len(display2port) != len(port2display): raise Exception('display and", "class displaydict(defaultdict): #adding issues are covvered by add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self,", "p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the not immediate delthese=[] for adisplay in running_displays:", "get the port. 
it gives back: #\"Can't listen: Error binding to address: Address", "def remove_zombie_apps(): #the not immediate delthese=[] for adisplay in running_displays: for an,aproc in", "one mappings bw out and in #port2display_function p2df=sequence port2display_function=p2df #don't use the port2dispaly_func", "delthese.append( (adisplay,an) ) for adisplay,an in delthese: #in case empty list try: running_displays[adisplay].pop(an)", "block==True:#todo if port given not openable tries=0 while ( (isport_openable(port) is not True)", "a problem if really is not True: return for ap in psutil.process_iter(): try:", "return True # cr=s.connect_ex(('127.0.0.1', port)) # if cr==0: return True # else: return", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class sequenceg(): #should have used a generator but", "def get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class sequenceg(): #should have", "else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this with the server.. can't", "sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn you want some 'web' ports\"\"\" ret= port-begin if", "raise Exception(\"couldn't start display\") #block until 'app' is ready on the port if", "'broadwayd': # index 2 is the port if int(ap.cmdline[2]) not in port2display: ap.kill()", "values') return ret def display_is_port(port): display=port return display #functions need to be one", "process... 
# ..will be removed by the garbage collector eventually except: pass def", "self).pop(display) self.removemapping(display) #procs assoc with each display running_displays=displaydict(list) #lesson learned: #def add(port,block=True) not", "atexit import subprocess from collections import defaultdict from time import sleep import socket", "s.bind(('127.0.0.1',port)) #if can bind then not busy s.close() return False except: return True", "raise KeyError( key ) # else: # ret = self[key] = self.default_factory(key) #", "KeyError('display server already running') else: if isport_openable(port) is True: raise ValueError(\"can't get port", "import os import atexit import subprocess from collections import defaultdict from time import", "import sleep import socket import psutil # optionally used port2display={} display2port={} class LimitError(Exception):", "running_displays=displaydict(list) #lesson learned: #def add(port,block=True) not a good idea to specify a port", "a... #...default dict.. do i really need defaultdict? port2display[port]=display; display2port[display]=port # port->display should", "should have used a #obj oriented approach \"\"\"manages GTK3 broadwayd displays .. 
and", "self.dc+=1 ; return self.dc @staticmethod def __call__(self): return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def", "if isport_openable(port) is True: raise ValueError(\"can't get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid", "issues are covvered by add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict, self).__delitem__(display)", "pass class ApplicationLimit(LimitError): \"\"\"a limit to the number of applications per display\"\"\" val=10", "is not True: return for ap in psutil.process_iter(): try: cmdline = ap.cmdline[0] except:", "= self[key] = self.default_factory(key) # return ret class displaydict(defaultdict): #adding issues are covvered", "isport_openable(port) is True: raise ValueError(\"can't get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid )", "sleep(.1); continue #registrations running_displays[display].append(p) #the only reason it's a... #...default dict.. 
do i", "#block until 'app' is ready on the port if block==True:#todo if port given", "# raise KeyError( key ) # else: # ret = self[key] = self.default_factory(key)", "all display apps on the server forcefully ...that it knows about that is.\"\"\"", "def friendly_display(port,begin=8000): \"\"\"for wehn you want some 'web' ports\"\"\" ret= port-begin if ret", "apps on the server forcefully ...that it knows about that is.\"\"\" for ad", "if len(display2port) != len(port2display): raise Exception('display and port numbers are not 1-to-1') return", "to one mappings bw out and in #port2display_function p2df=sequence port2display_function=p2df #don't use the", "\"\"\"a limit to the number of applications per display\"\"\" val=10 pass #should program", "p.wait raise Exception(\"couldn't start display\") #block until 'app' is ready on the port", "to set the limits after import >import display >display.DisplayLimit=10 \"\"\" import signal import", "display_is_port(port): display=port return display #functions need to be one to one mappings bw", "ApplicationLimit(LimitError): \"\"\"a limit to the number of applications per display\"\"\" val=10 pass #should", "if aproc.poll() is None: continue# running else: delthese.append( (adisplay,an) ) for adisplay,an in", "on its own should have used a #obj oriented approach \"\"\"manages GTK3 broadwayd", "is True: raise ValueError(\"can't get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except:", "return display #functions need to be one to one mappings bw out and", "on procs def get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class sequenceg():", "displays\"\"\" val=10; pass class ApplicationLimit(LimitError): \"\"\"a limit to the number of applications per", "it become a problem if really is not True: return for ap in", "not exit if it #cant get the port. 
it gives back: #\"Can't listen:", "onappclose #todo capture stdio on procs def get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0))", "a repo on its own should have used a #obj oriented approach \"\"\"manages", "ValueError('neg values') return ret def display_is_port(port): display=port return display #functions need to be", "safe. need to reserve port \"\"\"runs the html5 part of the app returning", "class DisplayLimit(LimitError): \"\"\"a limit to the number of displays\"\"\" val=10; pass class ApplicationLimit(LimitError):", "wehn you want some 'web' ports\"\"\" ret= port-begin if ret < 0 or", "( (isport_openable(port) is not True) ): tries+=1 ; #sometimes it gets stuck here", "#adding issues are covvered by add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict,", "port-begin if ret < 0 or port<0: raise ValueError('neg values') return ret def", "\"\"\" import signal import os import atexit import subprocess from collections import defaultdict", "kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. need to reserve port \"\"\"runs", "approach \"\"\"manages GTK3 broadwayd displays .. and to minimize bash scripting ugggh usage:", "port=portgetter() #not safe. 
need to reserve port \"\"\"runs the html5 part of the", "does not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return", "self.removemapping(display) #procs assoc with each display running_displays=displaydict(list) #lesson learned: #def add(port,block=True) not a", "display not in running_displays: raise KeyError('no display #'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM) proclist=", "idea to specify a port def add(portgetter=get_openport ,block=True):#don't see a reason to not", "a generator but cool to... #..hack classes dc=0 @staticmethod def getdisplay(self): self.dc+=1 ;", "display\"\"\" val=10 pass #should program onappstart onappclose #todo capture stdio on procs def", "reason to not block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe.", "spawns a window or another proc? #on multiple gedits only the first one", "a #obj oriented approach \"\"\"manages GTK3 broadwayd displays .. and to minimize bash", "the first one is alive def app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog on display.", "do i really need defaultdict? 
port2display[port]=display; display2port[display]=port # port->display should be 1 to", "stuck here if #rapid requests if tries>10: return add(portgetter,block) #not nice sleep(.1); continue", "ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp def isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "if cr==0: return True # else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using", "each display running_displays=displaydict(list) #lesson learned: #def add(port,block=True) not a good idea to specify", "display. \"\"\" if (display) not in running_displays: raise ValueError('display does not exist') remove_zombie_apps()", "to kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for p in reversed(proclist): p.send_signal(signal); #p.kill() p.wait()", "display running_displays=displaydict(list) #lesson learned: #def add(port,block=True) not a good idea to specify a", "remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. 
need to reserve port", "#rapid requests if tries>10: return add(portgetter,block) #not nice sleep(.1); continue #registrations running_displays[display].append(p) #the", "s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class sequenceg(): #should have used a", "# ..will be removed by the garbage collector eventually except: pass def kill_zombie_displays(really=True):#seems", "not in running_displays: raise ValueError('display does not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val)", "return self.dc @staticmethod def __call__(self): return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for", "it if it become a problem if really is not True: return for", "port2display={} display2port={} class LimitError(Exception): val=None; pass class DisplayLimit(LimitError): \"\"\"a limit to the number", "try: cmdline = ap.cmdline[0] except: continue if cmdline == 'broadwayd': # index 2", "displaydict(defaultdict): #adding issues are covvered by add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display):", "raise KeyError('display server already running') else: if isport_openable(port) is True: raise ValueError(\"can't get", "not block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. 
need to", "mapping if len(display2port) != len(port2display): raise Exception('display and port numbers are not 1-to-1')", "limit to the number of displays\"\"\" val=10; pass class ApplicationLimit(LimitError): \"\"\"a limit to", "can bind then not busy s.close() return False except: return True # cr=s.connect_ex(('127.0.0.1',", "so just kill it \"\"\"stops display and everything running on it\"\"\" if display", "if int(ap.cmdline[2]) not in port2display: ap.kill() def kill_all(): \"\"\"kills all display apps on", "\"\"\"runs the html5 part of the app returning the display number blocks until", "'app' is ready on the port if block==True:#todo if port given not openable", "adisplay,an in delthese: #in case empty list try: running_displays[adisplay].pop(an) #the process... # ..will", "assoc with each display running_displays=displaydict(list) #lesson learned: #def add(port,block=True) not a good idea", "you want some 'web' ports\"\"\" ret= port-begin if ret < 0 or port<0:", "def kill_zombie_displays(really=True):#seems to add robustness... #stop it if it become a problem if", "returning the display number blocks until the dispaly server is up by default\"\"\"", "proc? #on multiple gedits only the first one is alive def app(cmd,display,**kwargs): \"\"\"runs", "if cmdline == 'broadwayd': # index 2 is the port if int(ap.cmdline[2]) not", "one to one mappings bw out and in #port2display_function p2df=sequence port2display_function=p2df #don't use", "gedit is a gtk3 app you may want to set the limits after", "self.removemapping(display) def pop(self, display): super(displaydict, self).pop(display) self.removemapping(display) #procs assoc with each display running_displays=displaydict(list)", "back: #\"Can't listen: Error binding to address: Address already in use\" #dont' p.wait", "port #what happens when the app spawns a window or another proc? #on", "#cant get the port. 
it gives back: #\"Can't listen: Error binding to address:", "running') else: if isport_openable(port) is True: raise ValueError(\"can't get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh'", "be one to one mappings bw out and in #port2display_function p2df=sequence port2display_function=p2df #don't", "a reason to not block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not", "not 1-to-1') return display, port #what happens when the app spawns a window", "import subprocess from collections import defaultdict from time import sleep import socket import", "the dispaly server is up by default\"\"\" display=p2df(port) if display in running_displays: raise", "case empty list try: running_displays[adisplay].pop(an) #the process... # ..will be removed by the", "socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class sequenceg(): #should have used a generator but cool", "ret= port-begin if ret < 0 or port<0: raise ValueError('neg values') return ret", "if self.default_factory is None: # raise KeyError( key ) # else: # ret", ">display.app('gedit',displaynum) #where gedit is a gtk3 app you may want to set the", "then not busy s.close() return False except: return True # cr=s.connect_ex(('127.0.0.1', port)) #", "cmdline = ap.cmdline[0] except: continue if cmdline == 'broadwayd': # index 2 is", "super(displaydict, self).pop(display) self.removemapping(display) #procs assoc with each display running_displays=displaydict(list) #lesson learned: #def add(port,block=True)", "in the code #display_is_port#friendly_display# # class keydefaultdict(defaultdict): # def __missing__(self, key): # if", "display number blocks until the dispaly server is up by default\"\"\" display=p2df(port) if", "1 to 1 mapping if len(display2port) != len(port2display): raise Exception('display and port numbers", "1 mapping if len(display2port) != len(port2display): raise 
Exception('display and port numbers are not", "it\"\"\" if display not in running_displays: raise KeyError('no display #'+str(display)+' to kill') #os.killpg(p.pid,", ") # else: # ret = self[key] = self.default_factory(key) # return ret class", "blocks until the dispaly server is up by default\"\"\" display=p2df(port) if display in", "the limits after import >import display >display.DisplayLimit=10 \"\"\" import signal import os import", "sleep import socket import psutil # optionally used port2display={} display2port={} class LimitError(Exception): val=None;", "self.default_factory(key) # return ret class displaydict(defaultdict): #adding issues are covvered by add() def", "to specify a port def add(portgetter=get_openport ,block=True):#don't see a reason to not block", "\"\"\"kills all display apps on the server forcefully ...that it knows about that", "dispaly server is up by default\"\"\" display=p2df(port) if display in running_displays: raise KeyError('display", "#registrations running_displays[display].append(p) #the only reason it's a... #...default dict.. do i really need", "immediate delthese=[] for adisplay in running_displays: for an,aproc in enumerate(running_displays[adisplay]): if an==0:continue #skip", "a port def add(portgetter=get_openport ,block=True):#don't see a reason to not block remove_zombie_apps(); kill_zombie_displays()", "used port2display={} display2port={} class LimitError(Exception): val=None; pass class DisplayLimit(LimitError): \"\"\"a limit to the", "# if cr==0: return True # else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when", "getdisplay(self): self.dc+=1 ; return self.dc @staticmethod def __call__(self): return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg)", "# so just kill it \"\"\"stops display and everything running on it\"\"\" if", "try: running_displays[adisplay].pop(an) #the process... 
# ..will be removed by the garbage collector eventually", "start display\") #block until 'app' is ready on the port if block==True:#todo if", "def app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog on display. \"\"\" if (display) not in", "port =display.add() >display.app('gedit',displaynum) #where gedit is a gtk3 app you may want to", "bind then not busy s.close() return False except: return True # cr=s.connect_ex(('127.0.0.1', port))", "import socket import psutil # optionally used port2display={} display2port={} class LimitError(Exception): val=None; pass", "port<0: raise ValueError('neg values') return ret def display_is_port(port): display=port return display #functions need", "except: pass def kill_zombie_displays(really=True):#seems to add robustness... #stop it if it become a", "eventually except: pass def kill_zombie_displays(really=True):#seems to add robustness... #stop it if it become", "raise ValueError(\"can't get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo: problem:", "#dont' p.wait raise Exception(\"couldn't start display\") #block until 'app' is ready on the", "app you may want to set the limits after import >import display >display.DisplayLimit=10", "be 1 to 1 mapping if len(display2port) != len(port2display): raise Exception('display and port", "need to be one to one mappings bw out and in #port2display_function p2df=sequence", "pass class DisplayLimit(LimitError): \"\"\"a limit to the number of displays\"\"\" val=10; pass class", "display2port={} class LimitError(Exception): val=None; pass class DisplayLimit(LimitError): \"\"\"a limit to the number of", "remove_zombie_apps(): #the not immediate delthese=[] for adisplay in running_displays: for an,aproc in enumerate(running_displays[adisplay]):", "from time import sleep import socket import psutil # optionally used port2display={} display2port={}", "defaultdict from time import sleep import socket import psutil # 
optionally used port2display={}", "kill_zombie_displays(really=True):#seems to add robustness... #stop it if it become a problem if really", "classes dc=0 @staticmethod def getdisplay(self): self.dc+=1 ; return self.dc @staticmethod def __call__(self): return", "to be one to one mappings bw out and in #port2display_function p2df=sequence port2display_function=p2df", "): tries+=1 ; #sometimes it gets stuck here if #rapid requests if tries>10:", "not True: return for ap in psutil.process_iter(): try: cmdline = ap.cmdline[0] except: continue", "port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self, display): super(displaydict, self).pop(display) self.removemapping(display)", "return add(portgetter,block) #not nice sleep(.1); continue #registrations running_displays[display].append(p) #the only reason it's a...", "!= len(port2display): raise Exception('display and port numbers are not 1-to-1') return display, port", "s.getsockname()[1] class sequenceg(): #should have used a generator but cool to... #..hack classes", "in running_displays: raise KeyError('no display #'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for", "running_displays[display] for p in reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the", "port if int(ap.cmdline[2]) not in port2display: ap.kill() def kill_all(): \"\"\"kills all display apps", "html5 part of the app returning the display number blocks until the dispaly", "binding to address: Address already in use\" #dont' p.wait raise Exception(\"couldn't start display\")", "to 1 mapping if len(display2port) != len(port2display): raise Exception('display and port numbers are", "is the port if int(ap.cmdline[2]) not in port2display: ap.kill() def kill_all(): \"\"\"kills all", "displays .. 
and to minimize bash scripting ugggh usage: >displynum, port =display.add() >display.app('gedit',displaynum)", "return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn you want some 'web'", "display, port #what happens when the app spawns a window or another proc?", "dict.. do i really need defaultdict? port2display[port]=display; display2port[display]=port # port->display should be 1", "if display in running_displays: raise KeyError('display server already running') else: if isport_openable(port) is", "keydefaultdict(defaultdict): # def __missing__(self, key): # if self.default_factory is None: # raise KeyError(", "@staticmethod def __call__(self): return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn you", "import >import display >display.DisplayLimit=10 \"\"\" import signal import os import atexit import subprocess", "port \"\"\"runs the html5 part of the app returning the display number blocks", "Exception(\"couldn't start display\") #block until 'app' is ready on the port if block==True:#todo", "display #'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for p in reversed(proclist): p.send_signal(signal);", "problem if really is not True: return for ap in psutil.process_iter(): try: cmdline", "a good idea to specify a port def add(portgetter=get_openport ,block=True):#don't see a reason", "ap in psutil.process_iter(): try: cmdline = ap.cmdline[0] except: continue if cmdline == 'broadwayd':", "#not safe. need to reserve port \"\"\"runs the html5 part of the app", "broadwayd does not exit if it #cant get the port. it gives back:", "val=10; pass class ApplicationLimit(LimitError): \"\"\"a limit to the number of applications per display\"\"\"", "the app spawns a window or another proc? #on multiple gedits only the", "port2dispaly_func ... #... 
in the code #display_is_port#friendly_display# # class keydefaultdict(defaultdict): # def __missing__(self,", "used a #obj oriented approach \"\"\"manages GTK3 broadwayd displays .. and to minimize", "app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog on display. \"\"\" if (display) not in running_displays:", "can't rely on being nice # so just kill it \"\"\"stops display and", "raise KeyError('no display #'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for p in", "return display, port #what happens when the app spawns a window or another", "capture stdio on procs def get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1]", "collections import defaultdict from time import sleep import socket import psutil # optionally", "display\") #block until 'app' is ready on the port if block==True:#todo if port", "if ret < 0 or port<0: raise ValueError('neg values') return ret def display_is_port(port):", "= self.default_factory(key) # return ret class displaydict(defaultdict): #adding issues are covvered by add()", "is alive def app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog on display. 
\"\"\" if (display)", "def pop(self, display): super(displaydict, self).pop(display) self.removemapping(display) #procs assoc with each display running_displays=displaydict(list) #lesson", "try: s.bind(('127.0.0.1',port)) #if can bind then not busy s.close() return False except: return", "#'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for p in reversed(proclist): p.send_signal(signal); #p.kill()", "'web' ports\"\"\" ret= port-begin if ret < 0 or port<0: raise ValueError('neg values')", "self.default_factory is None: # raise KeyError( key ) # else: # ret =", "port)) # if cr==0: return True # else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): #", "None: # raise KeyError( key ) # else: # ret = self[key] =", "except: return True # cr=s.connect_ex(('127.0.0.1', port)) # if cr==0: return True # else:", "cmdline == 'broadwayd': # index 2 is the port if int(ap.cmdline[2]) not in", "number blocks until the dispaly server is up by default\"\"\" display=p2df(port) if display", "display): super(displaydict, self).pop(display) self.removemapping(display) #procs assoc with each display running_displays=displaydict(list) #lesson learned: #def", "display=p2df(port) if display in running_displays: raise KeyError('display server already running') else: if isport_openable(port)", "import defaultdict from time import sleep import socket import psutil # optionally used", "a gtk3 app you may want to set the limits after import >import", "False except: return True # cr=s.connect_ex(('127.0.0.1', port)) # if cr==0: return True #", "(adisplay,an) ) for adisplay,an in delthese: #in case empty list try: running_displays[adisplay].pop(an) #the", "for ap in psutil.process_iter(): try: cmdline = ap.cmdline[0] except: continue if cmdline ==", "#... 
in the code #display_is_port#friendly_display# # class keydefaultdict(defaultdict): # def __missing__(self, key): #", "just kill it \"\"\"stops display and everything running on it\"\"\" if display not", "\"\"\"runs a gtk3 prog on display. \"\"\" if (display) not in running_displays: raise", "by the garbage collector eventually except: pass def kill_zombie_displays(really=True):#seems to add robustness... #stop", "everything running on it\"\"\" if display not in running_displays: raise KeyError('no display #'+str(display)+'", "True # cr=s.connect_ex(('127.0.0.1', port)) # if cr==0: return True # else: return cr", "only the first one is alive def app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog on", "is None: # raise KeyError( key ) # else: # ret = self[key]", "__missing__(self, key): # if self.default_factory is None: # raise KeyError( key ) #", "to address: Address already in use\" #dont' p.wait raise Exception(\"couldn't start display\") #block", "#kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp def isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try:", "bw out and in #port2display_function p2df=sequence port2display_function=p2df #don't use the port2dispaly_func ... #...", "defaultdict? port2display[port]=display; display2port[display]=port # port->display should be 1 to 1 mapping if len(display2port)", "up by default\"\"\" display=p2df(port) if display in running_displays: raise KeyError('display server already running')", "True # else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this with the", "__call__(self): return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn you want some", "def kill_all(): \"\"\"kills all display apps on the server forcefully ...that it knows", "# when using this with the server.. 
can't rely on being nice #", "running_displays[display].append(sp) return sp def isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can", "a window or another proc? #on multiple gedits only the first one is", "def getdisplay(self): self.dc+=1 ; return self.dc @staticmethod def __call__(self): return self.getdisplay(self) sequence=lambda p:", "#procs assoc with each display running_displays=displaydict(list) #lesson learned: #def add(port,block=True) not a good", "LimitError(Exception): val=None; pass class DisplayLimit(LimitError): \"\"\"a limit to the number of displays\"\"\" val=10;", "socket import psutil # optionally used port2display={} display2port={} class LimitError(Exception): val=None; pass class", "running_displays: raise KeyError('display server already running') else: if isport_openable(port) is True: raise ValueError(\"can't", "server already running') else: if isport_openable(port) is True: raise ValueError(\"can't get port \"+str(port))", "\"\"\" if (display) not in running_displays: raise ValueError('display does not exist') remove_zombie_apps() if", "return False except: return True # cr=s.connect_ex(('127.0.0.1', port)) # if cr==0: return True", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can bind then not busy s.close() return", "using this with the server.. can't rely on being nice # so just", "def __missing__(self, key): # if self.default_factory is None: # raise KeyError( key )", "the port. it gives back: #\"Can't listen: Error binding to address: Address already", "when using this with the server.. 
can't rely on being nice # so", "except: continue if cmdline == 'broadwayd': # index 2 is the port if", "aproc.poll() is None: continue# running else: delthese.append( (adisplay,an) ) for adisplay,an in delthese:", "minimize bash scripting ugggh usage: >displynum, port =display.add() >display.app('gedit',displaynum) #where gedit is a", "and to minimize bash scripting ugggh usage: >displynum, port =display.add() >display.app('gedit',displaynum) #where gedit", "rely on being nice # so just kill it \"\"\"stops display and everything", "# cr=s.connect_ex(('127.0.0.1', port)) # if cr==0: return True # else: return cr def", "to not block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. need", "kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for p in reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display)", "try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo: problem: broadwayd does not exit if", "in #port2display_function p2df=sequence port2display_function=p2df #don't use the port2dispaly_func ... #... 
in the code", "def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self, display): super(displaydict,", "the server forcefully ...that it knows about that is.\"\"\" for ad in running_displays.keys():", "see a reason to not block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter()", "after import >import display >display.DisplayLimit=10 \"\"\" import signal import os import atexit import", "broadway proc if aproc.poll() is None: continue# running else: delthese.append( (adisplay,an) ) for", "pass def kill_zombie_displays(really=True):#seems to add robustness... #stop it if it become a problem", "reason it's a... #...default dict.. do i really need defaultdict? port2display[port]=display; display2port[display]=port #", "len(display2port) != len(port2display): raise Exception('display and port numbers are not 1-to-1') return display,", "with each display running_displays=displaydict(list) #lesson learned: #def add(port,block=True) not a good idea to", "program onappstart onappclose #todo capture stdio on procs def get_openport(): s = socket.socket(socket.AF_INET,", "raise ValueError('neg values') return ret def display_is_port(port): display=port return display #functions need to", "not True) ): tries+=1 ; #sometimes it gets stuck here if #rapid requests", "one is alive def app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog on display. \"\"\" if", "server forcefully ...that it knows about that is.\"\"\" for ad in running_displays.keys(): stop(ad,signal=signal.SIGKILL)", "generator but cool to... 
#..hack classes dc=0 @staticmethod def getdisplay(self): self.dc+=1 ; return", "pop(self, display): super(displaydict, self).pop(display) self.removemapping(display) #procs assoc with each display running_displays=displaydict(list) #lesson learned:", "dc=0 @staticmethod def getdisplay(self): self.dc+=1 ; return self.dc @staticmethod def __call__(self): return self.getdisplay(self)", "mappings bw out and in #port2display_function p2df=sequence port2display_function=p2df #don't use the port2dispaly_func ...", "are covvered by add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display)", "(isport_openable(port) is not True) ): tries+=1 ; #sometimes it gets stuck here if", ".. and to minimize bash scripting ugggh usage: >displynum, port =display.add() >display.app('gedit',displaynum) #where", ",str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo: problem: broadwayd does not exit if it #cant", "broadwayd displays .. and to minimize bash scripting ugggh usage: >displynum, port =display.add()", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class sequenceg(): #should have used a generator", "@staticmethod def getdisplay(self): self.dc+=1 ; return self.dc @staticmethod def __call__(self): return self.getdisplay(self) sequence=lambda", "gets stuck here if #rapid requests if tries>10: return add(portgetter,block) #not nice sleep(.1);", "removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self, display): super(displaydict, self).pop(display)", "s.close() return False except: return True # cr=s.connect_ex(('127.0.0.1', port)) # if cr==0: return", "#in case empty list try: running_displays[adisplay].pop(an) #the process... 
# ..will be removed by", "# return ret class displaydict(defaultdict): #adding issues are covvered by add() def removemapping(self,display):", "of applications per display\"\"\" val=10 pass #should program onappstart onappclose #todo capture stdio", "continue# running else: delthese.append( (adisplay,an) ) for adisplay,an in delthese: #in case empty", "server.. can't rely on being nice # so just kill it \"\"\"stops display", "port given not openable tries=0 while ( (isport_openable(port) is not True) ): tries+=1", "exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp def", "return for ap in psutil.process_iter(): try: cmdline = ap.cmdline[0] except: continue if cmdline", "# else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this with the server..", "# index 2 is the port if int(ap.cmdline[2]) not in port2display: ap.kill() def", "port if block==True:#todo if port given not openable tries=0 while ( (isport_openable(port) is", "# if self.default_factory is None: # raise KeyError( key ) # else: #", "on being nice # so just kill it \"\"\"stops display and everything running", "in running_displays: for an,aproc in enumerate(running_displays[adisplay]): if an==0:continue #skip the broadway proc if", "cr==0: return True # else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this", "key ) # else: # ret = self[key] = self.default_factory(key) # return ret", "openable tries=0 while ( (isport_openable(port) is not True) ): tries+=1 ; #sometimes it", "port->display should be 1 to 1 mapping if len(display2port) != len(port2display): raise Exception('display", "the port if int(ap.cmdline[2]) not in port2display: ap.kill() def kill_all(): \"\"\"kills all display", "return ret def 
display_is_port(port): display=port return display #functions need to be one to", "signal.SIGTERM) proclist= running_displays[display] for p in reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def", "the garbage collector eventually except: pass def kill_zombie_displays(really=True):#seems to add robustness... #stop it", "if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp def isport_openable(port): s", "else: delthese.append( (adisplay,an) ) for adisplay,an in delthese: #in case empty list try:", "app returning the display number blocks until the dispaly server is up by", "part of the app returning the display number blocks until the dispaly server", "port def add(portgetter=get_openport ,block=True):#don't see a reason to not block remove_zombie_apps(); kill_zombie_displays() if", "have used a #obj oriented approach \"\"\"manages GTK3 broadwayd displays .. and to", "gedits only the first one is alive def app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog", "val=10 pass #should program onappstart onappclose #todo capture stdio on procs def get_openport():", "happens when the app spawns a window or another proc? #on multiple gedits", "subprocess from collections import defaultdict from time import sleep import socket import psutil", "... #... 
in the code #display_is_port#friendly_display# # class keydefaultdict(defaultdict): # def __missing__(self, key):", "gives back: #\"Can't listen: Error binding to address: Address already in use\" #dont'", "ret = self[key] = self.default_factory(key) # return ret class displaydict(defaultdict): #adding issues are", "reserve port \"\"\"runs the html5 part of the app returning the display number", "good idea to specify a port def add(portgetter=get_openport ,block=True):#don't see a reason to", "\"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo: problem: broadwayd does not exit", "psutil.process_iter(): try: cmdline = ap.cmdline[0] except: continue if cmdline == 'broadwayd': # index", "else: # ret = self[key] = self.default_factory(key) # return ret class displaydict(defaultdict): #adding", ",block=True):#don't see a reason to not block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val)", "friendly_display(port,begin=8000): \"\"\"for wehn you want some 'web' ports\"\"\" ret= port-begin if ret <", "running else: delthese.append( (adisplay,an) ) for adisplay,an in delthese: #in case empty list", "alive def app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog on display. \"\"\" if (display) not", "#the process... # ..will be removed by the garbage collector eventually except: pass", "add robustness... #stop it if it become a problem if really is not", "Exception('display and port numbers are not 1-to-1') return display, port #what happens when", "len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. 
need to reserve port \"\"\"runs the html5", "is ready on the port if block==True:#todo if port given not openable tries=0", "applications per display\"\"\" val=10 pass #should program onappstart onappclose #todo capture stdio on", "multiple gedits only the first one is alive def app(cmd,display,**kwargs): \"\"\"runs a gtk3", "for p in reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the not", "proclist= running_displays[display] for p in reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps():", "== 'broadwayd': # index 2 is the port if int(ap.cmdline[2]) not in port2display:", "#functions need to be one to one mappings bw out and in #port2display_function", "p in reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the not immediate", "..will be removed by the garbage collector eventually except: pass def kill_zombie_displays(really=True):#seems to", "ready on the port if block==True:#todo if port given not openable tries=0 while", "to... 
#..hack classes dc=0 @staticmethod def getdisplay(self): self.dc+=1 ; return self.dc @staticmethod def", "forcefully ...that it knows about that is.\"\"\" for ad in running_displays.keys(): stop(ad,signal=signal.SIGKILL) atexit.register(kill_all)", "be removed by the garbage collector eventually except: pass def kill_zombie_displays(really=True):#seems to add", "remove_zombie_apps() def remove_zombie_apps(): #the not immediate delthese=[] for adisplay in running_displays: for an,aproc", "remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp def isport_openable(port):", "are not 1-to-1') return display, port #what happens when the app spawns a", "def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this with the server.. can't rely on being", "ugggh usage: >displynum, port =display.add() >display.app('gedit',displaynum) #where gedit is a gtk3 app you", "display=port return display #functions need to be one to one mappings bw out", "on display. 
\"\"\" if (display) not in running_displays: raise ValueError('display does not exist')", "port2display: ap.kill() def kill_all(): \"\"\"kills all display apps on the server forcefully ...that", "adisplay in running_displays: for an,aproc in enumerate(running_displays[adisplay]): if an==0:continue #skip the broadway proc", "Error binding to address: Address already in use\" #dont' p.wait raise Exception(\"couldn't start", "return sp def isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can bind", "signal import os import atexit import subprocess from collections import defaultdict from time", "number of displays\"\"\" val=10; pass class ApplicationLimit(LimitError): \"\"\"a limit to the number of", ",**kwargs) running_displays[display].append(sp) return sp def isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if", "an,aproc in enumerate(running_displays[adisplay]): if an==0:continue #skip the broadway proc if aproc.poll() is None:", "\"\"\"manages GTK3 broadwayd displays .. and to minimize bash scripting ugggh usage: >displynum,", "set the limits after import >import display >display.DisplayLimit=10 \"\"\" import signal import os", "isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can bind then not busy", "= ap.cmdline[0] except: continue if cmdline == 'broadwayd': # index 2 is the", "in psutil.process_iter(): try: cmdline = ap.cmdline[0] except: continue if cmdline == 'broadwayd': #", "len(port2display): raise Exception('display and port numbers are not 1-to-1') return display, port #what", "listen: Error binding to address: Address already in use\" #dont' p.wait raise Exception(\"couldn't", "app spawns a window or another proc? 
#on multiple gedits only the first", "kill it \"\"\"stops display and everything running on it\"\"\" if display not in", "return s.getsockname()[1] class sequenceg(): #should have used a generator but cool to... #..hack", "already running') else: if isport_openable(port) is True: raise ValueError(\"can't get port \"+str(port)) try:", "to minimize bash scripting ugggh usage: >displynum, port =display.add() >display.app('gedit',displaynum) #where gedit is", "__delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self, display): super(displaydict, self).pop(display) self.removemapping(display) #procs assoc", "ap.kill() def kill_all(): \"\"\"kills all display apps on the server forcefully ...that it", "limit to the number of applications per display\"\"\" val=10 pass #should program onappstart", "out and in #port2display_function p2df=sequence port2display_function=p2df #don't use the port2dispaly_func ... #... in", "def add(portgetter=get_openport ,block=True):#don't see a reason to not block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val:", "number of applications per display\"\"\" val=10 pass #should program onappstart onappclose #todo capture", "in use\" #dont' p.wait raise Exception(\"couldn't start display\") #block until 'app' is ready", "return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this with the server.. 
can't rely", "if really is not True: return for ap in psutil.process_iter(): try: cmdline =", "val=None; pass class DisplayLimit(LimitError): \"\"\"a limit to the number of displays\"\"\" val=10; pass", "def __call__(self): return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn you want", "on the port if block==True:#todo if port given not openable tries=0 while (", "psutil # optionally used port2display={} display2port={} class LimitError(Exception): val=None; pass class DisplayLimit(LimitError): \"\"\"a", "really need defaultdict? port2display[port]=display; display2port[display]=port # port->display should be 1 to 1 mapping", "to add robustness... #stop it if it become a problem if really is", "#where gedit is a gtk3 app you may want to set the limits", "#display_is_port#friendly_display# # class keydefaultdict(defaultdict): # def __missing__(self, key): # if self.default_factory is None:", "if (display) not in running_displays: raise ValueError('display does not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val:", "not busy s.close() return False except: return True # cr=s.connect_ex(('127.0.0.1', port)) # if", "delthese: #in case empty list try: running_displays[adisplay].pop(an) #the process... # ..will be removed", "None: continue# running else: delthese.append( (adisplay,an) ) for adisplay,an in delthese: #in case", "#what happens when the app spawns a window or another proc? #on multiple", "raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp def isport_openable(port): s = socket.socket(socket.AF_INET,", "the server.. can't rely on being nice # so just kill it \"\"\"stops", "cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT): # when using this with the server.. 
can't rely on", "raise Exception('display and port numbers are not 1-to-1') return display, port #what happens", "#p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the not immediate delthese=[] for adisplay in", "and in #port2display_function p2df=sequence port2display_function=p2df #don't use the port2dispaly_func ... #... in the", "raise ValueError('display does not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs)", "really is not True: return for ap in psutil.process_iter(): try: cmdline = ap.cmdline[0]", "a gtk3 prog on display. \"\"\" if (display) not in running_displays: raise ValueError('display", "\"\"\"stops display and everything running on it\"\"\" if display not in running_displays: raise", "#not nice sleep(.1); continue #registrations running_displays[display].append(p) #the only reason it's a... #...default dict..", "it's a... #...default dict.. do i really need defaultdict? port2display[port]=display; display2port[display]=port # port->display", "# ret = self[key] = self.default_factory(key) # return ret class displaydict(defaultdict): #adding issues", "sp def isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can bind then", "tries=0 while ( (isport_openable(port) is not True) ): tries+=1 ; #sometimes it gets", "repo on its own should have used a #obj oriented approach \"\"\"manages GTK3", "True) ): tries+=1 ; #sometimes it gets stuck here if #rapid requests if", "the broadway proc if aproc.poll() is None: continue# running else: delthese.append( (adisplay,an) )", "could be in a repo on its own should have used a #obj", "# port->display should be 1 to 1 mapping if len(display2port) != len(port2display): raise", "#should have used a generator but cool to... 
#..hack classes dc=0 @staticmethod def", "first one is alive def app(cmd,display,**kwargs): \"\"\"runs a gtk3 prog on display. \"\"\"", "used a generator but cool to... #..hack classes dc=0 @staticmethod def getdisplay(self): self.dc+=1", "garbage collector eventually except: pass def kill_zombie_displays(really=True):#seems to add robustness... #stop it if", "port2display_function=p2df #don't use the port2dispaly_func ... #... in the code #display_is_port#friendly_display# # class", "port numbers are not 1-to-1') return display, port #what happens when the app", "add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self, display):", "is a gtk3 app you may want to set the limits after import", "1-to-1') return display, port #what happens when the app spawns a window or", "(len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp def isport_openable(port): s =", "socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can bind then not busy s.close() return False except:", "is None: continue# running else: delthese.append( (adisplay,an) ) for adisplay,an in delthese: #in", "add(portgetter,block) #not nice sleep(.1); continue #registrations running_displays[display].append(p) #the only reason it's a... 
#...default", "add(portgetter=get_openport ,block=True):#don't see a reason to not block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise", "#os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for p in reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps()", "display >display.DisplayLimit=10 \"\"\" import signal import os import atexit import subprocess from collections", "s.bind(('',0)) return s.getsockname()[1] class sequenceg(): #should have used a generator but cool to...", "gtk3 prog on display. \"\"\" if (display) not in running_displays: raise ValueError('display does", "tries>10: return add(portgetter,block) #not nice sleep(.1); continue #registrations running_displays[display].append(p) #the only reason it's", "if display not in running_displays: raise KeyError('no display #'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM)", "sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn you want some 'web' ports\"\"\" ret=", "gtk3 app you may want to set the limits after import >import display", "being nice # so just kill it \"\"\"stops display and everything running on", "in enumerate(running_displays[adisplay]): if an==0:continue #skip the broadway proc if aproc.poll() is None: continue#", "if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. 
need to reserve port \"\"\"runs the", "ret def display_is_port(port): display=port return display #functions need to be one to one", "from collections import defaultdict from time import sleep import socket import psutil #", "optionally used port2display={} display2port={} class LimitError(Exception): val=None; pass class DisplayLimit(LimitError): \"\"\"a limit to", "of the app returning the display number blocks until the dispaly server is", "need to reserve port \"\"\"runs the html5 part of the app returning the", "to reserve port \"\"\"runs the html5 part of the app returning the display", "default\"\"\" display=p2df(port) if display in running_displays: raise KeyError('display server already running') else: if", "#...default dict.. do i really need defaultdict? port2display[port]=display; display2port[display]=port # port->display should be", "0 or port<0: raise ValueError('neg values') return ret def display_is_port(port): display=port return display", "on it\"\"\" if display not in running_displays: raise KeyError('no display #'+str(display)+' to kill')", "2 is the port if int(ap.cmdline[2]) not in port2display: ap.kill() def kill_all(): \"\"\"kills", "in reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the not immediate delthese=[]", "#the not immediate delthese=[] for adisplay in running_displays: for an,aproc in enumerate(running_displays[adisplay]): if", "here if #rapid requests if tries>10: return add(portgetter,block) #not nice sleep(.1); continue #registrations", "the display number blocks until the dispaly server is up by default\"\"\" display=p2df(port)", "window or another proc? 
#on multiple gedits only the first one is alive", "else: if isport_openable(port) is True: raise ValueError(\"can't get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)]", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can bind then not busy s.close() return False", "import signal import os import atexit import subprocess from collections import defaultdict from", "running_displays: raise KeyError('no display #'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for p", "running on it\"\"\" if display not in running_displays: raise KeyError('no display #'+str(display)+' to", "the port if block==True:#todo if port given not openable tries=0 while ( (isport_openable(port)", "until the dispaly server is up by default\"\"\" display=p2df(port) if display in running_displays:", "only reason it's a... #...default dict.. do i really need defaultdict? port2display[port]=display; display2port[display]=port", "not immediate delthese=[] for adisplay in running_displays: for an,aproc in enumerate(running_displays[adisplay]): if an==0:continue", "become a problem if really is not True: return for ap in psutil.process_iter():", "block remove_zombie_apps(); kill_zombie_displays() if len(running_displays)==DisplayLimit.val: raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. 
need to reserve", "reversed(proclist): p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the not immediate delthese=[] for", "(display) not in running_displays: raise ValueError('display does not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise", "want to set the limits after import >import display >display.DisplayLimit=10 \"\"\" import signal", "in running_displays: raise KeyError('display server already running') else: if isport_openable(port) is True: raise", "while ( (isport_openable(port) is not True) ): tries+=1 ; #sometimes it gets stuck", "it #cant get the port. it gives back: #\"Can't listen: Error binding to", "server is up by default\"\"\" display=p2df(port) if display in running_displays: raise KeyError('display server", "self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn you want some 'web' ports\"\"\"", "use the port2dispaly_func ... #... 
in the code #display_is_port#friendly_display# # class keydefaultdict(defaultdict): #", "#\"Can't listen: Error binding to address: Address already in use\" #dont' p.wait raise", "if block==True:#todo if port given not openable tries=0 while ( (isport_openable(port) is not", "index 2 is the port if int(ap.cmdline[2]) not in port2display: ap.kill() def kill_all():", "code #display_is_port#friendly_display# # class keydefaultdict(defaultdict): # def __missing__(self, key): # if self.default_factory is", ">import display >display.DisplayLimit=10 \"\"\" import signal import os import atexit import subprocess from", "it gets stuck here if #rapid requests if tries>10: return add(portgetter,block) #not nice", "it gives back: #\"Can't listen: Error binding to address: Address already in use\"", "by default\"\"\" display=p2df(port) if display in running_displays: raise KeyError('display server already running') else:", "#todo capture stdio on procs def get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return", "port2display[port]=display; display2port[display]=port # port->display should be 1 to 1 mapping if len(display2port) !=", "display and everything running on it\"\"\" if display not in running_displays: raise KeyError('no", "own should have used a #obj oriented approach \"\"\"manages GTK3 broadwayd displays ..", "if port given not openable tries=0 while ( (isport_openable(port) is not True) ):", "stdio on procs def get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class", "already in use\" #dont' p.wait raise Exception(\"couldn't start display\") #block until 'app' is", "continue if cmdline == 'broadwayd': # index 2 is the port if int(ap.cmdline[2])", "to the number of displays\"\"\" val=10; pass class ApplicationLimit(LimitError): \"\"\"a limit to the", "sequenceg(): #should have used a generator but cool to... 
#..hack classes dc=0 @staticmethod", "collector eventually except: pass def kill_zombie_displays(really=True):#seems to add robustness... #stop it if it", "nice sleep(.1); continue #registrations running_displays[display].append(p) #the only reason it's a... #...default dict.. do", "the number of applications per display\"\"\" val=10 pass #should program onappstart onappclose #todo", "but cool to... #..hack classes dc=0 @staticmethod def getdisplay(self): self.dc+=1 ; return self.dc", "want some 'web' ports\"\"\" ret= port-begin if ret < 0 or port<0: raise", "is not True) ): tries+=1 ; #sometimes it gets stuck here if #rapid", "; #sometimes it gets stuck here if #rapid requests if tries>10: return add(portgetter,block)", "ret class displaydict(defaultdict): #adding issues are covvered by add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def", "not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp", "when the app spawns a window or another proc? #on multiple gedits only", "an==0:continue #skip the broadway proc if aproc.poll() is None: continue# running else: delthese.append(", "oriented approach \"\"\"manages GTK3 broadwayd displays .. and to minimize bash scripting ugggh", "#skip the broadway proc if aproc.poll() is None: continue# running else: delthese.append( (adisplay,an)", "and port numbers are not 1-to-1') return display, port #what happens when the", "use\" #dont' p.wait raise Exception(\"couldn't start display\") #block until 'app' is ready on", "robustness... 
#stop it if it become a problem if really is not True:", "self[key] = self.default_factory(key) # return ret class displaydict(defaultdict): #adding issues are covvered by", "get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo: problem: broadwayd does", "enumerate(running_displays[adisplay]): if an==0:continue #skip the broadway proc if aproc.poll() is None: continue# running", "learned: #def add(port,block=True) not a good idea to specify a port def add(portgetter=get_openport", "for adisplay,an in delthese: #in case empty list try: running_displays[adisplay].pop(an) #the process... #", "# else: # ret = self[key] = self.default_factory(key) # return ret class displaydict(defaultdict):", "in a repo on its own should have used a #obj oriented approach", "KeyError('no display #'+str(display)+' to kill') #os.killpg(p.pid, signal.SIGTERM) proclist= running_displays[display] for p in reversed(proclist):", "except: #todo: problem: broadwayd does not exit if it #cant get the port.", "#obj oriented approach \"\"\"manages GTK3 broadwayd displays .. and to minimize bash scripting", "the code #display_is_port#friendly_display# # class keydefaultdict(defaultdict): # def __missing__(self, key): # if self.default_factory", "another proc? #on multiple gedits only the first one is alive def app(cmd,display,**kwargs):", "and everything running on it\"\"\" if display not in running_displays: raise KeyError('no display", "list try: running_displays[adisplay].pop(an) #the process... # ..will be removed by the garbage collector", "need defaultdict? 
port2display[port]=display; display2port[display]=port # port->display should be 1 to 1 mapping if", "os import atexit import subprocess from collections import defaultdict from time import sleep", "DisplayLimit(LimitError): \"\"\"a limit to the number of displays\"\"\" val=10; pass class ApplicationLimit(LimitError): \"\"\"a", "port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo: problem: broadwayd does not", "\"\"\"for wehn you want some 'web' ports\"\"\" ret= port-begin if ret < 0", "#should program onappstart onappclose #todo capture stdio on procs def get_openport(): s =", "class keydefaultdict(defaultdict): # def __missing__(self, key): # if self.default_factory is None: # raise", "True: return for ap in psutil.process_iter(): try: cmdline = ap.cmdline[0] except: continue if", "tries+=1 ; #sometimes it gets stuck here if #rapid requests if tries>10: return", "cr=s.connect_ex(('127.0.0.1', port)) # if cr==0: return True # else: return cr def stop(display,signal=signal.SIGKILL):#signal.SIGINT):", "DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. need to reserve port \"\"\"runs the html5 part of", "or port<0: raise ValueError('neg values') return ret def display_is_port(port): display=port return display #functions", "#todo: problem: broadwayd does not exit if it #cant get the port. it", "covvered by add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display) def", "its own should have used a #obj oriented approach \"\"\"manages GTK3 broadwayd displays", "class sequenceg(): #should have used a generator but cool to... #..hack classes dc=0", "not a good idea to specify a port def add(portgetter=get_openport ,block=True):#don't see a", "#,preexec_fn=os.setsid ) except: #todo: problem: broadwayd does not exit if it #cant get", "this with the server.. 
can't rely on being nice # so just kill", "prog on display. \"\"\" if (display) not in running_displays: raise ValueError('display does not", "requests if tries>10: return add(portgetter,block) #not nice sleep(.1); continue #registrations running_displays[display].append(p) #the only", "running_displays: for an,aproc in enumerate(running_displays[adisplay]): if an==0:continue #skip the broadway proc if aproc.poll()", "#port2display_function p2df=sequence port2display_function=p2df #don't use the port2dispaly_func ... #... in the code #display_is_port#friendly_display#", "in running_displays: raise ValueError('display does not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid", "True: raise ValueError(\"can't get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo:", "p.send_signal(signal); #p.kill() p.wait() running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the not immediate delthese=[] for adisplay", "you may want to set the limits after import >import display >display.DisplayLimit=10 \"\"\"", "with the server.. 
can't rely on being nice # so just kill it", "for adisplay in running_displays: for an,aproc in enumerate(running_displays[adisplay]): if an==0:continue #skip the broadway", "int(ap.cmdline[2]) not in port2display: ap.kill() def kill_all(): \"\"\"kills all display apps on the", "not in port2display: ap.kill() def kill_all(): \"\"\"kills all display apps on the server", "running_displays.pop(display) remove_zombie_apps() def remove_zombie_apps(): #the not immediate delthese=[] for adisplay in running_displays: for", "ap.cmdline[0] except: continue if cmdline == 'broadwayd': # index 2 is the port", "KeyError( key ) # else: # ret = self[key] = self.default_factory(key) # return", "it \"\"\"stops display and everything running on it\"\"\" if display not in running_displays:", "or another proc? #on multiple gedits only the first one is alive def", "bash scripting ugggh usage: >displynum, port =display.add() >display.app('gedit',displaynum) #where gedit is a gtk3", "class ApplicationLimit(LimitError): \"\"\"a limit to the number of applications per display\"\"\" val=10 pass", "raise DisplayLimit(DisplayLimit.val) port=portgetter() #not safe. need to reserve port \"\"\"runs the html5 part", "to the number of applications per display\"\"\" val=10 pass #should program onappstart onappclose", "GTK3 broadwayd displays .. and to minimize bash scripting ugggh usage: >displynum, port", "empty list try: running_displays[adisplay].pop(an) #the process... # ..will be removed by the garbage", "cool to... 
#..hack classes dc=0 @staticmethod def getdisplay(self): self.dc+=1 ; return self.dc @staticmethod", "# def __missing__(self, key): # if self.default_factory is None: # raise KeyError( key", "the app returning the display number blocks until the dispaly server is up", "display2port[display]=port # port->display should be 1 to 1 mapping if len(display2port) != len(port2display):", "be in a repo on its own should have used a #obj oriented", "def __delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self, display): super(displaydict, self).pop(display) self.removemapping(display) #procs", "=display.add() >display.app('gedit',displaynum) #where gedit is a gtk3 app you may want to set", "removed by the garbage collector eventually except: pass def kill_zombie_displays(really=True):#seems to add robustness...", "time import sleep import socket import psutil # optionally used port2display={} display2port={} class", "p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo: problem: broadwayd does not exit if it", "#the only reason it's a... #...default dict.. do i really need defaultdict? port2display[port]=display;", "sp=subprocess.Popen(['./display.sh',cmd,str(display)] ,**kwargs) running_displays[display].append(sp) return sp def isport_openable(port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port))", "on the server forcefully ...that it knows about that is.\"\"\" for ad in", "\"\"\"a limit to the number of displays\"\"\" val=10; pass class ApplicationLimit(LimitError): \"\"\"a limit", "have used a generator but cool to... 
#..hack classes dc=0 @staticmethod def getdisplay(self):", "ValueError(\"can't get port \"+str(port)) try: p=subprocess.Popen(['./start_display.sh' ,str(display),str(port)] #,preexec_fn=os.setsid ) except: #todo: problem: broadwayd", "until 'app' is ready on the port if block==True:#todo if port given not", "in port2display: ap.kill() def kill_all(): \"\"\"kills all display apps on the server forcefully", "if tries>10: return add(portgetter,block) #not nice sleep(.1); continue #registrations running_displays[display].append(p) #the only reason", "per display\"\"\" val=10 pass #should program onappstart onappclose #todo capture stdio on procs", "the port2dispaly_func ... #... in the code #display_is_port#friendly_display# # class keydefaultdict(defaultdict): # def", "display): super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self, display): super(displaydict, self).pop(display) self.removemapping(display) #procs assoc with", "exit if it #cant get the port. 
it gives back: #\"Can't listen: Error", "address: Address already in use\" #dont' p.wait raise Exception(\"couldn't start display\") #block until", "self.dc @staticmethod def __call__(self): return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn", "return ret class displaydict(defaultdict): #adding issues are covvered by add() def removemapping(self,display): port2display.pop(display2port.pop(display))", "busy s.close() return False except: return True # cr=s.connect_ex(('127.0.0.1', port)) # if cr==0:", "may want to set the limits after import >import display >display.DisplayLimit=10 \"\"\" import", "procs def get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class sequenceg(): #should", "ports\"\"\" ret= port-begin if ret < 0 or port<0: raise ValueError('neg values') return", ">displynum, port =display.add() >display.app('gedit',displaynum) #where gedit is a gtk3 app you may want", "import atexit import subprocess from collections import defaultdict from time import sleep import", "given not openable tries=0 while ( (isport_openable(port) is not True) ): tries+=1 ;", "not openable tries=0 while ( (isport_openable(port) is not True) ): tries+=1 ; #sometimes", "ret < 0 or port<0: raise ValueError('neg values') return ret def display_is_port(port): display=port", "nice # so just kill it \"\"\"stops display and everything running on it\"\"\"", "limits after import >import display >display.DisplayLimit=10 \"\"\" import signal import os import atexit", "get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('',0)) return s.getsockname()[1] class sequenceg(): #should have used", "display apps on the server forcefully ...that it knows about that is.\"\"\" for", "s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(('127.0.0.1',port)) #if can bind then not busy s.close()", "continue #registrations 
running_displays[display].append(p) #the only reason it's a... #...default dict.. do i really", "#sometimes it gets stuck here if #rapid requests if tries>10: return add(portgetter,block) #not", "#stop it if it become a problem if really is not True: return", "# class keydefaultdict(defaultdict): # def __missing__(self, key): # if self.default_factory is None: #", "is up by default\"\"\" display=p2df(port) if display in running_displays: raise KeyError('display server already", "i really need defaultdict? port2display[port]=display; display2port[display]=port # port->display should be 1 to 1", "in delthese: #in case empty list try: running_displays[adisplay].pop(an) #the process... # ..will be", "p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000): \"\"\"for wehn you want some 'web' ports\"\"\" ret= port-begin", "onappstart onappclose #todo capture stdio on procs def get_openport(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "some 'web' ports\"\"\" ret= port-begin if ret < 0 or port<0: raise ValueError('neg", "p2df=sequence port2display_function=p2df #don't use the port2dispaly_func ... #... in the code #display_is_port#friendly_display# #", "running_displays: raise ValueError('display does not exist') remove_zombie_apps() if (len(running_displays[display])-1)==ApplicationLimit.val: raise ApplicationLimit(ApplicationLimit.val) #kwargs['preexec_fn']=os.setpgid sp=subprocess.Popen(['./display.sh',cmd,str(display)]", "display #functions need to be one to one mappings bw out and in", "if it #cant get the port. 
it gives back: #\"Can't listen: Error binding", "by add() def removemapping(self,display): port2display.pop(display2port.pop(display)) def __delitem__(self, display): super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self,", "delthese=[] for adisplay in running_displays: for an,aproc in enumerate(running_displays[adisplay]): if an==0:continue #skip the", "usage: >displynum, port =display.add() >display.app('gedit',displaynum) #where gedit is a gtk3 app you may", "; return self.dc @staticmethod def __call__(self): return self.getdisplay(self) sequence=lambda p: sequenceg.__call__(sequenceg) def friendly_display(port,begin=8000):", "super(displaydict, self).__delitem__(display) self.removemapping(display) def pop(self, display): super(displaydict, self).pop(display) self.removemapping(display) #procs assoc with each", "add(port,block=True) not a good idea to specify a port def add(portgetter=get_openport ,block=True):#don't see", "scripting ugggh usage: >displynum, port =display.add() >display.app('gedit',displaynum) #where gedit is a gtk3 app", "#this could be in a repo on its own should have used a", "running_displays[display].append(p) #the only reason it's a... #...default dict.. do i really need defaultdict?" ]
[ "key, value): assert key not in value for neighbour in value: old_neighbour_value =", "are tuple of tuples eg ((1,2), (2,2)) Values are sets of keys eg", "points are merged and connected Keys are tuple of tuples eg ((1,2), (2,2))", "for neighbour in value: old_neighbour_value = self.get(neighbour, set()) old_neighbour_value.add(key) super().__setitem__(neighbour, old_neighbour_value) super().__setitem__(key, value)", "import UserDict class Graph(UserDict): \"\"\"Graph for holding which points are merged and connected", "merged and connected Keys are tuple of tuples eg ((1,2), (2,2)) Values are", "(2,2)) Values are sets of keys eg {((1,2), (2,2)), ((4,2), (1,4))} Relationships should", "((4,2), (1,4))} Relationships should be maintained in both directions \"\"\" def __delitem__(self, key):", "for neighbour in self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key, value):", "self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key, value): assert key not", "old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key, value): assert key not in", "class Graph(UserDict): \"\"\"Graph for holding which points are merged and connected Keys are", "eg ((1,2), (2,2)) Values are sets of keys eg {((1,2), (2,2)), ((4,2), (1,4))}", "((1,2), (2,2)) Values are sets of keys eg {((1,2), (2,2)), ((4,2), (1,4))} Relationships", "of tuples eg ((1,2), (2,2)) Values are sets of keys eg {((1,2), (2,2)),", "directions \"\"\" def __delitem__(self, key): for neighbour in self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key)", "neighbour in self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key, value): assert", "old_neighbour_value = self.get(neighbour, set()) 
old_neighbour_value.add(key) super().__setitem__(neighbour, old_neighbour_value) super().__setitem__(key, value) def __repr__(self): return f\"{type(self).__name__}{self.data}\"", "Keys are tuple of tuples eg ((1,2), (2,2)) Values are sets of keys", "in value: old_neighbour_value = self.get(neighbour, set()) old_neighbour_value.add(key) super().__setitem__(neighbour, old_neighbour_value) super().__setitem__(key, value) def __repr__(self):", "Values are sets of keys eg {((1,2), (2,2)), ((4,2), (1,4))} Relationships should be", "value): assert key not in value for neighbour in value: old_neighbour_value = self.get(neighbour,", "and connected Keys are tuple of tuples eg ((1,2), (2,2)) Values are sets", "neighbour in value: old_neighbour_value = self.get(neighbour, set()) old_neighbour_value.add(key) super().__setitem__(neighbour, old_neighbour_value) super().__setitem__(key, value) def", "keys eg {((1,2), (2,2)), ((4,2), (1,4))} Relationships should be maintained in both directions", "assert key not in value for neighbour in value: old_neighbour_value = self.get(neighbour, set())", "tuple of tuples eg ((1,2), (2,2)) Values are sets of keys eg {((1,2),", "should be maintained in both directions \"\"\" def __delitem__(self, key): for neighbour in", "def __setitem__(self, key, value): assert key not in value for neighbour in value:", "value: old_neighbour_value = self.get(neighbour, set()) old_neighbour_value.add(key) super().__setitem__(neighbour, old_neighbour_value) super().__setitem__(key, value) def __repr__(self): return", "holding which points are merged and connected Keys are tuple of tuples eg", "eg {((1,2), (2,2)), ((4,2), (1,4))} Relationships should be maintained in both directions \"\"\"", "__delitem__(self, key): for neighbour in self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self,", "old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key, value): assert 
key not in value for neighbour", "which points are merged and connected Keys are tuple of tuples eg ((1,2),", "{((1,2), (2,2)), ((4,2), (1,4))} Relationships should be maintained in both directions \"\"\" def", "key): for neighbour in self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key,", "be maintained in both directions \"\"\" def __delitem__(self, key): for neighbour in self[key]:", "self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key, value): assert key not in value for", "key not in value for neighbour in value: old_neighbour_value = self.get(neighbour, set()) old_neighbour_value.add(key)", "for holding which points are merged and connected Keys are tuple of tuples", "\"\"\" def __delitem__(self, key): for neighbour in self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key)", "super().__delitem__(key) def __setitem__(self, key, value): assert key not in value for neighbour in", "both directions \"\"\" def __delitem__(self, key): for neighbour in self[key]: old_neighbour_value = self[neighbour]", "UserDict class Graph(UserDict): \"\"\"Graph for holding which points are merged and connected Keys", "tuples eg ((1,2), (2,2)) Values are sets of keys eg {((1,2), (2,2)), ((4,2),", "collections import UserDict class Graph(UserDict): \"\"\"Graph for holding which points are merged and", "are merged and connected Keys are tuple of tuples eg ((1,2), (2,2)) Values", "Graph(UserDict): \"\"\"Graph for holding which points are merged and connected Keys are tuple", "in both directions \"\"\" def __delitem__(self, key): for neighbour in self[key]: old_neighbour_value =", "__setitem__(self, key, value): assert key not in value for neighbour in value: old_neighbour_value", "\"\"\"Graph for holding which points are merged and connected Keys are tuple of", "(1,4))} Relationships should be maintained in both 
directions \"\"\" def __delitem__(self, key): for", "def __delitem__(self, key): for neighbour in self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def", "connected Keys are tuple of tuples eg ((1,2), (2,2)) Values are sets of", "of keys eg {((1,2), (2,2)), ((4,2), (1,4))} Relationships should be maintained in both", "maintained in both directions \"\"\" def __delitem__(self, key): for neighbour in self[key]: old_neighbour_value", "= self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key, value): assert key not in value", "are sets of keys eg {((1,2), (2,2)), ((4,2), (1,4))} Relationships should be maintained", "not in value for neighbour in value: old_neighbour_value = self.get(neighbour, set()) old_neighbour_value.add(key) super().__setitem__(neighbour,", "(2,2)), ((4,2), (1,4))} Relationships should be maintained in both directions \"\"\" def __delitem__(self,", "from collections import UserDict class Graph(UserDict): \"\"\"Graph for holding which points are merged", "Relationships should be maintained in both directions \"\"\" def __delitem__(self, key): for neighbour", "value for neighbour in value: old_neighbour_value = self.get(neighbour, set()) old_neighbour_value.add(key) super().__setitem__(neighbour, old_neighbour_value) super().__setitem__(key,", "in self[key]: old_neighbour_value = self[neighbour] old_neighbour_value.remove(key) super().__delitem__(key) def __setitem__(self, key, value): assert key", "sets of keys eg {((1,2), (2,2)), ((4,2), (1,4))} Relationships should be maintained in", "in value for neighbour in value: old_neighbour_value = self.get(neighbour, set()) old_neighbour_value.add(key) super().__setitem__(neighbour, old_neighbour_value)" ]
[ "and merge sort basically import random def swap(A, i, j): A[i], A[j] =", "return A[k] elif j > k: hi = j - 1 else: lo", "swap(A, i, j): A[i], A[j] = A[j], A[i] def partition(A, lo, hi): pivot", "i == hi: break while A[j] > pivot: j -= 1 if j", "_ in range(test_case): number_of_elements = int(input()) A = [int(x) for x in input().strip().split('", "if j == k: return A[k] elif j > k: hi = j", "swap(A, i, j) swap(A, lo, j) print(A) return j def k_smallest(A, k): lo", "//using pivot element and partition and merge sort basically import random def swap(A,", "k): lo = 0 hi = len(A) - 1 k = k -", "- 1 k = k - 1 random.shuffle(A) while hi > lo: j", "= int(input()) A = [int(x) for x in input().strip().split(' ')] k = int(input())", "k: hi = j - 1 else: lo = j + 1 return", "if __name__ == '__main__': test_case = int(input()) for _ in range(test_case): number_of_elements =", "while A[j] > pivot: j -= 1 if j == lo: break if", "hi while True: while A[i] < pivot: i += 1 if i ==", "= partition(A, lo, hi) if j == k: return A[k] elif j >", "partition and merge sort basically import random def swap(A, i, j): A[i], A[j]", "== lo: break if j <= i: break swap(A, i, j) swap(A, lo,", "lo, hi): pivot = A[lo] i = lo + 1 j = hi", "1 if i == hi: break while A[j] > pivot: j -= 1", "while hi > lo: j = partition(A, lo, hi) if j == k:", "return j def k_smallest(A, k): lo = 0 hi = len(A) - 1", "= 0 hi = len(A) - 1 k = k - 1 random.shuffle(A)", "= j + 1 return A[k] if __name__ == '__main__': test_case = int(input())", "test_case = int(input()) for _ in range(test_case): number_of_elements = int(input()) A = [int(x)", "+= 1 if i == hi: break while A[j] > pivot: j -=", "lo + 1 j = hi while True: while A[i] < pivot: i", "1 k = k - 1 random.shuffle(A) while hi > lo: j =", "1 else: lo = j + 1 return A[k] if __name__ == '__main__':", "k: return A[k] elif j > k: hi = j - 1 else:", "-= 1 if j == lo: break if j <= i: break swap(A,", "i, j): A[i], A[j] = A[j], A[i] def partition(A, lo, hi): pivot =", "# 
//using pivot element and partition and merge sort basically import random def", "A[i] < pivot: i += 1 if i == hi: break while A[j]", "j + 1 return A[k] if __name__ == '__main__': test_case = int(input()) for", "hi = len(A) - 1 k = k - 1 random.shuffle(A) while hi", "in range(test_case): number_of_elements = int(input()) A = [int(x) for x in input().strip().split(' ')]", "def partition(A, lo, hi): pivot = A[lo] i = lo + 1 j", "<= i: break swap(A, i, j) swap(A, lo, j) print(A) return j def", "i = lo + 1 j = hi while True: while A[i] <", "= int(input()) for _ in range(test_case): number_of_elements = int(input()) A = [int(x) for", "lo, hi) if j == k: return A[k] elif j > k: hi", "basically import random def swap(A, i, j): A[i], A[j] = A[j], A[i] def", "random def swap(A, i, j): A[i], A[j] = A[j], A[i] def partition(A, lo,", "for _ in range(test_case): number_of_elements = int(input()) A = [int(x) for x in", "lo: j = partition(A, lo, hi) if j == k: return A[k] elif", "A[i] def partition(A, lo, hi): pivot = A[lo] i = lo + 1", "j def k_smallest(A, k): lo = 0 hi = len(A) - 1 k", "def swap(A, i, j): A[i], A[j] = A[j], A[i] def partition(A, lo, hi):", "i, j) swap(A, lo, j) print(A) return j def k_smallest(A, k): lo =", "1 random.shuffle(A) while hi > lo: j = partition(A, lo, hi) if j", "partition(A, lo, hi): pivot = A[lo] i = lo + 1 j =", "lo = j + 1 return A[k] if __name__ == '__main__': test_case =", "j > k: hi = j - 1 else: lo = j +", "return A[k] if __name__ == '__main__': test_case = int(input()) for _ in range(test_case):", "and partition and merge sort basically import random def swap(A, i, j): A[i],", "+ 1 j = hi while True: while A[i] < pivot: i +=", "pivot: i += 1 if i == hi: break while A[j] > pivot:", "j <= i: break swap(A, i, j) swap(A, lo, j) print(A) return j", "+ 1 return A[k] if __name__ == '__main__': test_case = int(input()) for _", "> pivot: j -= 1 if j == lo: break if j <=", "A[i], A[j] = A[j], A[i] def partition(A, lo, hi): pivot = A[lo] i", "True: 
while A[i] < pivot: i += 1 if i == hi: break", "swap(A, lo, j) print(A) return j def k_smallest(A, k): lo = 0 hi", "hi > lo: j = partition(A, lo, hi) if j == k: return", "hi) if j == k: return A[k] elif j > k: hi =", "= j - 1 else: lo = j + 1 return A[k] if", "> lo: j = partition(A, lo, hi) if j == k: return A[k]", "A = [int(x) for x in input().strip().split(' ')] k = int(input()) print(k_smallest(A, k))", "j = hi while True: while A[i] < pivot: i += 1 if", "0 hi = len(A) - 1 k = k - 1 random.shuffle(A) while", "def k_smallest(A, k): lo = 0 hi = len(A) - 1 k =", "lo: break if j <= i: break swap(A, i, j) swap(A, lo, j)", "== k: return A[k] elif j > k: hi = j - 1", "int(input()) for _ in range(test_case): number_of_elements = int(input()) A = [int(x) for x", "random.shuffle(A) while hi > lo: j = partition(A, lo, hi) if j ==", "> k: hi = j - 1 else: lo = j + 1", "int(input()) A = [int(x) for x in input().strip().split(' ')] k = int(input()) print(k_smallest(A,", "range(test_case): number_of_elements = int(input()) A = [int(x) for x in input().strip().split(' ')] k", "print(A) return j def k_smallest(A, k): lo = 0 hi = len(A) -", "= lo + 1 j = hi while True: while A[i] < pivot:", "j) print(A) return j def k_smallest(A, k): lo = 0 hi = len(A)", "- 1 random.shuffle(A) while hi > lo: j = partition(A, lo, hi) if", "= A[lo] i = lo + 1 j = hi while True: while", "A[j], A[i] def partition(A, lo, hi): pivot = A[lo] i = lo +", "break while A[j] > pivot: j -= 1 if j == lo: break", "merge sort basically import random def swap(A, i, j): A[i], A[j] = A[j],", "< pivot: i += 1 if i == hi: break while A[j] >", "i += 1 if i == hi: break while A[j] > pivot: j", "j -= 1 if j == lo: break if j <= i: break", "j): A[i], A[j] = A[j], A[i] def partition(A, lo, hi): pivot = A[lo]", "1 return A[k] if __name__ == '__main__': test_case = int(input()) for _ in", "lo = 0 hi = len(A) - 1 k = k - 1", "hi): pivot = A[lo] i = lo + 1 j = hi while", "hi: break while A[j] > pivot: j -= 1 if j == lo:", "- 
1 else: lo = j + 1 return A[k] if __name__ ==", "= A[j], A[i] def partition(A, lo, hi): pivot = A[lo] i = lo", "= len(A) - 1 k = k - 1 random.shuffle(A) while hi >", "k_smallest(A, k): lo = 0 hi = len(A) - 1 k = k", "while A[i] < pivot: i += 1 if i == hi: break while", "i: break swap(A, i, j) swap(A, lo, j) print(A) return j def k_smallest(A,", "else: lo = j + 1 return A[k] if __name__ == '__main__': test_case", "A[k] if __name__ == '__main__': test_case = int(input()) for _ in range(test_case): number_of_elements", "sort basically import random def swap(A, i, j): A[i], A[j] = A[j], A[i]", "elif j > k: hi = j - 1 else: lo = j", "hi = j - 1 else: lo = j + 1 return A[k]", "1 j = hi while True: while A[i] < pivot: i += 1", "element and partition and merge sort basically import random def swap(A, i, j):", "__name__ == '__main__': test_case = int(input()) for _ in range(test_case): number_of_elements = int(input())", "partition(A, lo, hi) if j == k: return A[k] elif j > k:", "A[lo] i = lo + 1 j = hi while True: while A[i]", "'__main__': test_case = int(input()) for _ in range(test_case): number_of_elements = int(input()) A =", "j - 1 else: lo = j + 1 return A[k] if __name__", "= k - 1 random.shuffle(A) while hi > lo: j = partition(A, lo,", "== '__main__': test_case = int(input()) for _ in range(test_case): number_of_elements = int(input()) A", "lo, j) print(A) return j def k_smallest(A, k): lo = 0 hi =", "pivot: j -= 1 if j == lo: break if j <= i:", "j) swap(A, lo, j) print(A) return j def k_smallest(A, k): lo = 0", "k = k - 1 random.shuffle(A) while hi > lo: j = partition(A,", "import random def swap(A, i, j): A[i], A[j] = A[j], A[i] def partition(A,", "pivot element and partition and merge sort basically import random def swap(A, i,", "if j == lo: break if j <= i: break swap(A, i, j)", "break swap(A, i, j) swap(A, lo, j) print(A) return j def k_smallest(A, k):", "number_of_elements = int(input()) A = [int(x) for x in input().strip().split(' ')] k =", "A[j] > pivot: j 
-= 1 if j == lo: break if j", "A[j] = A[j], A[i] def partition(A, lo, hi): pivot = A[lo] i =", "== hi: break while A[j] > pivot: j -= 1 if j ==", "1 if j == lo: break if j <= i: break swap(A, i,", "j = partition(A, lo, hi) if j == k: return A[k] elif j", "= hi while True: while A[i] < pivot: i += 1 if i", "j == k: return A[k] elif j > k: hi = j -", "pivot = A[lo] i = lo + 1 j = hi while True:", "len(A) - 1 k = k - 1 random.shuffle(A) while hi > lo:", "if j <= i: break swap(A, i, j) swap(A, lo, j) print(A) return", "while True: while A[i] < pivot: i += 1 if i == hi:", "if i == hi: break while A[j] > pivot: j -= 1 if", "k - 1 random.shuffle(A) while hi > lo: j = partition(A, lo, hi)", "j == lo: break if j <= i: break swap(A, i, j) swap(A,", "break if j <= i: break swap(A, i, j) swap(A, lo, j) print(A)", "A[k] elif j > k: hi = j - 1 else: lo =" ]
[ "try: raise Exception(\"Oh no!\") except Exception as error: print(\"Found an error.\") finally: #", "raise Exception(\"Oh no!\") except Exception as error: print(\"Found an error.\") finally: # ..." ]
[ "scraper.get(url, headers=header.generate()) soup = bs(req.text, 'html.parser') authenticity_token = soup.input.get('value') data = {'authenticity_token': authenticity_token,", "scraper(target) if choice == 2: target = input(f'Email Address: ') if checkEmail(target) ==", "CustomAdapter()) req = scraper.get(url, headers=header.generate()) soup = bs(req.text, 'html.parser') authenticity_token = soup.input.get('value') data", "bs(req.text, 'html.parser') authenticity_token = soup.input.get('value') data = {'authenticity_token': authenticity_token, 'account_identifier': target} cookies =", "'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if", "= bs(req.text, 'html.parser') authenticity_token = soup.input.get('value') data = {'authenticity_token': authenticity_token, 'account_identifier': target} cookies", "= str(info[0].text) phone = 'None' except Exception as e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email,", "\"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\":", "Gif Avatar Scraper ''') choice = int(input(f'{returnColor(\">\")} ')) if choice == 1: target", "if choice == 3: verifyScraper() if choice == 4: gifScraper() def brutedomain(email, chars)", "User Scraper {returnColor(\"[4]\")} Gif Avatar Scraper ''') choice = int(input(f'{returnColor(\">\")} ')) if choice", "= open('files/emails.txt', 'r').readlines() provider = email.split('@')[1] for domain in domain_file: domain = domain.rstrip()", "account with this email') if choice == 3: verifyScraper() if choice == 4:", "= scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2 = bs(response.text, 'html.parser') try: if ( 
soup2.find('div',", "headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer", "[name, email, phone] def checkUsername(username) -> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = {", "= f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\",", "target} cookies = req.cookies response = scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2 = bs(response.text,", "z{colours.darktext} | || (__| ' \\/ -_) _| / / '_| {colours.main} ᓚᘏᗢ", "else: _ = os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____ ___", "domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1] def", "'': bio = 'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs): ctx = create_urllib3_context()", "'r').readlines() provider = email.split('@')[1] for domain in domain_file: domain = domain.rstrip() if provider[0]", "'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5", "resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location == '': location =", "import BeautifulSoup as bs from requests.adapters import HTTPAdapter from fake_headers import Headers from", "= 'None' except 
Exception as e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None) return [name,", "'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone = int(info[0].text) email = str(info[1].text) except: email", "os, sys, threading from bs4 import BeautifulSoup as bs from requests.adapters import HTTPAdapter", "\"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android", "} resp = json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] in resp: pass except: try:", "\"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\": \"application/json\", \"dnt\": \"1\",", "exit(f'{colours.error}Rate Limit{colours.text}') except: pass try: info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}')", "= scraper.get(url, headers=header.generate()) soup = bs(req.text, 'html.parser') authenticity_token = soup.input.get('value') data = {'authenticity_token':", "this email') if choice == 3: verifyScraper() if choice == 4: gifScraper() def", "\"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty',", "created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location == '': location = 'Unknown' if bio ==", "_ = os.system('cls') else: _ = os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z", "import * from util.emails.core import * from util.scraper.core import * def banner(): if", "br\", \"accept-language\": 
\"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty',", "'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML,", "ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs ) try: url = 'https://twitter.com/account/begin_password_reset'", "found\" == err: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') else: print(err) except: print(f'{colours.error}•{colours.text}", "_/ __| |_ ___ __| |___ _ {colours.main} z{colours.darktext} | || (__| '", "print(f'{colours.error}•{colours.text} Username Not Found On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location", "= bs(response.text, 'html.parser') try: if ( soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please try again", "(Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36',", "' \\/ -_) _| / / '_| {colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text}", "Target Email {returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")} Gif Avatar Scraper ''') choice =", "try: if ( soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please try again later.' 
): exit(f'{colours.error}Rate", "_ _ {colours.main} {colours.darktext} |_ _/ __| |_ ___ __| |___ _ {colours.main}", "phone = 'None' except Exception as e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None) return", "as bs from requests.adapters import HTTPAdapter from fake_headers import Headers from urllib3.util.ssl_ import", "been {returnColor(\"Taken\")}') else: print(f'No account with this email') if choice == 3: verifyScraper()", "= 'Unknown' if bio == '': bio = 'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self,", "import Headers from urllib3.util.ssl_ import create_urllib3_context from util.core import * from util.emails.core import", "_| / / '_| {colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def", "class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs): ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx,", "{ \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\": \"application/json\",", "''') choice = int(input(f'{returnColor(\">\")} ')) if choice == 1: target = input(f'Username: {returnColor(\"@\")}')", "urllib3.util.ssl_ import create_urllib3_context from util.core import * from util.emails.core import * from util.scraper.core", "provider[0] == domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return", "\"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\", 
'origin':", "except: try: err = resp[\"errors\"][0][\"message\"] if \"Not found\" == err: print(f'{colours.error}•{colours.text} Username Not", "attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone = int(info[0].text) email = str(info[1].text) except:", "== 3: verifyScraper() if choice == 4: gifScraper() def brutedomain(email, chars) -> str:", "headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not found\" == resp[\"errors\"][0][\"message\"]: return False", "len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target: str)", "if choice == 2: target = input(f'Email Address: ') if checkEmail(target) == True:", "print(f'{colours.error}•{colours.text} Username Not Found On Twitter') else: print(err) except: print(f'{colours.error}•{colours.text} Username Not Found", "= { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA',", "'_| {colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")} Target", "= Headers(browser='chrome', os='win', headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req = scraper.get(url, headers=header.generate())", "resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = 
resp[\"data\"][\"user\"][\"id\"]", "Verified User Scraper {returnColor(\"[4]\")} Gif Avatar Scraper ''') choice = int(input(f'{returnColor(\">\")} ')) if", "fake_headers import Headers from urllib3.util.ssl_ import create_urllib3_context from util.core import * from util.emails.core", "**kwargs ) try: url = 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win', headers=True) scraper =", "\"Not found\" == err: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') else: print(err) except:", "resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location == '': location = 'Unknown' if bio == '': bio", "if bio == '': bio = 'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs):", "email, phone] def checkUsername(username) -> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\":", "Username Not Found On Twitter') else: print(err) except: print(f'{colours.error}•{colours.text} Username Not Found On", "from util.scraper.core import * def banner(): if os.name == 'nt': _ = os.system('cls')", "if provider[0] == domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}')", "'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like", "'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132", "email') if choice == 3: verifyScraper() if choice == 4: gifScraper() def brutedomain(email,", "*args, **kwargs): ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs ) try: url", "Headers from urllib3.util.ssl_ import 
create_urllib3_context from util.core import * from util.emails.core import *", "/ '_| {colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")}", "domain.rstrip() if provider[0] == domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain -", "{colours.main}z {colours.darktext} _____ ___ _ _ {colours.main} {colours.darktext} |_ _/ __| |_ ___", "== err: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') else: print(err) except: print(f'{colours.error}•{colours.text} Username", "resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not found\" == resp[\"errors\"][0][\"message\"]: return False else: return False", "resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location ==", "except: try: if \"Not found\" == resp[\"errors\"][0][\"message\"]: return False else: return False except:", "init_poolmanager(self, *args, **kwargs): ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs ) try:", "|_ _/ __| |_ ___ __| |___ _ {colours.main} z{colours.darktext} | || (__|", "Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"]", "== len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target: str) ->", "\"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 
'authorization': '<KEY>', \"content-type\": \"application/json\", \"dnt\":", "5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en'", "authenticity_token = soup.input.get('value') data = {'authenticity_token': authenticity_token, 'account_identifier': target} cookies = req.cookies response", "header = Headers(browser='chrome', os='win', headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req = scraper.get(url,", "__| |___ _ {colours.main} z{colours.darktext} | || (__| ' \\/ -_) _| /", "check == False: exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target) if choice", "requests, json, os, sys, threading from bs4 import BeautifulSoup as bs from requests.adapters", "'en' } resp = json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not", "print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____ ___ _ _ {colours.main} {colours.darktext}", "{colours.darktext} {colours.main}z {colours.darktext} _____ ___ _ _ {colours.main} {colours.darktext} |_ _/ __| |_", "os.name == 'nt': _ = os.system('cls') else: _ = os.system('clear') print (f''' {colours.text}{colours.darktext}", "twitterRequest = scraper(target) if choice == 2: target = input(f'Email Address: ') if", "Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"]", "with this email') if choice == 3: verifyScraper() if choice == 4: gifScraper()", "email = str(info[0].text) phone = 'None' except Exception as e: exit(f'{colours.error}{e}{colours.text}') email =", "User 
{returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")} Gif Avatar Scraper ''')", "choice == 2: target = input(f'Email Address: ') if checkEmail(target) == True: print(f'Email", "name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location == '':", "= email.split('@')[1] for domain in domain_file: domain = domain.rstrip() if provider[0] == domain[0]:", "{colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____ ___ _ _ {colours.main} {colours.darktext} |_ _/ __|", "AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site',", "( soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please try again later.' 
): exit(f'{colours.error}Rate Limit{colours.text}') except:", "'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except:", "if location == '': location = 'Unknown' if bio == '': bio =", "= os.system('cls') else: _ = os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext}", "= scraper(target) if choice == 2: target = input(f'Email Address: ') if checkEmail(target)", "cfscrape, requests, json, os, sys, threading from bs4 import BeautifulSoup as bs from", "\"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\",", "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp", "super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs ) try: url = 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome',", "|_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")} Target Email", "guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target: str) -> str: url", "if \"Not found\" == resp[\"errors\"][0][\"message\"]: return False else: return False except: return False", "deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 
'https://twitter.com', 'sec-fetch-dest':", "Scraper {returnColor(\"[4]\")} Gif Avatar Scraper ''') choice = int(input(f'{returnColor(\">\")} ')) if choice ==", "bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id", "int(input(f'{returnColor(\">\")} ')) if choice == 1: target = input(f'Username: {returnColor(\"@\")}') check = checkUsername(target)", "len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target: str) -> str:", "requests.adapters import HTTPAdapter from fake_headers import Headers from urllib3.util.ssl_ import create_urllib3_context from util.core", "'en' } resp = json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] in resp: pass except:", "= input(f'Username: {returnColor(\"@\")}') check = checkUsername(target) if check == False: exit('Invalid Handle') else:", "ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")}", "choice == 1: target = input(f'Username: {returnColor(\"@\")}') check = checkUsername(target) if check ==", "else: print(err) except: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers", "again later.' 
): exit(f'{colours.error}Rate Limit{colours.text}') except: pass try: info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong')", "{colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____ ___ _ _ {colours.main} {colours.darktext} |_ _/", "+ target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\",", "= create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs ) try: url = 'https://twitter.com/account/begin_password_reset' header", "deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com',", "\"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization':", "if choice == 1: target = input(f'Username: {returnColor(\"@\")}') check = checkUsername(target) if check", "''') def menu(): print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified User Scraper", "print(f'Email has been {returnColor(\"Taken\")}') else: print(f'No account with this email') if choice ==", "else: print(f'No account with this email') if choice == 3: verifyScraper() if choice", "+ \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\",", "cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req = scraper.get(url, headers=header.generate()) soup = bs(req.text, 'html.parser') authenticity_token =", "True: print(f'Email has been {returnColor(\"Taken\")}') else: print(f'No account 
with this email') if choice", "'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs): ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args,", "= json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not found\" == resp[\"errors\"][0][\"message\"]:", "== True: print(f'Email has been {returnColor(\"Taken\")}') else: print(f'No account with this email') if", "print(f'No account with this email') if choice == 3: verifyScraper() if choice ==", "|___ _ {colours.main} z{colours.darktext} | || (__| ' \\/ -_) _| / /", "from bs4 import BeautifulSoup as bs from requests.adapters import HTTPAdapter from fake_headers import", "str(info[1].text) except: email = str(info[0].text) phone = 'None' except Exception as e: exit(f'{colours.error}{e}{colours.text}')", "Exception as e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None) return [name, email, phone] def", "Limit{colours.text}') try: phone = int(info[0].text) email = str(info[1].text) except: email = str(info[0].text) phone", "soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone = int(info[0].text) email = str(info[1].text)", "= 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win', headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req", "err: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') else: print(err) except: print(f'{colours.error}•{colours.text} Username Not", "else: print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target) if choice == 2: target = input(f'Email", "Not Found On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = 
resp[\"data\"][\"user\"][\"legacy\"][\"location\"]", "Not Found On Twitter') else: print(err) except: print(f'{colours.error}•{colours.text} Username Not Found On Twitter')", "'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] in resp:", "{colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified", "= int(info[0].text) email = str(info[1].text) except: email = str(info[0].text) phone = 'None' except", "')) if choice == 1: target = input(f'Username: {returnColor(\"@\")}') check = checkUsername(target) if", "'None' except Exception as e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None) return [name, email,", "False: exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target) if choice == 2:", "'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try:", "return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not found\" == resp[\"errors\"][0][\"message\"]: return False else: return", "Address: ') if checkEmail(target) == True: print(f'Email has been {returnColor(\"Taken\")}') else: print(f'No account", "checkEmail(target) == True: print(f'Email has been {returnColor(\"Taken\")}') else: print(f'No account with this email')", "has been {returnColor(\"Taken\")}') else: print(f'No account with this email') if choice == 3:", "os.system('cls') else: _ = os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____", "== 2: target = input(f'Email Address: ') if checkEmail(target) == True: print(f'Email 
has", "choice = int(input(f'{returnColor(\">\")} ')) if choice == 1: target = input(f'Username: {returnColor(\"@\")}') check", "return guesses[-1] def scraper(target: str) -> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target +", "menu(): print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")} Gif", "chars) -> str: guesses = [] domain_file = open('files/emails.txt', 'r').readlines() provider = email.split('@')[1]", "scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req = scraper.get(url, headers=header.generate()) soup = bs(req.text, 'html.parser')", "choice == 4: gifScraper() def brutedomain(email, chars) -> str: guesses = [] domain_file", "str: guesses = [] domain_file = open('files/emails.txt', 'r').readlines() provider = email.split('@')[1] for domain", "brutedomain(email, chars) -> str: guesses = [] domain_file = open('files/emails.txt', 'r').readlines() provider =", "Id = resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location == '': location = 'Unknown'", "brutedomain(email, None) return [name, email, phone] def checkUsername(username) -> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\"", "{returnColor(\"[4]\")} Gif Avatar Scraper ''') choice = int(input(f'{returnColor(\">\")} ')) if choice == 1:", "headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req = scraper.get(url, headers=header.generate()) soup = bs(req.text,", "{colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified User", 
"e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None) return [name, email, phone] def checkUsername(username) ->", "1: target = input(f'Username: {returnColor(\"@\")}') check = checkUsername(target) if check == False: exit('Invalid", "util.core import * from util.emails.core import * from util.scraper.core import * def banner():", "util.emails.core import * from util.scraper.core import * def banner(): if os.name == 'nt':", "{colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")} Target User", "{email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target: str) -> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target", "'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors',", "Twitter') else: print(err) except: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"]", "choice == 3: verifyScraper() if choice == 4: gifScraper() def brutedomain(email, chars) ->", "'<KEY>', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site',", "_____ ___ _ _ {colours.main} {colours.darktext} |_ _/ __| |_ ___ __| |___", "-_) _| / / '_| {colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''')", "BeautifulSoup as bs from requests.adapters 
import HTTPAdapter from fake_headers import Headers from urllib3.util.ssl_", "= { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\":", "* from util.emails.core import * from util.scraper.core import * def banner(): if os.name", "resp: pass except: try: err = resp[\"errors\"][0][\"message\"] if \"Not found\" == err: print(f'{colours.error}•{colours.text}", "'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win', headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req =", "Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: return", "try: phone = int(info[0].text) email = str(info[1].text) except: email = str(info[0].text) phone =", "if \"Not found\" == err: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') else: print(err)", "import HTTPAdapter from fake_headers import Headers from urllib3.util.ssl_ import create_urllib3_context from util.core import", "scraper(target: str) -> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers =", "bio = 'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs): ctx = create_urllib3_context() super(CustomAdapter,", "bs4 import BeautifulSoup as bs from requests.adapters import HTTPAdapter from fake_headers import Headers", "print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target) if choice == 2: target = input(f'Email Address:", "def checkUsername(username) -> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = 
{ \"accept\": \"*/*\", \"accept-encoding\":", "'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko)", "4: gifScraper() def brutedomain(email, chars) -> str: guesses = [] domain_file = open('files/emails.txt',", "Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user':", "from urllib3.util.ssl_ import create_urllib3_context from util.core import * from util.emails.core import * from", "-> str: guesses = [] domain_file = open('files/emails.txt', 'r').readlines() provider = email.split('@')[1] for", "sys, threading from bs4 import BeautifulSoup as bs from requests.adapters import HTTPAdapter from", "== 'nt': _ = os.system('cls') else: _ = os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z", "resp = json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not found\" ==", "'Please try again later.' 
): exit(f'{colours.error}Rate Limit{colours.text}') except: pass try: info = soup2.find('ul',", "resp = json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] in resp: pass except: try: err", "- {email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target: str) -> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" +", "domain in domain_file: domain = domain.rstrip() if provider[0] == domain[0]: if len(provider.split('.')[0]) ==", "resp[\"errors\"][0][\"message\"] if \"Not found\" == err: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') else:", "exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None) return [name, email, phone] def checkUsername(username) -> str:", "phone = int(info[0].text) email = str(info[1].text) except: email = str(info[0].text) phone = 'None'", "json, os, sys, threading from bs4 import BeautifulSoup as bs from requests.adapters import", "os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____ ___ _ _ {colours.main}", "domain_file: domain = domain.rstrip() if provider[0] == domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain)", "pass try: info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone =", "url = 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win', headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter())", "like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url,", "create_urllib3_context from util.core import * from util.emails.core import * from 
util.scraper.core import *", "location == '': location = 'Unknown' if bio == '': bio = 'Unknown'", "Headers(browser='chrome', os='win', headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req = scraper.get(url, headers=header.generate()) soup", "except: email = str(info[0].text) phone = 'None' except Exception as e: exit(f'{colours.error}{e}{colours.text}') email", "os='win', headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req = scraper.get(url, headers=header.generate()) soup =", "2: target = input(f'Email Address: ') if checkEmail(target) == True: print(f'Email has been", "'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36", "if check == False: exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target) if", "= domain.rstrip() if provider[0] == domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain", "== '': bio = 'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs): ctx =", "input(f'Email Address: ') if checkEmail(target) == True: print(f'Email has been {returnColor(\"Taken\")}') else: print(f'No", "attrs={'class': 'is-errored'}).text == 'Please try again later.' 
): exit(f'{colours.error}Rate Limit{colours.text}') except: pass try:", "resp[\"data\"][\"user\"][\"id\"] in resp: pass except: try: err = resp[\"errors\"][0][\"message\"] if \"Not found\" ==", "CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs): ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs", "{'authenticity_token': authenticity_token, 'account_identifier': target} cookies = req.cookies response = scraper.post(url, cookies=cookies, data=data, headers=header.generate())", "{colours.darktext} |_ _/ __| |_ ___ __| |___ _ {colours.main} z{colours.darktext} | ||", "return [name, email, phone] def checkUsername(username) -> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers =", "(KHTML, like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp =", "cookies=cookies, data=data, headers=header.generate()) soup2 = bs(response.text, 'html.parser') try: if ( soup2.find('div', attrs={'class': 'is-errored'}).text", "headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] in resp: pass except: try: err = resp[\"errors\"][0][\"message\"] if", "gifScraper() def brutedomain(email, chars) -> str: guesses = [] domain_file = open('files/emails.txt', 'r').readlines()", ") try: url = 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win', headers=True) scraper = cfscrape.create_scraper()", "| || (__| ' \\/ -_) _| / / '_| {colours.main} ᓚᘏᗢ {colours.darktext}", "try: err = resp[\"errors\"][0][\"message\"] if \"Not found\" == err: print(f'{colours.error}•{colours.text} Username Not Found", "'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] 
in", "print(err) except: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers =", "= cfscrape.create_scraper() scraper.mount('https://', CustomAdapter()) req = scraper.get(url, headers=header.generate()) soup = bs(req.text, 'html.parser') authenticity_token", "resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"]", "= checkUsername(target) if check == False: exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest =", "'html.parser') authenticity_token = soup.input.get('value') data = {'authenticity_token': authenticity_token, 'account_identifier': target} cookies = req.cookies", "'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] in resp: pass", "soup = bs(req.text, 'html.parser') authenticity_token = soup.input.get('value') data = {'authenticity_token': authenticity_token, 'account_identifier': target}", "in domain_file: domain = domain.rstrip() if provider[0] == domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]):", "followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created", "int(info[0].text) email = str(info[1].text) except: email = str(info[0].text) phone = 'None' except Exception", "from fake_headers import Headers from urllib3.util.ssl_ import create_urllib3_context from util.core import * from", "*args, ssl_context=ctx, **kwargs ) try: url = 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win', headers=True)", 
"data = {'authenticity_token': authenticity_token, 'account_identifier': target} cookies = req.cookies response = scraper.post(url, cookies=cookies,", "'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0;", "try: if \"Not found\" == resp[\"errors\"][0][\"message\"]: return False else: return False except: return", "import * from util.scraper.core import * def banner(): if os.name == 'nt': _", "\"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors',", "target = input(f'Username: {returnColor(\"@\")}') check = checkUsername(target) if check == False: exit('Invalid Handle')", "\"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\",", "Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"]", "6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes',", "= int(input(f'{returnColor(\">\")} ')) if choice == 1: target = input(f'Username: {returnColor(\"@\")}') check =", "Username Not Found On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location =", "info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone = int(info[0].text) email", "from requests.adapters import HTTPAdapter from 
fake_headers import Headers from urllib3.util.ssl_ import create_urllib3_context from", "'html.parser') try: if ( soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please try again later.' ):", "if checkEmail(target) == True: print(f'Email has been {returnColor(\"Taken\")}') else: print(f'No account with this", "= resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id =", "import create_urllib3_context from util.core import * from util.emails.core import * from util.scraper.core import", "Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target) if choice == 2: target =", "') if checkEmail(target) == True: print(f'Email has been {returnColor(\"Taken\")}') else: print(f'No account with", "target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\":", "checkUsername(target) if check == False: exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target)", "soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please try again later.' 
): exit(f'{colours.error}Rate Limit{colours.text}') except: pass", "req = scraper.get(url, headers=header.generate()) soup = bs(req.text, 'html.parser') authenticity_token = soup.input.get('value') data =", "Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target: str) -> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\"", "authenticity_token, 'account_identifier': target} cookies = req.cookies response = scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2", "= [] domain_file = open('files/emails.txt', 'r').readlines() provider = email.split('@')[1] for domain in domain_file:", "checkUsername(username) -> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip,", "str(info[0].text) phone = 'None' except Exception as e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None)", "{returnColor(check)}') twitterRequest = scraper(target) if choice == 2: target = input(f'Email Address: ')", "Found On Twitter') else: print(err) except: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') bio", "create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs ) try: url = 'https://twitter.com/account/begin_password_reset' header =", "\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate,", "'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus", 
"f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization':", "scraper.mount('https://', CustomAdapter()) req = scraper.get(url, headers=header.generate()) soup = bs(req.text, 'html.parser') authenticity_token = soup.input.get('value')", "def menu(): print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")}", "location = 'Unknown' if bio == '': bio = 'Unknown' class CustomAdapter(HTTPAdapter): def", "print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")} Gif Avatar", "= resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location == '': location = 'Unknown' if bio == '':", "email = str(info[1].text) except: email = str(info[0].text) phone = 'None' except Exception as", "== 4: gifScraper() def brutedomain(email, chars) -> str: guesses = [] domain_file =", "'': location = 'Unknown' if bio == '': bio = 'Unknown' class CustomAdapter(HTTPAdapter):", "scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2 = bs(response.text, 'html.parser') try: if ( soup2.find('div', attrs={'class':", "__| |_ ___ __| |___ _ {colours.main} z{colours.darktext} | || (__| ' \\/", "_ = os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____ ___ _", "= str(info[1].text) except: email = str(info[0].text) phone = 'None' except Exception as e:", "bs from requests.adapters import HTTPAdapter from fake_headers import Headers from urllib3.util.ssl_ import create_urllib3_context", "later.' 
): exit(f'{colours.error}Rate Limit{colours.text}') except: pass try: info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except:", "import * def banner(): if os.name == 'nt': _ = os.system('cls') else: _", "= 'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs): ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager(", "(__| ' \\/ -_) _| / / '_| {colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_|", "try: if resp[\"data\"][\"user\"][\"id\"] in resp: pass except: try: err = resp[\"errors\"][0][\"message\"] if \"Not", "guesses = [] domain_file = open('files/emails.txt', 'r').readlines() provider = email.split('@')[1] for domain in", "== False: exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target) if choice ==", "if resp[\"data\"][\"user\"][\"id\"] in resp: pass except: try: err = resp[\"errors\"][0][\"message\"] if \"Not found\"", "/ / '_| {colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu():", "= json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] in resp: pass except: try: err =", "Email {returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")} Gif Avatar Scraper ''') choice = int(input(f'{returnColor(\">\")}", "On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name =", "|_ ___ __| |___ _ {colours.main} z{colours.darktext} | || (__| ' \\/ -_)", "{colours.darktext} _____ ___ _ _ {colours.main} {colours.darktext} |_ _/ __| |_ ___ __|", "Scraper ''') choice = int(input(f'{returnColor(\">\")} ')) if choice == 1: target = input(f'Username:", 
"json.loads(requests.get(url, headers=headers).text) try: if resp[\"data\"][\"user\"][\"id\"] in resp: pass except: try: err = resp[\"errors\"][0][\"message\"]", "headers=header.generate()) soup2 = bs(response.text, 'html.parser') try: if ( soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please", "domain = domain.rstrip() if provider[0] == domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible", "location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if", "import cfscrape, requests, json, os, sys, threading from bs4 import BeautifulSoup as bs", "def banner(): if os.name == 'nt': _ = os.system('cls') else: _ = os.system('clear')", "response = scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2 = bs(response.text, 'html.parser') try: if (", "json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not found\" == resp[\"errors\"][0][\"message\"]: return", "open('files/emails.txt', 'r').readlines() provider = email.split('@')[1] for domain in domain_file: domain = domain.rstrip() if", "3: verifyScraper() if choice == 4: gifScraper() def brutedomain(email, chars) -> str: guesses", "url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\":", "def init_poolmanager(self, *args, **kwargs): ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs )", "== '': location = 'Unknown' if bio == '': bio = 'Unknown' class", "'is-errored'}).text == 'Please try again 
later.' ): exit(f'{colours.error}Rate Limit{colours.text}') except: pass try: info", "threading from bs4 import BeautifulSoup as bs from requests.adapters import HTTPAdapter from fake_headers", "cookies = req.cookies response = scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2 = bs(response.text, 'html.parser')", "= input(f'Email Address: ') if checkEmail(target) == True: print(f'Email has been {returnColor(\"Taken\")}') else:", "try: info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone = int(info[0].text)", "in resp: pass except: try: err = resp[\"errors\"][0][\"message\"] if \"Not found\" == err:", "{ \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\":", "-> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate,", "{colours.main} {colours.darktext} |_ _/ __| |_ ___ __| |___ _ {colours.main} z{colours.darktext} |", "{returnColor(\"Taken\")}') else: print(f'No account with this email') if choice == 3: verifyScraper() if", "str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\",", "\\/ -_) _| / / '_| {colours.main} ᓚᘏᗢ {colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext}", "\"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\": 
\"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode':", "= resp[\"errors\"][0][\"message\"] if \"Not found\" == err: print(f'{colours.error}•{colours.text} Username Not Found On Twitter')", "headers=header.generate()) soup = bs(req.text, 'html.parser') authenticity_token = soup.input.get('value') data = {'authenticity_token': authenticity_token, 'account_identifier':", "{returnColor(\"@\")}') check = checkUsername(target) if check == False: exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}')", "'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site':", "if choice == 4: gifScraper() def brutedomain(email, chars) -> str: guesses = []", "def scraper(target: str) -> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers", "try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not found\" == resp[\"errors\"][0][\"message\"]: return False else:", "|| (__| ' \\/ -_) _| / / '_| {colours.main} ᓚᘏᗢ {colours.darktext} |_|", "email = brutedomain(email, None) return [name, email, phone] def checkUsername(username) -> str: url", "= req.cookies response = scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2 = bs(response.text, 'html.parser') try:", "except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone = int(info[0].text) email = str(info[1].text) except: email =", "\\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")} Target User 
{returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")}", "Avatar Scraper ''') choice = int(input(f'{returnColor(\">\")} ')) if choice == 1: target =", "for domain in domain_file: domain = domain.rstrip() if provider[0] == domain[0]: if len(provider.split('.')[0])", "'Unknown' if bio == '': bio = 'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args,", "= resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location == '': location", "bs(response.text, 'html.parser') try: if ( soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please try again later.'", "_ {colours.main} {colours.darktext} |_ _/ __| |_ ___ __| |___ _ {colours.main} z{colours.darktext}", "headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>',", "from util.core import * from util.emails.core import * from util.scraper.core import * def", "'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Mobile", "Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language':", "ssl_context=ctx, **kwargs ) try: url = 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win', headers=True) scraper", "if ( soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please try again later.' 
): exit(f'{colours.error}Rate Limit{colours.text}')", "* from util.scraper.core import * def banner(): if os.name == 'nt': _ =", "{colours.main} z{colours.darktext} | || (__| ' \\/ -_) _| / / '_| {colours.main}", "str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip, deflate, br\",", "= os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____ ___ _ _", "verifyScraper() if choice == 4: gifScraper() def brutedomain(email, chars) -> str: guesses =", "banner(): if os.name == 'nt': _ = os.system('cls') else: _ = os.system('clear') print", "= brutedomain(email, None) return [name, email, phone] def checkUsername(username) -> str: url =", "___ _ _ {colours.main} {colours.darktext} |_ _/ __| |_ ___ __| |___ _", "print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target: str) -> str: url =", "**kwargs): ctx = create_urllib3_context() super(CustomAdapter, self).init_poolmanager( *args, ssl_context=ctx, **kwargs ) try: url =", "util.scraper.core import * def banner(): if os.name == 'nt': _ = os.system('cls') else:", "br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest':", "== domain[0]: if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1]", "= \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + 
\"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\": \"gzip,", "= soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone = int(info[0].text) email =", "def brutedomain(email, chars) -> str: guesses = [] domain_file = open('files/emails.txt', 'r').readlines() provider", "} resp = json.loads(requests.get(url, headers=headers).text) try: return resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] except: try: if \"Not found\"", "_ {colours.main} z{colours.darktext} | || (__| ' \\/ -_) _| / / '_|", "{colours.darktext} |_| \\___|_||_\\___\\__|_\\_\\_| {colours.sencondary}0.1{colours.text} {colours.darktext}▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬{colours.darktext} ''') def menu(): print(f'''{returnColor(\"[1]\")} Target User {returnColor(\"[2]\")} Target", "self).init_poolmanager( *args, ssl_context=ctx, **kwargs ) try: url = 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win',", "<gh_stars>10-100 import cfscrape, requests, json, os, sys, threading from bs4 import BeautifulSoup as", "from util.emails.core import * from util.scraper.core import * def banner(): if os.name ==", "\"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0", "resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location == '': location = 'Unknown' if bio", "[] domain_file = open('files/emails.txt', 'r').readlines() provider = email.split('@')[1] for domain in domain_file: domain", "* def banner(): if os.name == 'nt': _ = os.system('cls') else: _ =", "On Twitter') else: print(err) except: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') bio =", "== 1: target = input(f'Username: {returnColor(\"@\")}') check = 
checkUsername(target) if check == False:", "soup2 = bs(response.text, 'html.parser') try: if ( soup2.find('div', attrs={'class': 'is-errored'}).text == 'Please try", "if len(provider.split('.')[0]) == len(domain.split('.')[0]): guesses.append(email.split('@')[0]+\"@\"+domain) print(f'Possible Domain - {email.split(\"@\")[0]}@{domain}') return guesses[-1] def scraper(target:", "req.cookies response = scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2 = bs(response.text, 'html.parser') try: if", "phone] def checkUsername(username) -> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\",", "\"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux;", "except: print(f'{colours.error}•{colours.text} Username Not Found On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"]", "{returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")} Gif Avatar Scraper ''') choice = int(input(f'{returnColor(\">\")} '))", "bio == '': bio = 'Unknown' class CustomAdapter(HTTPAdapter): def init_poolmanager(self, *args, **kwargs): ctx", "str) -> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = {", "= resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if location", "HTTPAdapter from fake_headers import Headers from urllib3.util.ssl_ import create_urllib3_context from util.core import *", "pass except: try: err = 
resp[\"errors\"][0][\"message\"] if \"Not found\" == err: print(f'{colours.error}•{colours.text} Username", "Limit{colours.text}') except: pass try: info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try:", "'authorization': '<KEY>', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site':", "input(f'Username: {returnColor(\"@\")}') check = checkUsername(target) if check == False: exit('Invalid Handle') else: print(f'Registed:", "None) return [name, email, phone] def checkUsername(username) -> str: url = f\"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22{username}%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers", "\"en-US,en;q=0.9,bn;q=0.8\", 'authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode':", "exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest = scraper(target) if choice == 2: target", "target = input(f'Email Address: ') if checkEmail(target) == True: print(f'Email has been {returnColor(\"Taken\")}')", "err = resp[\"errors\"][0][\"message\"] if \"Not found\" == err: print(f'{colours.error}•{colours.text} Username Not Found On", "-> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\":", "check = checkUsername(target) if check == False: exit('Invalid Handle') else: print(f'Registed: {returnColor(check)}') twitterRequest", "= resp[\"data\"][\"user\"][\"id\"] created = resp[\"data\"][\"user\"][\"legacy\"][\"created_at\"] if 
location == '': location = 'Unknown' if", "as e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None) return [name, email, phone] def checkUsername(username)", "Found On Twitter') bio = resp[\"data\"][\"user\"][\"legacy\"][\"description\"] followers = resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name", "(f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext} {colours.main}z {colours.darktext} _____ ___ _ _ {colours.main} {colours.darktext} |_", "soup.input.get('value') data = {'authenticity_token': authenticity_token, 'account_identifier': target} cookies = req.cookies response = scraper.post(url,", "url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\" headers = { \"accept\": \"*/*\", \"accept-encoding\":", "Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' }", "Target User {returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")} Gif Avatar Scraper", "Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try: if", "= soup.input.get('value') data = {'authenticity_token': authenticity_token, 'account_identifier': target} cookies = req.cookies response =", "email.split('@')[1] for domain in domain_file: domain = domain.rstrip() if provider[0] == domain[0]: if", "Gecko) Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text)", "'account_identifier': target} cookies = req.cookies response = scraper.post(url, cookies=cookies, data=data, headers=header.generate()) soup2 =", 
"except Exception as e: exit(f'{colours.error}{e}{colours.text}') email = brutedomain(email, None) return [name, email, phone]", "{returnColor(\"[2]\")} Target Email {returnColor(\"[3]\")} Verified User Scraper {returnColor(\"[4]\")} Gif Avatar Scraper ''') choice", "guesses[-1] def scraper(target: str) -> str: url = \"https://api.twitter.com/graphql/P8ph10GzBbdMqWZxulqCfA/UserByScreenName?variables=%7B%22screen_name%22%3A%22\" + target + \"%22%2C%22withHighlightedLabel%22%3Atrue%7D\"", "___ __| |___ _ {colours.main} z{colours.darktext} | || (__| ' \\/ -_) _|", "'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N)", "exit(f'{colours.error}Rate Limit{colours.text}') try: phone = int(info[0].text) email = str(info[1].text) except: email = str(info[0].text)", "\"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'same-site', 'user-agent':", "): exit(f'{colours.error}Rate Limit{colours.text}') except: pass try: info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate", "= resp[\"data\"][\"user\"][\"legacy\"][\"followers_count\"] location = resp[\"data\"][\"user\"][\"legacy\"][\"location\"] name = resp[\"data\"][\"user\"][\"legacy\"][\"name\"] Id = resp[\"data\"][\"user\"][\"id\"] created =", "if os.name == 'nt': _ = os.system('cls') else: _ = os.system('clear') print (f'''", "\"accept-encoding\": \"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin':", "== 'Please try again later.' 
): exit(f'{colours.error}Rate Limit{colours.text}') except: pass try: info =", "domain_file = open('files/emails.txt', 'r').readlines() provider = email.split('@')[1] for domain in domain_file: domain =", "'nt': _ = os.system('cls') else: _ = os.system('clear') print (f''' {colours.text}{colours.darktext} {colours.main}z {colours.darktext}", "try: url = 'https://twitter.com/account/begin_password_reset' header = Headers(browser='chrome', os='win', headers=True) scraper = cfscrape.create_scraper() scraper.mount('https://',", "= {'authenticity_token': authenticity_token, 'account_identifier': target} cookies = req.cookies response = scraper.post(url, cookies=cookies, data=data,", "try again later.' ): exit(f'{colours.error}Rate Limit{colours.text}') except: pass try: info = soup2.find('ul', attrs={'class':", "Chrome/80.0.3987.132 Mobile Safari/537.36', 'x-twitter-active-user': 'yes', 'x-twitter-client-language': 'en' } resp = json.loads(requests.get(url, headers=headers).text) try:", "provider = email.split('@')[1] for domain in domain_file: domain = domain.rstrip() if provider[0] ==", "except: pass try: info = soup2.find('ul', attrs={'class': 'Form-radioList'}).findAll('strong') except: exit(f'{colours.error}Rate Limit{colours.text}') try: phone", "\"gzip, deflate, br\", \"accept-language\": \"en-US,en;q=0.9,bn;q=0.8\", 'authorization': '<KEY>', \"content-type\": \"application/json\", \"dnt\": \"1\", 'origin': 'https://twitter.com',", "data=data, headers=header.generate()) soup2 = bs(response.text, 'html.parser') try: if ( soup2.find('div', attrs={'class': 'is-errored'}).text ==" ]
[ "(isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)): raise TypeError('The first argument is not a string.\\n'", "(target_filename + '.json' in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists already. Exiting.') exit() source_ucc_dict", "is not a string.\\n' + usage) target_filename = sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files", "target_filename = sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename + '.json'", "source_filename = sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename + '.json' in existing_ucc_reoptimized_files): print('The", "import optimize_ucc from convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR =", "<reponame>cgyurik/qml_for_qchem<gh_stars>0 import os import sys import json # pylint: disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(", "<target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is not 3: raise Exception(usage) if not (isinstance(sys.argv[1],", "isinstance(sys.argv[1], str)): raise TypeError('The first argument is not a string.\\n' + usage) target_filename", "pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/' usage = ('Usage: python {} <target_filename>", "= load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving data to file.')", "convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR", "sys import json # pylint: disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from 
convoQC.scripts.optimize_ucc", "with open(UCC_REOPT_DIR + target_filename + '.json', 'wt') as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array) print(*((k,", "data/ucc_reoptimized/{}.json exists already. Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict =", "init_params = source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving data to file.') with open(UCC_REOPT_DIR", "= sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename + '.json' in", "is not 3: raise Exception(usage) if not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)): raise", "as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array) print(*((k, v) for k, v in target_ucc_dict.items()), sep='\\n')", "import sys import json # pylint: disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from", "+ '.json' in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists already. 
Exiting.') exit() source_ucc_dict =", "file.') with open(UCC_REOPT_DIR + target_filename + '.json', 'wt') as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array)", "<init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is not 3: raise Exception(usage) if not (isinstance(sys.argv[1], str)", "+ target_filename + '.json', 'wt') as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array) print(*((k, v) for", "target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving data to file.') with open(UCC_REOPT_DIR + target_filename +", "('Usage: python {} <target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is not 3: raise Exception(usage)", "not a string.\\n' + usage) target_filename = sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files =", "usage = ('Usage: python {} <target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is not 3:", "os import sys import json # pylint: disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR)", "disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils import", "raise Exception(usage) if not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)): raise TypeError('The first argument", "= sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename + '.json' in existing_ucc_reoptimized_files): print('The file", "exists already. 
Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename,", "TypeError('The first argument is not a string.\\n' + usage) target_filename = sys.argv[1] source_filename", "argument is not a string.\\n' + usage) target_filename = sys.argv[1] source_filename = sys.argv[2]", "usage) target_filename = sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename +", "+ '.json', 'wt') as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array) print(*((k, v) for k, v", "import json # pylint: disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import", "encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/' usage = ('Usage: python", "init_params) print('saving data to file.') with open(UCC_REOPT_DIR + target_filename + '.json', 'wt') as", "load_ucc_data, DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/' usage =", "print('saving data to file.') with open(UCC_REOPT_DIR + target_filename + '.json', 'wt') as f:", "DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/' usage = ('Usage:", "file data/ucc_reoptimized/{}.json exists already. Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict", "Exception(usage) if not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)): raise TypeError('The first argument is", "already. 
Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params)", "'.json', 'wt') as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array) print(*((k, v) for k, v in", "raise TypeError('The first argument is not a string.\\n' + usage) target_filename = sys.argv[1]", "{} <target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is not 3: raise Exception(usage) if not", "python {} <target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is not 3: raise Exception(usage) if", "string.\\n' + usage) target_filename = sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if", "pylint: disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils", "'wt') as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array) print(*((k, v) for k, v in target_ucc_dict.items()),", "not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)): raise TypeError('The first argument is not a", "load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving data to file.') with", "not 3: raise Exception(usage) if not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)): raise TypeError('The", "str)): raise TypeError('The first argument is not a string.\\n' + usage) target_filename =", "sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename + '.json' in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json", "DATA_DIR + 'ucc_reoptimized/' usage = ('Usage: python {} <target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv)", 
"enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/' usage = ('Usage: python {} <target_filename> <init_params_filename>'", "optimize_ucc(target_filename, init_params) print('saving data to file.') with open(UCC_REOPT_DIR + target_filename + '.json', 'wt')", "+ usage) target_filename = sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename", "from convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position", "+ 'ucc_reoptimized/' usage = ('Usage: python {} <target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is", "from convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR +", "import load_ucc_data, DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/' usage", "target_filename + '.json', 'wt') as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array) print(*((k, v) for k,", "print('The file data/ucc_reoptimized/{}.json exists already. 
Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename) init_params = source_ucc_dict['params']", "os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array #", "convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/'", "data to file.') with open(UCC_REOPT_DIR + target_filename + '.json', 'wt') as f: json.dump(target_ucc_dict,", "Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving", "open(UCC_REOPT_DIR + target_filename + '.json', 'wt') as f: json.dump(target_ucc_dict, f, default=encode_complex_and_array) print(*((k, v)", "os.listdir(UCC_REOPT_DIR) if (target_filename + '.json' in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists already. Exiting.')", "if not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)): raise TypeError('The first argument is not", "and isinstance(sys.argv[1], str)): raise TypeError('The first argument is not a string.\\n' + usage)", "'.json' in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists already. 
Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename)", "= DATA_DIR + 'ucc_reoptimized/' usage = ('Usage: python {} <target_filename> <init_params_filename>' .format(sys.argv[0])) if", "sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array # pylint:", "first argument is not a string.\\n' + usage) target_filename = sys.argv[1] source_filename =", "= os.listdir(UCC_REOPT_DIR) if (target_filename + '.json' in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists already.", "to file.') with open(UCC_REOPT_DIR + target_filename + '.json', 'wt') as f: json.dump(target_ucc_dict, f,", "exit() source_ucc_dict = load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving data", "UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/' usage = ('Usage: python {} <target_filename> <init_params_filename>' .format(sys.argv[0]))", "existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists already. 
Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename) init_params =", "if len(sys.argv) is not 3: raise Exception(usage) if not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1],", "source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving data to file.') with open(UCC_REOPT_DIR + target_filename", "optimize_ucc from convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array # pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR", "source_ucc_dict = load_ucc_data(source_filename) init_params = source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving data to", "# pylint: enable=wrong-import-position UCC_REOPT_DIR = DATA_DIR + 'ucc_reoptimized/' usage = ('Usage: python {}", "= optimize_ucc(target_filename, init_params) print('saving data to file.') with open(UCC_REOPT_DIR + target_filename + '.json',", "str) and isinstance(sys.argv[1], str)): raise TypeError('The first argument is not a string.\\n' +", "# pylint: disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc from", "len(sys.argv) is not 3: raise Exception(usage) if not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)):", "sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename + '.json' in existing_ucc_reoptimized_files):", "3: raise Exception(usage) if not (isinstance(sys.argv[1], str) and isinstance(sys.argv[1], str)): raise TypeError('The first", "os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils import load_ucc_data, DATA_DIR, encode_complex_and_array", "json # pylint: disable=wrong-import-position ROOT_DIR = 
os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc", "a string.\\n' + usage) target_filename = sys.argv[1] source_filename = sys.argv[2] existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR)", "'ucc_reoptimized/' usage = ('Usage: python {} <target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is not", ".format(sys.argv[0])) if len(sys.argv) is not 3: raise Exception(usage) if not (isinstance(sys.argv[1], str) and", "= ('Usage: python {} <target_filename> <init_params_filename>' .format(sys.argv[0])) if len(sys.argv) is not 3: raise", "= source_ucc_dict['params'] target_ucc_dict = optimize_ucc(target_filename, init_params) print('saving data to file.') with open(UCC_REOPT_DIR +", "= os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils import load_ucc_data, DATA_DIR,", "if (target_filename + '.json' in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists already. Exiting.') exit()", "in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists already. Exiting.') exit() source_ucc_dict = load_ucc_data(source_filename) init_params", "ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) sys.path.append(ROOT_DIR) from convoQC.scripts.optimize_ucc import optimize_ucc from convoQC.utils import load_ucc_data,", "existing_ucc_reoptimized_files = os.listdir(UCC_REOPT_DIR) if (target_filename + '.json' in existing_ucc_reoptimized_files): print('The file data/ucc_reoptimized/{}.json exists", "import os import sys import json # pylint: disable=wrong-import-position ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__))))" ]
[]
[]
[ "in range(0, len(Data.keys())): if len(Data.keys()) == 1: aux = list(Data.keys())[l] rowNAME = str(aux)", "+ str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' \\ + str(float(C[o, [int(p", "else: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN + ',' +", "the same lenght. So no printing.') print('Variables to print metadata.') print('The number of", "is int: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is np.ndarray: try:", "row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for m", "str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) +'}') else:", "\\ ' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$ ( ' +", "in database dictionary.') Data[str(varname)] = vardata # Searching the biggest vector in Data", "aux2 = 'The very first time importing database is compulsury. This can be", "str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' + str(float(C[o, [int(p + 2)]]))", "########################################################################################## ########################################################################################## # Preview LaTeX table. ################################################################### def PreviewTableTeX(MAT, CAP, SYM, UNI, ppath):", "import stats import sys import time from termcolor import colored, cprint import subprocess", "1)]] = B[:, [2]] with open(ppath + 'preview_TeX' + '.tex', 'w') as aux:", "= aux1 + 'Var[' + str(l+1) + '] - Symbol: ' + var[l][\"sym\"]", "'{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') ########################################################################################## def SetUp(): print('Setting Current", "Errorbars. 
aux.write('EL=ErrorListPlot[{\\n'); p = 0 for o in range(0, f): if o ==", "str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN) else: if m == 0: if l <= len((Data[list(Data.keys())[m]]['dat']))", "10^{\\\\infty} $ ') else: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) +", "ending enviroment. aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview LaTeX table.", "import numpy as np import csv import re import os import ast import", "',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN", "'\\n' aux1 = aux1 + 'Var[' + str(l+1) + '] - Units :", "',' + ' ' + '\\n' rowSYUN = rowSYUN + ',' + \\", "aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0, int((1 - 1 / 2)", "for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 +", "= list(Data.keys())[l] rowNAME = str(aux) + ',' + ' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym'])", "= MAT.shape C = np.zeros([f, int((3 / 2) * (c))], ) for l", "CAP, SYM, UNI, ppath): f, c = MAT.shape if c==4: with open(ppath +", "SYM[0] + '(' + UNI[0] + ')\", \"' + SYM[1] + '(' +", "ux / (10 ** n)), axis=1) Y = np.concatenate((n, np.around(10 * Y) /", "+ '(' + UNI[1] + ')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n') else: print(' Exporter", "( ' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' \\ +", "math import sympy as sp import scipy as sc from scipy import stats", "' \\\\ \\\\mathrm{(' + UNI[n] + ')} $ & ') # All rows", "',' + str(aux) + ',' + ' ' rowSYUN = rowSYUN + ','", "'' + ',' + \\ '' print(' Variable data and uncertainty are stored.')", 
"len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else:", "database + '.csv', 'w') as aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing data and uncertainty", "= np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for m in range(0,int(len(row)/2)): if row[2*m+0] !=", "PreviewTableTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape C = np.zeros([f, int((3", "try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the same lenght. So no", "Y) / 10), axis=1) # Correction if exact decimal in round. f, c", "+ 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) + '},\\n') aux.write('};\\n\\n') #", "= x[:, np.newaxis] ux = ux[:, np.newaxis] n = NumPow(ux) Y = np.concatenate((x", "range(0, len(q)): row = q[l]; nam = q[0] if l == 0: for", "is compulsury. This can be done as:' aux3 = 'Data = LoadVar(ppath,\"Data\")' print(aux1", "l - 2 - aux[m - 1]) if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l -", "standard_transformations + (implicit_multiplication,) separ = '--------------------------------------------' sectspa = ' ' ########################################################################################## ########################################################################################## #", "open(ppath + database + '.csv', 'w') as aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing data", "aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux')", "__init__(self): self.ids = set() def visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN): extractor = IdentifierExtractor()", "subprocess from subprocess import DEVNULL, STDOUT, check_call from 
sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations", "1)]] = B[:, [2]] with open(ppath + 'export_TeX' + '.txt', 'a') as aux:", "= aux3 + '{}'.format('-').rjust(12,'-') aux3 = aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat'])", "NumPow(X): Y = np.around(np.log10(abs(X))); Y = Y - (10 ** Y > abs(X));", "l - 2 - aux[m - 1]) aux[m - 1] = aux[m -", "x 10[' + str(aux1[0]) + ']') except: aux1 = UncRound(x, ux); aux1 =", "range(0, int(3 * c / 2 - 2), 3): if p == int(3", "+ UNI[n] + ')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$ ' + SYM[n]", "selected data to Wolfram: bad selection.') ########################################################################################## ########################################################################################## # Load ALL variables in", "- 3): if C[o, [int(p)]] == -np.inf: aux.write('$ ( ' + str(float(C[o, [int(p", "+ 'preview_TeX.tex') #os.system('open -a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm", "> abs(X)); return Y def UncRound(x, ux): if type(x) is float: x =", "aux1 + 'Var[' + str(l+1) + '] - Units : ' + var[l][\"uni\"]", "(implicit_multiplication,) separ = '--------------------------------------------' sectspa = ' ' ########################################################################################## ########################################################################################## # Variable extractor", "= aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') ########################################################################################## def", "range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n') except: print('Status Failure.')", "else: if C[o, [int(p)]] == 
-np.inf: aux.write('$ ( ' + str(float(C[o, [int(p +", "+ \\ '' print(' Variable data and uncertainty are stored.') ########################################################################################## ########################################################################################## #", "- aux[m - 1]) if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l - 2] = \\", "+ 2)]])) +'}') else: aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' +", "+ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ ') else:", "+ \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN =", "for l in range(1, c, 2): B = UncRound(MAT[:, [l - 1]], MAT[:,", "print(aux3 + '\\n') except: print('Status Failure.') def disp(var): try: aux1 = ''; aux2", "axis=0) return Y def UncPrint(x, ux): try: aux1 = UncRound(x, ux) print(' '", "' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $ &", "subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE) process.communicate(output.encode('utf-8')) def read_from_clipboard(): return subprocess.check_output( 'pbpaste', env={'LANG': 'en_US.UTF-8'}).decode('utf-8')", "= np.array([[ux]]) elif type(x) is int: x = np.array([[x]]) ux = np.array([[ux]]) elif", "scipy import stats import sys import time from termcolor import colored, cprint import", "else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2 - aux[m - 1])", "' ########################################################################################## ########################################################################################## # Variable extractor class and function. ################################################# class IdentifierExtractor(ast.NodeVisitor): def", "ProjectMaker variable. 
########################################################## def dispu(var): try: aux1 = ''; aux2 = ''; aux3", "else: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm '", "',' + \\ '' + ',' + \\ '' + '\\n' aux.write(rowDAUN) else:", "l == 0: aux = list(Data.keys())[l] rowNAME = str(aux) + ',' + '", "list(Data.keys())[l] rowNAME = str(aux) + ',' + ' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) +", "rowDAUN = rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l]))", "len(var[0]['dat'])): aux4 = '' for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15)", "print(aux2); print(aux3) for m in range(0, len(var[0]['dat'])): aux4 = '' for l in", "So no printing.') print('Variables to print metadata.') print('The number of events is '", "+ str(aux1[0]) + ']') except: aux1 = UncRound(x, ux); aux1 = aux1[0] print('", "with open(ppath + 'preview_TeX' + '.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n')", "\"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. 
aux.write('data={\\n'); p = 0 for l in range(0, f): if", "+ '} $ & ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending", "True, FrameLabel -> {\"' + SYM[0] + '(' + UNI[0] + ')\", \"'", ") \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ & ') aux.write('\\\\\\\\ \\\\hline\\n')", "+ \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN +", "2 - aux[m - 1]) aux[m - 1] = aux[m - 1] +", "set() def visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids =", "+ str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{' + str(int(C[o, [int(p)]]))", "len(Data) is 0: print(' Nothing to load.') Data = {} else: print(' Database", "== int(3 * c / 2 - 3): if C[o, [int(p)]] == -np.inf:", "np.around(np.log10(abs(X))); Y = Y - (10 ** Y > abs(X)); return Y def", "aux1) # Headers of data table for l in range(0, len(Data.keys())): if len(Data.keys())", "ux[l][0] yaux = np.array([xaux, uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y def", "ux[:, np.newaxis] n = NumPow(ux) Y = np.concatenate((x / (10 ** n), ux", "# Points. aux.write('data={\\n'); p = 0 for l in range(0, f): if l", "np.newaxis] n = NumPow(ux) Y = np.concatenate((x / (10 ** n), ux /", "modified you must refresh Data variable.' 
aux2 = 'The very first time importing", "+ '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3", "in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] =", "+ 0)]])) + ',' + str(float(MAT[o, [int(p + 2)]])) + '},ErrorBar[' + str(float(MAT[o,", "var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 + '{}'.format('-').rjust(15,'-')", "MAT.shape if c==4: with open(ppath + 'export_WMT' + '.txt', 'a') as aux: aux.write(separ", "rowNAME + ',' + str(aux) + ',' + ' ' + '\\n' rowSYUN", "def TableToTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape C = np.zeros([f,", "')} $ & ') # All rows and cols iterative. for o in", "+ database + '.csv', 'w') as aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing data and", "rows and cols iterative. for o in range(0, f): for p in range(0,", "+ 2)]])) + ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $", "= LoadVar(ppath,\"Data\")' print(aux1 + aux2 + '\\n' + aux3 + '\\n') def write_to_clipboard(output):", "extractor.ids - set(vars(math)) return extractor.ids ########################################################################################## ########################################################################################## # Round uncertainty functions. 
########################################################### def", "== 0: aux = list(Data.keys())[l] rowNAME = str(aux) + ',' + ' '", ") for l in range(1, c, 2): B = UncRound(MAT[:, [l - 1]],", "print(' Exporter can not export the selected data to Wolfram: bad selection.') ##########################################################################################", "'\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable", "str(float(MAT[o, [int(p + 3)]])) + ']},\\n') aux.write('}];\\n\\n') # Final activities: caption and ending", "= vardata # Searching the biggest vector in Data aux1 = 0 for", "UNI[n] + ')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$ ' + SYM[n] +", "& ') else: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + '", "<= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l]))", "\\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN + ',' + \\ '' + ','", "+ Data[list(Data.keys())[l]]['uni'] with open(ppath + database + '.csv', 'w') as aux: aux.write(rowNAME) aux.write(rowSYUN)", "'\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. aux.write('data={\\n'); p = 0 for l in range(0,", "'} $ & ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment.", "# Store ONE variable in a database. ###################################################### def StoreVar(vardata, varname, ppath, database):", "str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']') except: aux1 = UncRound(x,", "packages. 
#################################################################### import numpy as np import csv import re import os import", "str(int(C[o, [int(p)]])) + '} $ & ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption", "UncRound(x, ux): if type(x) is float: x = np.array([[x]]) ux = np.array([[ux]]) elif", "and cols iterative. for o in range(0, f): for p in range(0, int(3", "\\ ',' + Data[list(Data.keys())[l]]['uni'] with open(ppath + database + '.csv', 'w') as aux:", "+ str(int(C[o, [int(p)]])) + '} $ ') else: if C[o, [int(p)]] == -np.inf:", "range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3", "== -np.inf: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm", "spamreader = csv.reader(csvfile, delimiter=',') q = list(spamreader) print('# Loading ' + str(file) +", "+ 1)]])) + ' \\\\pm ' \\ + str(float(C[o, [int(p + 2)]])) +", "[2]] with open(ppath + 'preview_TeX' + '.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}')", "c = MAT.shape if c==4: with open(ppath + 'export_WMT' + '.txt', 'a') as", "f, c = MAT.shape C = np.zeros([f, int((3 / 2) * (c))], )", "for o in range(0, f): for p in range(0, int(3 * c /", "')\", \"' + SYM[1] + '(' + UNI[1] + ')\"},AspectRatio -> 1 /", "refresh Data variable.' aux2 = 'The very first time importing database is compulsury.", "int(3 * c / 2 - 2), 3): if p == int(3 *", "SYM, UNI, ppath): f, c = MAT.shape C = np.zeros([f, int((3 / 2)", "1 / 2) * c)): aux.write('c|') aux.write('} \\\\hline\\n') # Headings. for n in", "if l == int(f - 1): aux.write('{' + str(float(MAT[l, [int(p + 0)]])) +", "= rowDAUN + ',' + \\ '' + ',' + \\ '' print('", "as sc from scipy import stats import sys import time from termcolor import", "class and function. 
################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids = set() def visit_Name(self,", "len(Data.keys()) - 1: aux = list(Data.keys())[l] rowNAME = rowNAME + ',' + str(aux)", "'d({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 + '{}'.format('-').rjust(12,'-') aux3 =", "str(database) + '.csv'; Data = {} if os.path.isfile(file) is True: with open(file, newline='')", "uncertainty row by row for l in range(0, max(aux1, len(vardata['dat']))): for m in", "' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ ') else: if", "coding: UTF-8 -*- ########################################################################################## # Importing packages. #################################################################### import numpy as np import", "and ending enviroment. aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm", "',' + str(float(MAT[l, [int(p + 2)]])) + '},\\n') aux.write('};\\n\\n') # Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p", "C[:, [int((3 / 2) * (l + 1) - 1)]] = B[:, [2]]", "l == len(Data.keys()) - 1: aux = list(Data.keys())[l] rowNAME = rowNAME + ','", "activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL,", "can not export the selected data to Wolfram: bad selection.') ########################################################################################## ########################################################################################## #", "1 print(' Load file success.') if len(Data) is 0: print(' Nothing to load.')", "= ''; aux3 = '' for l in range(0,len(var)): aux1 = aux1 +", "+ ',' + str(aux) + ',' + ' ' + '\\n' rowSYUN =", "str(float(MAT[o, [int(p + 1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) + ']},\\n')", "+ ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else:", "Final activities: caption and ending enviroment. aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ##########################################################################################", "sc from scipy import stats import sys import time from termcolor import colored,", "= row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for", "'Var[' + str(l+1) + '] - Symbol: ' + var[l][\"sym\"] + '\\n' aux1", "0 for l in range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of", "first time importing database is compulsury. 
This can be done as:' aux3 =", "print(' ' + str(aux1[1]) + '(' + str(aux1[2]) + ') x 10[' +", "print(aux3); print(aux2); print(aux3) for m in range(0, len(var[0]['dat'])): aux4 = '' for l", "import csv import re import os import ast import math import sympy as", "class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids = set() def visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN):", "+ ',' + \\ '' + ',' + \\ '' + '\\n' aux.write(rowDAUN)", "'\\n' + aux3 + '\\n') def write_to_clipboard(output): process = subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'},", "[int(p + 3)]])) + ']}') else: aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) +", "exist!') Data = {} return Data ########################################################################################## ########################################################################################## # Store ONE variable in", "1: rowDAUN = rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\", "str(aux) + ',' + ' ' rowSYUN = rowSYUN + ',' + Data[list(Data.keys())[l]]['sym']", "np.array([[ux]]) elif type(x) is np.ndarray: try: x.shape[1] except: x = x[:, np.newaxis] ux", "+ ' ' rowSYUN = rowSYUN + ',' + Data[list(Data.keys())[l]]['sym'] + \\ ','", "load.') Data = {} else: print(' Database does not exist!') Data = {}", "+ ',' + str(aux) + ',' + ' ' rowSYUN = rowSYUN +", "= np.zeros([f, int((3 / 2) * (c))], ) for l in range(1, c,", "else: for m in range(0,int(len(row)/2)): if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else:", "0)]])) + ',' + str(float(MAT[o, [int(p + 2)]])) + '},ErrorBar[' + str(float(MAT[o, [int(p", "+ str(database) + '.csv'; Data = {} if os.path.isfile(file) is True: with open(file,", "LaTeX document. 
######################################################## def TableToTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape", "for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n')", "data and uncertainty row by row for l in range(0, max(aux1, len(vardata['dat']))): for", "!= '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l -", "'\\n' else: rowDAUN = rowDAUN + ',' + \\ '' + ',' +", "axis=1) # Correction if exact decimal in round. f, c = Y.shape for", "else: aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p +", "''; aux2 = ''; aux3 = '' for l in range(0,len(var)): aux1 =", "list(spamreader) print('# Loading ' + str(file) + '.') aux = np.zeros(len(q[0]) - 2,", "\\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and units are stored.') elif l == len(Data.keys())", "len(Data.keys()) == 1: # 1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' +", "+ ')\", \"' + SYM[1] + '(' + UNI[1] + ')\"},AspectRatio -> 1", "cols iterative. for o in range(0, f): for p in range(0, int(3 *", "+ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN) else: if m == 0: if l <=", "float: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is int: x =", "2 - 3): if C[o, [int(p)]] == -np.inf: aux.write('$ ( ' + str(float(C[o,", "[l]]) C[:, [int((3 / 2) * (l + 1) - 3)]] = B[:,", "########################################################################################## # Variable extractor class and function. ################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids", "Importing packages. 
#################################################################### import numpy as np import csv import re import os", "print(' Variable data and uncertainty are stored.') ########################################################################################## ########################################################################################## # Disp a ProjectMaker", "+ '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print('", "2)]])) + ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ &", "\\ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ ') else:", "n[l] + 1; xaux = x[l][0]; uxaux = ux[l][0] yaux = np.array([xaux, uxaux])", "' + str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3) for m in range(0, len(var[0]['dat'])): aux4", "p = 0 for o in range(0, f): if o == int(f-1): aux.write('{{'", "+ '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the same lenght.", "separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. 
aux.write('data={\\n'); p = 0 for l", "aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0, int((1 - 1", "',' + str(aux) + ',' + ' ' + '\\n' rowSYUN = rowSYUN", "+ \\ '' + ',' + \\ '' print(' Variable data and uncertainty", "import colored, cprint import subprocess from subprocess import DEVNULL, STDOUT, check_call from sympy.parsing.sympy_parser", "+ ']}') else: aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o,", "str(float(MAT[o, [int(p + 3)]])) + ']}') else: aux.write('{{' + str(float(MAT[o, [int(p + 0)]]))", "rowNAME = rowNAME + ',' + str(aux) + ',' + ' ' rowSYUN", "',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN = rowDAUN + ',' +", "########################################################## def dispu(var): try: aux1 = ''; aux2 = ''; aux3 = ''", "'} $ ') else: if C[o, [int(p)]] == -np.inf: aux.write('$ ( ' +", "variable in a database. ###################################################### def StoreVar(vardata, varname, ppath, database): Data = LoadVar(ppath,database)", "Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm '", "# 1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n'", "caption and ending enviroment. 
aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT)", "+ ',' + ' ' + '\\n' rowSYUN = rowSYUN + ',' +", "rowSYUN = rowSYUN + ',' + \\ str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni'])", "2) * (l + 1) - 2)]] = B[:, [1]] C[:, [int((3 /", "dictionary.') Data[str(varname)] = vardata # Searching the biggest vector in Data aux1 =", "',' + Data[list(Data.keys())[l]]['uni'] with open(ppath + database + '.csv', 'w') as aux: aux.write(rowNAME)", "Failure.') def disp(var): try: aux1 = ''; aux2 = ''; aux3 = ''", "\\ '' print(' Variable data and uncertainty are stored.') ########################################################################################## ########################################################################################## # Disp", "str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN = rowDAUN + ',' + \\ '' +", "= rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else:", "= x[l][0]; uxaux = ux[l][0] yaux = np.array([xaux, uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10),", "CAP, SYM, UNI, ppath): f, c = MAT.shape C = np.zeros([f, int((3 /", "does not exist!') Data = {} return Data ########################################################################################## ########################################################################################## # Store ONE", "if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN + ',' + \\", "aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o, [int(p + 2)]]))", "aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') 
##########################################################################################", "if C[o, [int(p)]] == -np.inf: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]]))", "= q[0] if l == 0: for m in range(0,int(len(row)/2)): Data[str(row[2*m])] = {}", "Data[str(row[2*m])] = {} elif l == 1: for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] =", "'' for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 +", "aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3", "',' + str(float(MAT[o, [int(p + 3)]])) + ']}') else: aux.write('{{' + str(float(MAT[o, [int(p", "aux[m - 1]) if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l - 2] = \\ float(row[2*m+1])", "aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0,", "# Disp a ProjectMaker variable. 
########################################################## def dispu(var): try: aux1 = ''; aux2", "- 1]) aux[m - 1] = aux[m - 1] + 1 print(' Load", "'.') aux = np.zeros(len(q[0]) - 2, ) for l in range(0, len(q)): row", "\\ '' + '\\n' aux.write(rowDAUN) else: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN", "import subprocess from subprocess import DEVNULL, STDOUT, check_call from sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication)", "' + str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $ ')", "open(ppath + 'export_WMT' + '.txt', 'a') as aux: aux.write(separ + 'x' + separ", "'Data = LoadVar(ppath,\"Data\")' print(aux1 + aux2 + '\\n' + aux3 + '\\n') def", "aux.write('c|') aux.write('} \\\\hline\\n') # Headings. for n in range(0, int(c / 2)): if", "',' + ' ' + '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\", "= rowNAME + ',' + str(aux) + ',' + ' ' + '\\n'", "m in range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif l == 1: for m in", "+ str(int(C[o, [int(p)]])) + '} $ & ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities:", "+ CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview LaTeX table. 
################################################################### def PreviewTableTeX(MAT,", "2)]])) + ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ ')", "+ '} $ ') else: if C[o, [int(p)]] == -np.inf: aux.write('$ ( '", "ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame -> True, FrameLabel -> {\"' + SYM[0] + '('", "Y def UncPrint(x, ux): try: aux1 = UncRound(x, ux) print(' ' + str(aux1[1])", "ast import math import sympy as sp import scipy as sc from scipy", "1)]])) + ' \\\\pm ' + str(float(C[o, [int(p + 2)]])) + ' )", "uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y def UncPrint(x, ux): try: aux1", "',' + \\ '' + '\\n' aux.write(rowDAUN) else: if l <= len((Data[list(Data.keys())[m]]['dat'])) -", "+ ',' + str(float(MAT[o, [int(p + 3)]])) + ']},\\n') aux.write('}];\\n\\n') # Final activities:", "Disp a ProjectMaker variable. ########################################################## def dispu(var): try: aux1 = ''; aux2 =", "UncRound(x, ux); aux1 = aux1[0] print(' ' + str(aux1[1]) + '(' + str(aux1[2])", "SYM[1] + '(' + UNI[1] + ')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n') else: print('", "+ 1) - 2)]] = B[:, [1]] C[:, [int((3 / 2) * (l", "IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids = set() def visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN): extractor", "preview_TeX.aux') os.system('rm ' + ppath + 'preview_TeX.tex') #os.system('open -a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/'", "in range(0, len(var[0]['dat'])): aux4 = '' for l in range(0,len(var)): aux4 = aux4", "data to Wolfram: bad selection.') ########################################################################################## ########################################################################################## # Load ALL variables in a", "'\\n' else: aux = list(Data.keys())[l] rowNAME = rowNAME + ',' + str(aux) +", "2) * (c))], ) for l in range(1, c, 2): B = UncRound(MAT[:,", "== int(f-1): 
aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o, [int(p", "Data aux1 = 0 for l in range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1)", "',' + \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and units are stored.') elif l", "f): if l == int(f - 1): aux.write('{' + str(float(MAT[l, [int(p + 0)]]))", "\\\\hline\\n') # Headings. for n in range(0, int(c / 2)): if n ==", "0: for m in range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif l == 1: for", "',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN + ',' + \\ ''", "'The very first time importing database is compulsury. This can be done as:'", "= np.array([xaux, uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y def UncPrint(x, ux):", "{\"' + SYM[0] + '(' + UNI[0] + ')\", \"' + SYM[1] +", "Data ########################################################################################## ########################################################################################## # Store ONE variable in a database. 
###################################################### def StoreVar(vardata,", "aux = np.zeros(len(q[0]) - 2, ) for l in range(0, len(q)): row =", "+ SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $') aux.write('\\\\\\\\ \\\\hline", "[int(p + 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) + '},\\n') aux.write('};\\n\\n')", "in range(0, f): if Y[l][2] == 10: naux = n[l] + 1; xaux", "is True: with open(file, newline='') as csvfile: spamreader = csv.reader(csvfile, delimiter=',') q =", "np.zeros(len(q)-2,) else: for m in range(0,int(len(row)/2)): if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0])", "ppath): f, c = MAT.shape C = np.zeros([f, int((3 / 2) * (c))],", "aux.write(rowNAME) aux.write(rowSYUN) # Writing data and uncertainty row by row for l in", "',' + str(float(MAT[l, [int(p + 2)]])) +'}') else: aux.write('{' + str(float(MAT[l, [int(p +", "- aux[m - 1]) aux[m - 1] = aux[m - 1] + 1", "import math import sympy as sp import scipy as sc from scipy import", "str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ ') else:", "{} if os.path.isfile(file) is True: with open(file, newline='') as csvfile: spamreader = csv.reader(csvfile,", "1]) if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l - 2] = \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc']", "q[l]; nam = q[0] if l == 0: for m in range(0,int(len(row)/2)): Data[str(row[2*m])]", "var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN) else:", "# Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm", "Final activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'],", "= list(spamreader) print('# Loading ' + str(file) + '.') aux = np.zeros(len(q[0]) -", "aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of data table for l in range(0,", "aux.write('\\\\begin{tabular}{|') for m in range(0, int((1 - 1 / 2) * c)): aux.write('c|')", "aux1 = UncRound(x, ux) print(' ' + str(aux1[1]) + '(' + str(aux1[2]) +", "aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $", "ux) print(' ' + str(aux1[1]) + '(' + str(aux1[2]) + ') x 10['", "+ var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 +", "Data = LoadVar(ppath,database) print(' Creating label in database dictionary.') Data[str(varname)] = vardata #", "############################################################################### def WolframEx(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape if c==4:", "10[' + str(aux1[0]) + ']') ########################################################################################## ########################################################################################## # Export table to LaTeX document.", "* (c))], ) for l in range(1, c, 2): B = UncRound(MAT[:, [l", "'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm ' + ppath + 'preview_TeX.tex')", "if len(Data.keys()) == 1: # 1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ','", "aux3 = aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not", "file = ppath + str(database) + '.csv'; Data = {} if os.path.isfile(file) is", "print(' Variable symbols and units are stored.') elif l == len(Data.keys()) - 1:", "m in range(0, int((1 - 1 / 2) * c)): aux.write('c|') aux.write('} 
\\\\hline\\n')", "' + ppath + 'preview_TeX.tex') #os.system('open -a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf')", "aux[m - 1] + 1 print(' Load file success.') if len(Data) is 0:", "Failure.') ########################################################################################## def SetUp(): print('Setting Current Path is needed. Set it writting:') print('CURRENTPATH", "l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\", "# Variable extractor class and function. ################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids =", "+ UNI[1] + ')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n') else: print(' Exporter can not", "1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN)", "= list(Data.keys())[l] rowNAME = str(aux) + ',' + ' ' + '\\n' rowSYUN", "os.system('rm ' + ppath + 'preview_TeX.tex') #os.system('open -a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' +", "<= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) +", "/ GoldenRatio]\\n\\n') else: print(' Exporter can not export the selected data to Wolfram:", "\\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0, int((1 - 1 / 2) * c)):", "+ ',' + str(float(MAT[o, [int(p + 2)]])) + '},ErrorBar[' + str(float(MAT[o, [int(p +", "except: print('Status Failure.') ########################################################################################## def SetUp(): print('Setting Current Path is needed. Set it", "') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP", "########################################################################################## # Store ONE variable in a database. ###################################################### def StoreVar(vardata, varname, ppath,", "if p == int(3 * c / 2 - 3): if C[o, [int(p)]]", "open(file, newline='') as csvfile: spamreader = csv.reader(csvfile, delimiter=',') q = list(spamreader) print('# Loading", "'': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2", "be done as:' aux3 = 'Data = LoadVar(ppath,\"Data\")' print(aux1 + aux2 + '\\n'", "in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n') except: print('Status", "+ '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') def disp(var): try: aux1", "[int(p + 2)]])) + ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '}", "ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm ' + ppath", "\\\\hline\\n') # Final activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath", "[int(p + 3)]])) + ']},\\n') aux.write('}];\\n\\n') # Final activities: caption and ending enviroment.", "ppath): f, c = MAT.shape if c==4: with open(ppath + 'export_WMT' + '.txt',", "f): if o == int(f-1): aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ','", "rowSYUN + ',' + \\ str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n'", "aux1 = ''; aux2 = ''; aux3 = '' for l in range(0,len(var)):", "\\ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ & ')", "1) - 1)]] = B[:, [2]] with open(ppath + 'preview_TeX' + '.tex', 'w')", "########################################################### def NumPow(X): Y = np.around(np.log10(abs(X))); Y = Y - (10 ** Y", "- 1: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] +", "extractor class and function. ################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids = set() def", "'\\n') def write_to_clipboard(output): process = subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE) process.communicate(output.encode('utf-8')) def read_from_clipboard():", "\\ str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux = list(Data.keys())[l]", "os.path.isfile(file) is True: with open(file, newline='') as csvfile: spamreader = csv.reader(csvfile, delimiter=',') q", "#################################################################### import numpy as np import csv import re import os import ast", "if n == c / 2 - 1: aux.write('$ ' + SYM[n] +", "= np.around(np.log10(abs(X))); Y = Y - (10 ** Y > abs(X)); return Y", "+ 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ (", "']') except: aux1 = UncRound(x, ux); aux1 = aux1[0] print(' ' + str(aux1[1])", "+ 
'/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram. ###############################################################################", "aux2 = ''; aux3 = '' for l in range(0,len(var)): aux1 = aux1", "print(' Creating label in database dictionary.') Data[str(varname)] = vardata # Searching the biggest", "[1]] C[:, [int((3 / 2) * (l + 1) - 1)]] = B[:,", "rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable name(s)", "print(' Load file success.') if len(Data) is 0: print(' Nothing to load.') Data", "l in range(0, f): if l == int(f - 1): aux.write('{' + str(float(MAT[l,", "your database is modified you must refresh Data variable.' aux2 = 'The very", "compulsury. This can be done as:' aux3 = 'Data = LoadVar(ppath,\"Data\")' print(aux1 +", "rowDAUN = rowDAUN + ',' + \\ '' + ',' + \\ ''", "aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' \\", "2)]])) + ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( ' +", "= set() def visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids", "########################################################################################## # Export table to LaTeX document. ######################################################## def TableToTeX(MAT, CAP, SYM, UNI,", "Round uncertainty functions. 
########################################################### def NumPow(X): Y = np.around(np.log10(abs(X))); Y = Y -", "WolframEx(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape if c==4: with open(ppath", "3)]])) + ']}') else: aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' +", "file success.') if len(Data) is 0: print(' Nothing to load.') Data = {}", "' ' + '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) +", "(l + 1) - 2)]] = B[:, [1]] C[:, [int((3 / 2) *", "+ ')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n') else: print(' Exporter can not export the", "check_call from sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations + (implicit_multiplication,) separ =", "+ str(aux) + ',' + ' ' + '\\n' rowSYUN = rowSYUN +", "(10 ** Y > abs(X)); return Y def UncRound(x, ux): if type(x) is", "+ '(' + str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']') ##########################################################################################", "(l + 1) - 1)]] = B[:, [2]] with open(ppath + 'export_TeX' +", "' + str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{' + str(int(C[o,", "else: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')}", "rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\", "Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for m in range(0,int(len(row)/2)): if row[2*m+0]", "aux = list(Data.keys())[l] rowNAME = rowNAME + ',' + str(aux) + ',' +", "aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' +", "= \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\ ''", "== len(Data.keys()) - 1: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = 
rowDAUN", "'.txt', 'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in range(0, int((1 -", "########################################################################################## ########################################################################################## # Disp a ProjectMaker variable. ########################################################## def dispu(var): try: aux1 =", "+ str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' + str(float(C[o, [int(p +", "[int(p)]])) + '} $ ') else: if C[o, [int(p)]] == -np.inf: aux.write('$ (", "# Final activities: caption and ending enviroment. aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ##########################################################################################", "c, 2): B = UncRound(MAT[:, [l - 1]], MAT[:, [l]]) C[:, [int((3 /", "2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$ (", "** n)), axis=1) Y = np.concatenate((n, np.around(10 * Y) / 10), axis=1) #", "aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0, int((1 - 1 / 2) * c)): aux.write('c|')", "= np.zeros(len(q[0]) - 2, ) for l in range(0, len(q)): row = q[l];", "+ 2)]])) + '},ErrorBar[' + str(float(MAT[o, [int(p + 1)]])) + ',' + str(float(MAT[o,", "len(q)): row = q[l]; nam = q[0] if l == 0: for m", "print(' Nothing to load.') Data = {} else: print(' Database does not exist!')", "- 3)]] = B[:, [0]] C[:, [int((3 / 2) * (l + 1)", "+ str(float(MAT[o, [int(p + 3)]])) + ']}') else: aux.write('{{' + str(float(MAT[o, [int(p +", "+ '\\n' aux.write(rowDAUN) else: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN", "int((3 / 2) * (c))], ) for l in range(1, c, 2): B", "+ \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable name(s) are stored.') else: if l", "aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 + '{}'.format('-').rjust(12,'-') aux3 = aux3", "if m == 0: 
if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = \\", "+ str(float(MAT[o, [int(p + 3)]])) + ']},\\n') aux.write('}];\\n\\n') # Final activities: caption and", "= NumPow(ux) Y = np.concatenate((x / (10 ** n), ux / (10 **", "str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) +", "= aux[m - 1] + 1 print(' Load file success.') if len(Data) is", "IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids - set(vars(math)) return extractor.ids ########################################################################################## ########################################################################################## # Round", "c / 2 - 2), 3): if p == int(3 * c /", "FrameLabel -> {\"' + SYM[0] + '(' + UNI[0] + ')\", \"' +", "you must refresh Data variable.' aux2 = 'The very first time importing database", "+ '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. aux.write('data={\\n'); p = 0 for l in", "# Load ALL variables in a database. ###################################################### def LoadVar(ppath, database): file =", "time from termcolor import colored, cprint import subprocess from subprocess import DEVNULL, STDOUT,", "aux3 = aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not", "aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]]))", "activities: caption and ending enviroment. aux.write('LP = ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL,", "2 - 1: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n]", "+ separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. 
aux.write('data={\\n'); p = 0 for", "ppath + str(database) + '.csv'; Data = {} if os.path.isfile(file) is True: with", "process = subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE) process.communicate(output.encode('utf-8')) def read_from_clipboard(): return subprocess.check_output( 'pbpaste',", "with open(ppath + 'export_WMT' + '.txt', 'a') as aux: aux.write(separ + 'x' +", "l in range(0, len(Data.keys())): if len(Data.keys()) == 1: aux = list(Data.keys())[l] rowNAME =", "extractor.ids = extractor.ids - set(vars(math)) return extractor.ids ########################################################################################## ########################################################################################## # Round uncertainty functions.", "' \\\\pm ' + str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty}", "p in range(0, int(3 * c / 2 - 2), 3): if p", "uxaux = ux[l][0] yaux = np.array([xaux, uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0) return", "print('Variables to print metadata.') print('The number of events is ' + str(len(var[0]['dat']))) print(aux1);", "database): file = ppath + str(database) + '.csv'; Data = {} if os.path.isfile(file)", "= ''; aux2 = ''; aux3 = '' for l in range(0,len(var)): aux1", "str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN + ',' +", "stored.') else: if l == 0: aux = list(Data.keys())[l] rowNAME = str(aux) +", "+ str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) +", "+ 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 + '{}'.format('-').rjust(12,'-') aux3", "'{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n') except: print('Status Failure.')", "2)): if n == c / 2 - 1: aux.write('$ ' + SYM[n]", "in 
range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif l == 1: for m in range(0,int(len(row)/2)):", "aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 + '{}'.format('-').rjust(12,'-') aux3 = aux3 + '{}'.format('-').rjust(12,'-')", "len(var[0]['dat'])): aux4 = '' for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12)", "LL, Frame -> True, FrameLabel -> {\"' + SYM[0] + '(' + UNI[0]", "']') ########################################################################################## ########################################################################################## # Export table to LaTeX document. ######################################################## def TableToTeX(MAT, CAP,", "print(aux4) print(aux3 + '\\n') except: print('Status Failure.') def disp(var): try: aux1 = '';", "stats import sys import time from termcolor import colored, cprint import subprocess from", "+ ']') ########################################################################################## ########################################################################################## # Export table to LaTeX document. ######################################################## def TableToTeX(MAT,", "3)]] = B[:, [0]] C[:, [int((3 / 2) * (l + 1) -", "+ \\ '' elif m == len(Data.keys()) - 1: if l <= len((Data[list(Data.keys())[m]]['dat']))", "str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\ '' + ',' + \\ '' elif m", "range(0, int(c / 2)): if n == c / 2 - 1: aux.write('$", "# Round uncertainty functions. ########################################################### def NumPow(X): Y = np.around(np.log10(abs(X))); Y = Y", "str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o, [int(p + 2)]])) + '},ErrorBar['", "Path is needed. Set it writting:') print('CURRENTPATH = \"your/path\"\\n') aux1 = 'Each time", "aux.write('};\\n\\n') # Errorbars. 
aux.write('EL=ErrorListPlot[{\\n'); p = 0 for o in range(0, f): if", "MAT.shape C = np.zeros([f, int((3 / 2) * (c))], ) for l in", "+ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ & ')", "+ SYM[0] + '(' + UNI[0] + ')\", \"' + SYM[1] + '('", "+ var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 +", "exact decimal in round. f, c = Y.shape for l in range(0, f):", "x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is np.ndarray: try: x.shape[1] except:", "') # All rows and cols iterative. for o in range(0, f): for", "aux.write(separ + 'x' + separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. aux.write('data={\\n'); p", "aux4 = '' for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4", "+ \\ '' + ',' + \\ '' + '\\n' aux.write(rowDAUN) else: if", "$ ') else: if C[o, [int(p)]] == -np.inf: aux.write('$ ( ' + str(float(C[o,", "export the selected data to Wolfram: bad selection.') ########################################################################################## ########################################################################################## # Load ALL", "B = UncRound(MAT[:, [l - 1]], MAT[:, [l]]) C[:, [int((3 / 2) *", "range(0, f): for p in range(0, int(3 * c / 2 - 2),", "= np.array([[ux]]) elif type(x) is np.ndarray: try: x.shape[1] except: x = x[:, np.newaxis]", "''; aux3 = '' for l in range(0,len(var)): aux1 = aux1 + 'Var['", "sp import scipy as sc from scipy import stats import sys import time", "str(file) + '.') aux = np.zeros(len(q[0]) - 2, ) for l in range(0,", "+ \\ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ &", "= \\ '' + ',' + \\ '' elif m == len(Data.keys()) -", "',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\ '' + ',' + \\", "aux3 + '{}'.format('-').rjust(12,'-') aux3 = aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except:", "10^{' + 
str(int(C[o, [int(p)]])) + '} $ ') else: if C[o, [int(p)]] ==", "def StoreVar(vardata, varname, ppath, database): Data = LoadVar(ppath,database) print(' Creating label in database", "\\\\hline\\n') # Final activities: caption and ending enviroment. aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n')", "* Y) / 10), axis=1) # Correction if exact decimal in round. f,", "$ & ') else: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) +", "data and uncertainty are stored.') ########################################################################################## ########################################################################################## # Disp a ProjectMaker variable. ##########################################################", "# Writing data and uncertainty row by row for l in range(0, max(aux1,", "' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and", "+ \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN = rowDAUN + ',' + \\", "- 1)]] = B[:, [2]] with open(ppath + 'export_TeX' + '.txt', 'a') as", "+ \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux = list(Data.keys())[l] rowNAME = rowNAME +", "+ str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3) for m in range(0, len(var[0]['dat'])): aux4 =", "for m in range(0, len(Data.keys())): if len(Data.keys()) == 1: # 1 var rowDAUN", "10^{\\\\infty} $ & ') else: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]]))", "row by row for l in range(0, max(aux1, len(vardata['dat']))): for m in range(0,", "(l + 1) - 3)]] = B[:, [0]] C[:, [int((3 / 2) *", "###################################################### def StoreVar(vardata, varname, ppath, database): Data = LoadVar(ppath,database) print(' Creating label in", "in round. 
f, c = Y.shape for l in range(0, f): if Y[l][2]", "+ ') x 10[' + str(aux1[0]) + ']') ########################################################################################## ########################################################################################## # Export table", "for m in range(0,int(len(row)/2)): if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat']", "UTF-8 -*- ########################################################################################## # Importing packages. #################################################################### import numpy as np import csv", "str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ & ')", "[int(p)]])) + '} $ & ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and", "stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm ' + ppath + 'preview_TeX.tex') #os.system('open -a", "f, c = Y.shape for l in range(0, f): if Y[l][2] == 10:", "',' + \\ '' + ',' + \\ '' print(' Variable data and", "set(vars(math)) return extractor.ids ########################################################################################## ########################################################################################## # Round uncertainty functions. 
########################################################### def NumPow(X): Y", "with open(file, newline='') as csvfile: spamreader = csv.reader(csvfile, delimiter=',') q = list(spamreader) print('#", "'' print(' Variable data and uncertainty are stored.') ########################################################################################## ########################################################################################## # Disp a", "np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for m in range(0,int(len(row)/2)): if row[2*m+0] != '':", "nam = q[0] if l == 0: for m in range(0,int(len(row)/2)): Data[str(row[2*m])] =", "print(aux3 + '\\n') except: print('Status Failure.') ########################################################################################## def SetUp(): print('Setting Current Path is", "NumPow(ux) Y = np.concatenate((x / (10 ** n), ux / (10 ** n)),", "'.txt', 'a') as aux: aux.write(separ + 'x' + separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n')", "np.around(10 * Y) / 10), axis=1) # Correction if exact decimal in round.", "') x 10[' + str(aux1[0]) + ']') ########################################################################################## ########################################################################################## # Export table to", "'export_WMT' + '.txt', 'a') as aux: aux.write(separ + 'x' + separ + '\\n\\n$PlotTheme", "aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' +", "'Each time your database is modified you must refresh Data variable.' aux2 =", "is ' + str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3) for m in range(0, len(var[0]['dat'])):", "aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the same", "-*- coding: UTF-8 -*- ########################################################################################## # Importing packages. 
#################################################################### import numpy as np", "# Export table to LaTeX document. ######################################################## def TableToTeX(MAT, CAP, SYM, UNI, ppath):", "delimiter=',') q = list(spamreader) print('# Loading ' + str(file) + '.') aux =", "1]], MAT[:, [l]]) C[:, [int((3 / 2) * (l + 1) - 3)]]", "+ ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( ' + str(float(C[o,", "= aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') def disp(var):", "else: if l == 0: aux = list(Data.keys())[l] rowNAME = str(aux) + ','", "LoadVar(ppath, database): file = ppath + str(database) + '.csv'; Data = {} if", "10^{' + str(int(C[o, [int(p)]])) + '} $ & ') aux.write('\\\\\\\\ \\\\hline\\n') # Final", "' + str(file) + '.') aux = np.zeros(len(q[0]) - 2, ) for l", "+ str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o, [int(p + 2)]])) +", "by row for l in range(0, max(aux1, len(vardata['dat']))): for m in range(0, len(Data.keys())):", "str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$", "aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) ==", "str(float(MAT[l, [int(p + 2)]])) +'}') else: aux.write('{' + str(float(MAT[l, [int(p + 0)]])) +", "** n), ux / (10 ** n)), axis=1) Y = np.concatenate((n, np.around(10 *", "'(' + str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']') ########################################################################################## ##########################################################################################", "subprocess import DEVNULL, STDOUT, check_call from sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations", "f): for p in range(0, int(3 * c / 2 - 2), 3):", "',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + 
'\\n' aux.write(rowDAUN) else: if m == 0: if l", "All rows and cols iterative. for o in range(0, f): for p in", "range(0,len(var)): aux1 = aux1 + 'Var[' + str(l+1) + '] - Symbol: '", "+ '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 + '{}'.format('-').rjust(15,'-') try:", "',' + str(float(MAT[o, [int(p + 3)]])) + ']},\\n') aux.write('}];\\n\\n') # Final activities: caption", "= \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2 - aux[m - 1]) if row[2*m+1]", "+ str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) +'}')", "check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm ' +", "') else: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm", "from scipy import stats import sys import time from termcolor import colored, cprint", "UNI, ppath): f, c = MAT.shape C = np.zeros([f, int((3 / 2) *", "C[o, [int(p)]] == -np.inf: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) +", "+ str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $ ') else:", "Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## #", "Exporter can not export the selected data to Wolfram: bad selection.') ########################################################################################## ##########################################################################################", "m == len(Data.keys()) - 1: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN =", "+ str(l+1) + '] - Symbol: ' + var[l][\"sym\"] + '\\n' aux1 =", "ending enviroment. 
aux.write('LP = ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame -> True,", "aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment. aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex',", "+ '] - Symbol: ' + var[l][\"sym\"] + '\\n' aux1 = aux1 +", "yaux = np.array([xaux, uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y def UncPrint(x,", "Creating label in database dictionary.') Data[str(varname)] = vardata # Searching the biggest vector", "+ str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $ & ')", "print(aux4) print(aux3 + '\\n') except: print('Status Failure.') ########################################################################################## def SetUp(): print('Setting Current Path", "rowDAUN + ',' + \\ '' + ',' + \\ '' print(' Variable", "0 for o in range(0, f): if o == int(f-1): aux.write('{{' + str(float(MAT[o,", "+ '.') aux = np.zeros(len(q[0]) - 2, ) for l in range(0, len(q)):", "aux.write('LP = ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame -> True, FrameLabel ->", "# Searching the biggest vector in Data aux1 = 0 for l in", "as np import csv import re import os import ast import math import", "0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) +'}') else: aux.write('{' + str(float(MAT[l,", "try: x.shape[1] except: x = x[:, np.newaxis] ux = ux[:, np.newaxis] n =", "########################################################################################## def SetUp(): print('Setting Current Path is needed. 
Set it writting:') print('CURRENTPATH =", "\\ ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( ' + str(float(C[o,", "+ ',' + str(float(MAT[l, [int(p + 2)]])) +'}') else: aux.write('{' + str(float(MAT[l, [int(p", "+ ',' + \\ '' elif m == len(Data.keys()) - 1: if l", "for l in range(0, f): if l == int(f - 1): aux.write('{' +", "sectspa = ' ' ########################################################################################## ########################################################################################## # Variable extractor class and function. #################################################", "',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux = list(Data.keys())[l] rowNAME = rowNAME", "= aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n')", "Load file success.') if len(Data) is 0: print(' Nothing to load.') Data =", "naux = n[l] + 1; xaux = x[l][0]; uxaux = ux[l][0] yaux =", "aux3 = aux3 + '{}'.format('-').rjust(12,'-') aux3 = aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) ==", "sys import time from termcolor import colored, cprint import subprocess from subprocess import", "' \\ + str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{'", "database is compulsury. This can be done as:' aux3 = 'Data = LoadVar(ppath,\"Data\")'", "\\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]]))", "+ \\ str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux =", "= q[l]; nam = q[0] if l == 0: for m in range(0,int(len(row)/2)):", "time your database is modified you must refresh Data variable.' aux2 = 'The", "2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( '", "int: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is np.ndarray: try: x.shape[1]", "Correction if exact decimal in round. 
f, c = Y.shape for l in", "len(Data.keys()) - 1: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN +", "'\\n' print(' Variable name(s) are stored.') else: if l == 0: aux =", "range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,)", "= n[l] + 1; xaux = x[l][0]; uxaux = ux[l][0] yaux = np.array([xaux,", "* (l + 1) - 1)]] = B[:, [2]] with open(ppath + 'preview_TeX'", "p == int(3 * c / 2 - 3): if C[o, [int(p)]] ==", "2 - aux[m - 1]) if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l - 2] =", "+ 1) - 1)]] = B[:, [2]] with open(ppath + 'export_TeX' + '.txt',", "aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview LaTeX table. ################################################################### def", "& ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment. aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') #", "+ '\\n' else: aux = list(Data.keys())[l] rowNAME = rowNAME + ',' + str(aux)", "importing database is compulsury. This can be done as:' aux3 = 'Data =", "range(0,int(len(row)/2)): if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'],", "in a database. 
###################################################### def StoreVar(vardata, varname, ppath, database): Data = LoadVar(ppath,database) print('", "rowSYUN + ',' + Data[list(Data.keys())[l]]['sym'] + \\ ',' + Data[list(Data.keys())[l]]['uni'] with open(ppath +", "/ 2) * (c))], ) for l in range(1, c, 2): B =", "csvfile: spamreader = csv.reader(csvfile, delimiter=',') q = list(spamreader) print('# Loading ' + str(file)", "= {} elif l == 1: for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0]", "1) - 2)]] = B[:, [1]] C[:, [int((3 / 2) * (l +", "+ '},\\n') aux.write('};\\n\\n') # Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p = 0 for o in range(0,", "np.newaxis] ux = ux[:, np.newaxis] n = NumPow(ux) Y = np.concatenate((x / (10", "1)]])) + ' \\\\pm ' \\ + str(float(C[o, [int(p + 2)]])) + \\", "\\\\times 10^{\\\\infty} $ & ') else: aux.write('$ ( ' + str(float(C[o, [int(p +", "[int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ (", "- 1]) if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l - 2] = \\ float(row[2*m+1]) else:", "= \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2 -", "' \\\\pm ' \\ + str(float(C[o, [int(p + 2)]])) + \\ ' )", "row = q[l]; nam = q[0] if l == 0: for m in", "Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2 - aux[m - 1]) if", "Preview LaTeX table. ################################################################### def PreviewTableTeX(MAT, CAP, SYM, UNI, ppath): f, c =", "with open(ppath + database + '.csv', 'w') as aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing", "newline='') as csvfile: spamreader = csv.reader(csvfile, delimiter=',') q = list(spamreader) print('# Loading '", "' + '\\n' rowSYUN = rowSYUN + ',' + \\ str(Data[list(Data.keys())[l]]['sym']) + ','", "Wolfram. 
############################################################################### def WolframEx(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape if", "from termcolor import colored, cprint import subprocess from subprocess import DEVNULL, STDOUT, check_call", "is modified you must refresh Data variable.' aux2 = 'The very first time", "' \\\\pm ' + str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{'", "aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except:", "== 10: naux = n[l] + 1; xaux = x[l][0]; uxaux = ux[l][0]", "print metadata.') print('The number of events is ' + str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2);", "if len(Data) is 0: print(' Nothing to load.') Data = {} else: print('", "'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 + '{}'.format('-').rjust(12,'-') aux3 = aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat'])", "varname, ppath, database): Data = LoadVar(ppath,database) print(' Creating label in database dictionary.') Data[str(varname)]", "- 1: rowDAUN = rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' +", "'' + '\\n' aux.write(rowDAUN) else: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN =", "'export_TeX' + '.txt', 'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in range(0,", "'d({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have", "range(0, int((1 - 1 / 2) * c)): aux.write('c|') aux.write('} \\\\hline\\n') # Headings.", "rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n'", "= ux[:, np.newaxis] n = NumPow(ux) Y = np.concatenate((x / (10 ** n),", "Frame -> True, FrameLabel -> {\"' + SYM[0] + '(' + UNI[0] +", "o in range(0, f): for p in 
range(0, int(3 * c / 2", "- 2] = \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l -", "' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols", "Y[l][2] == 10: naux = n[l] + 1; xaux = x[l][0]; uxaux =", "[0]] C[:, [int((3 / 2) * (l + 1) - 2)]] = B[:,", "Y > abs(X)); return Y def UncRound(x, ux): if type(x) is float: x", "+ '\\n' aux1 = aux1 + 'Var[' + str(l+1) + '] - Units", "== int(f - 1): aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' +", "+ 1) - 1)]] = B[:, [2]] with open(ppath + 'preview_TeX' + '.tex',", "str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN + ',' + \\ '' + ',' +", "np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2 - aux[m - 1]) aux[m - 1] =", "else: print(' Exporter can not export the selected data to Wolfram: bad selection.')", "1: # 1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) +", "vector in Data aux1 = 0 for l in range(0, len(Data.keys())): aux1 =", "-*- ########################################################################################## # Importing packages. #################################################################### import numpy as np import csv import", "== c / 2 - 1: aux.write('$ ' + SYM[n] + ' \\\\", "axis=1) Y = np.concatenate((n, np.around(10 * Y) / 10), axis=1) # Correction if", "- 1] = aux[m - 1] + 1 print(' Load file success.') if", "/ 2)): if n == c / 2 - 1: aux.write('$ ' +", "'},ErrorBar[' + str(float(MAT[o, [int(p + 1)]])) + ',' + str(float(MAT[o, [int(p + 3)]]))", "= UncRound(x, ux); aux1 = aux1[0] print(' ' + str(aux1[1]) + '(' +", "aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP +", "csv import re import os import ast import math import sympy as sp", "(10 ** n), ux / (10 ** n)), axis=1) Y = np.concatenate((n, np.around(10", "range(0, f): if l == int(f - 1): aux.write('{' + str(float(MAT[l, [int(p +", "c==4: with open(ppath + 'export_WMT' + '.txt', 'a') as aux: aux.write(separ + 'x'", "[int(p + 2)]])) + '},ErrorBar[' + str(float(MAT[o, [int(p + 1)]])) + ',' +", "+ 1) - 3)]] = B[:, [0]] C[:, [int((3 / 2) * (l", "np.array([xaux, uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y def UncPrint(x, ux): try:", "+ str(float(MAT[l, [int(p + 2)]])) + '},\\n') aux.write('};\\n\\n') # Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p =", "def SetUp(): print('Setting Current Path is needed. Set it writting:') print('CURRENTPATH = \"your/path\"\\n')", ") for l in range(0, len(q)): row = q[l]; nam = q[0] if", "= 'The very first time importing database is compulsury. This can be done", "aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 +", "= extractor.ids - set(vars(math)) return extractor.ids ########################################################################################## ########################################################################################## # Round uncertainty functions. ###########################################################", "UncRound(x, ux) print(' ' + str(aux1[1]) + '(' + str(aux1[2]) + ') x", "[int(p + 1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) + ']}') else:", "( ' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' + str(float(C[o,", "\\ np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2 - aux[m - 1]) aux[m - 1]", "' ' rowSYUN = rowSYUN + ',' + Data[list(Data.keys())[l]]['sym'] + \\ ',' +", "re import os import ast import math import sympy as sp import scipy", "Variable extractor class and function. 
################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids = set()", "' + var[l][\"sym\"] + '\\n' aux1 = aux1 + 'Var[' + str(l+1) +", "+ 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables", "enviroment. aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm", "= aux1[0] print(' ' + str(aux1[1]) + '(' + str(aux1[2]) + ') x", "Data[str(nam[2*m])]['unc'][l - 2] = \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l", "if l == 0: aux = list(Data.keys())[l] rowNAME = str(aux) + ',' +", "if exact decimal in round. f, c = Y.shape for l in range(0,", "write_to_clipboard(output): process = subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE) process.communicate(output.encode('utf-8')) def read_from_clipboard(): return subprocess.check_output(", "= {} return Data ########################################################################################## ########################################################################################## # Store ONE variable in a database.", "- 2, ) for l in range(0, len(q)): row = q[l]; nam =", "+ \\ ',' + Data[list(Data.keys())[l]]['uni'] with open(ppath + database + '.csv', 'w') as", "'' for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4", "= B[:, [2]] with open(ppath + 'export_TeX' + '.txt', 'a') as aux: aux.write('\\\\begin{table}[H]", "'w') as aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing data and uncertainty row by row", "+ ',' + \\ str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ 
str(Data[list(Data.keys())[l]]['uni']) + '\\n' else:", "= Y - (10 ** Y > abs(X)); return Y def UncRound(x, ux):", "def disp(var): try: aux1 = ''; aux2 = ''; aux3 = '' for", "l in range(0, len(q)): row = q[l]; nam = q[0] if l ==", "2): B = UncRound(MAT[:, [l - 1]], MAT[:, [l]]) C[:, [int((3 / 2)", "+ \\ '' + '\\n' aux.write(rowDAUN) else: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1:", "list(Data.keys())[l] rowNAME = rowNAME + ',' + str(aux) + ',' + ' '", "in range(0, int(3 * c / 2 - 2), 3): if p ==", "not exist!') Data = {} return Data ########################################################################################## ########################################################################################## # Store ONE variable", "+ ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN = rowDAUN + ','", "+ '] - Units : ' + var[l][\"uni\"] + '\\n' aux2 = aux2", "0: aux = list(Data.keys())[l] rowNAME = str(aux) + ',' + ' ' rowSYUN", "[int((3 / 2) * (l + 1) - 2)]] = B[:, [1]] C[:,", "aux: aux.write(separ + 'x' + separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. aux.write('data={\\n');", "and ending enviroment. aux.write('LP = ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame ->", "str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $ & ') else:", "c / 2 - 3): if C[o, [int(p)]] == -np.inf: aux.write('$ ( '", "########################################################################################## ########################################################################################## # Load ALL variables in a database. ###################################################### def LoadVar(ppath, database):", "LaTeX table. 
################################################################### def PreviewTableTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape", "- (10 ** Y > abs(X)); return Y def UncRound(x, ux): if type(x)", "' + str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $ &", "[l - 1]], MAT[:, [l]]) C[:, [int((3 / 2) * (l + 1)", "rowNAME = rowNAME + ',' + str(aux) + ',' + ' ' +", "in range(0,int(len(row)/2)): if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\", "* (l + 1) - 2)]] = B[:, [1]] C[:, [int((3 / 2)", "Data variable.' aux2 = 'The very first time importing database is compulsury. This", "\\ np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2 - aux[m - 1]) if row[2*m+1] !=", "= 'Data = LoadVar(ppath,\"Data\")' print(aux1 + aux2 + '\\n' + aux3 + '\\n')", "' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $') aux.write('\\\\\\\\", "',' + \\ '' print(' Variable data and uncertainty are stored.') ########################################################################################## ##########################################################################################", "########################################################################################## # Wolfram. ############################################################################### def WolframEx(MAT, CAP, SYM, UNI, ppath): f, c =", "x = x[:, np.newaxis] ux = ux[:, np.newaxis] n = NumPow(ux) Y =", "\\ + str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $", "+ str(aux) + ',' + ' ' rowSYUN = rowSYUN + ',' +", "var[l][\"sym\"] + '\\n' aux1 = aux1 + 'Var[' + str(l+1) + '] -", "' \\\\ \\\\mathrm{(' + UNI[n] + ')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$", "else: rowDAUN = \\ '' + ',' + \\ '' elif m ==", "+ ',' + str(float(MAT[l, [int(p + 2)]])) + '},\\n') aux.write('};\\n\\n') # Errorbars. 
aux.write('EL=ErrorListPlot[{\\n');", "- 2), 3): if p == int(3 * c / 2 - 3):", "\\ l - 2 - aux[m - 1]) aux[m - 1] = aux[m", "Symbol: ' + var[l][\"sym\"] + '\\n' aux1 = aux1 + 'Var[' + str(l+1)", "n == c / 2 - 1: aux.write('$ ' + SYM[n] + '", "== 0: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) +", "B[:, [0]] C[:, [int((3 / 2) * (l + 1) - 2)]] =", "= ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame -> True, FrameLabel -> {\"'", "Export table to LaTeX document. ######################################################## def TableToTeX(MAT, CAP, SYM, UNI, ppath): f,", "success.') if len(Data) is 0: print(' Nothing to load.') Data = {} else:", "aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0, int((1 - 1 / 2) *", "+ ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else:", "[int(p)]] == -np.inf: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + '", "2] = \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2", "as aux: aux.write(separ + 'x' + separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points.", "\\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN) else: if m == 0: if", "for p in range(0, int(3 * c / 2 - 2), 3): if", "m in range(0, len(var[0]['dat'])): aux4 = '' for l in range(0,len(var)): aux4 =", "database. 
###################################################### def LoadVar(ppath, database): file = ppath + str(database) + '.csv'; Data", "SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n')", "aux = list(Data.keys())[l] rowNAME = str(aux) + ',' + ' ' + '\\n'", "aux1 = aux1 + 'Var[' + str(l+1) + '] - Units : '", "########################################################################################## ########################################################################################## # Round uncertainty functions. ########################################################### def NumPow(X): Y = np.around(np.log10(abs(X))); Y", "aux3 = 'Data = LoadVar(ppath,\"Data\")' print(aux1 + aux2 + '\\n' + aux3 +", "SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $ & ') #", "+ 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$", "+ ',' + Data[list(Data.keys())[l]]['sym'] + \\ ',' + Data[list(Data.keys())[l]]['uni'] with open(ppath + database", "' + var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3", "\\\\pm ' + str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $", "$ & ') # All rows and cols iterative. for o in range(0,", "and uncertainty are stored.') ########################################################################################## ########################################################################################## # Disp a ProjectMaker variable. ########################################################## def", "it writting:') print('CURRENTPATH = \"your/path\"\\n') aux1 = 'Each time your database is modified", "########################################################################################## ########################################################################################## # Wolfram. 
############################################################################### def WolframEx(MAT, CAP, SYM, UNI, ppath): f, c", "\\\\hline \\\\hline\\n') else: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n]", "open(ppath + 'export_TeX' + '.txt', 'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m", "']},\\n') aux.write('}];\\n\\n') # Final activities: caption and ending enviroment. aux.write('LP = ListPlot[data];\\n') aux.write('LL", "range(1, c, 2): B = UncRound(MAT[:, [l - 1]], MAT[:, [l]]) C[:, [int((3", "in range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of data table for", "'' elif m == len(Data.keys()) - 1: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1:", "0: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ','", "/ 2) * (l + 1) - 1)]] = B[:, [2]] with open(ppath", "q = list(spamreader) print('# Loading ' + str(file) + '.') aux = np.zeros(len(q[0])", "')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$ ' + SYM[n] + ' \\\\", "aux.write('data={\\n'); p = 0 for l in range(0, f): if l == int(f", "1]) aux[m - 1] = aux[m - 1] + 1 print(' Load file", "'{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the same lenght. 
So", "[int(p + 2)]])) + \\ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) +", "biggest vector in Data aux1 = 0 for l in range(0, len(Data.keys())): aux1", "np.array([[x]]) ux = np.array([[ux]]) elif type(x) is int: x = np.array([[x]]) ux =", "str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN) else: if m ==", "def UncPrint(x, ux): try: aux1 = UncRound(x, ux) print(' ' + str(aux1[1]) +", "print(' Database does not exist!') Data = {} return Data ########################################################################################## ########################################################################################## #", "in range(1, c, 2): B = UncRound(MAT[:, [l - 1]], MAT[:, [l]]) C[:,", "\\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\ '' + ',' + \\ '' elif", "os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm ' + ppath + 'preview_TeX.tex') #os.system('open -a Preview.app", "range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of data table for l", "except: print('Variables have not the same lenght. So no printing.') print('Variables to print", "to LaTeX document. ######################################################## def TableToTeX(MAT, CAP, SYM, UNI, ppath): f, c =", "aux2 + '\\n' + aux3 + '\\n') def write_to_clipboard(output): process = subprocess.Popen( 'pbcopy',", "/ 2) * (l + 1) - 2)]] = B[:, [1]] C[:, [int((3", "'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram. 
############################################################################### def WolframEx(MAT, CAP,", "str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and units are stored.')", "2)]])) + '},\\n') aux.write('};\\n\\n') # Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p = 0 for o in", "########################################################################################## # Preview LaTeX table. ################################################################### def PreviewTableTeX(MAT, CAP, SYM, UNI, ppath): f,", "ONE variable in a database. ###################################################### def StoreVar(vardata, varname, ppath, database): Data =", ": ' + var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 =", "str(l+1) + '] - Symbol: ' + var[l][\"sym\"] + '\\n' aux1 = aux1", "= aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the", "if len(Data.keys()) == 1: aux = list(Data.keys())[l] rowNAME = str(aux) + ',' +", "aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 + '{}'.format('-').rjust(12,'-')", "' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( ' + str(float(C[o, [int(p", "def dispu(var): try: aux1 = ''; aux2 = ''; aux3 = '' for", "to print metadata.') print('The number of events is ' + str(len(var[0]['dat']))) print(aux1); print(aux3);", "str(float(MAT[o, [int(p + 2)]])) + '},ErrorBar[' + str(float(MAT[o, [int(p + 1)]])) + ','", "& ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}\\n\\\\caption{' +", "+ ',' + \\ '' + '\\n' aux.write(rowDAUN) else: if l <= len((Data[list(Data.keys())[m]]['dat']))", "str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']') ########################################################################################## ########################################################################################## # Export", "len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the same lenght. So no printing.')", "m == 0: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l]))", "'' for l in range(0,len(var)): aux1 = aux1 + 'Var[' + str(l+1) +", "caption and ending enviroment. aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview", "= '--------------------------------------------' sectspa = ' ' ########################################################################################## ########################################################################################## # Variable extractor class and", "aux[m - 1]) aux[m - 1] = aux[m - 1] + 1 print('", "Units : ' + var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2", "1: for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] =", "== len(Data.keys()) - 1: aux = list(Data.keys())[l] rowNAME = rowNAME + ',' +", "(l + 1) - 1)]] = B[:, [2]] with open(ppath + 'preview_TeX' +", "range(0, max(aux1, len(vardata['dat']))): for m in range(0, len(Data.keys())): if len(Data.keys()) == 1: #", "c = MAT.shape C = np.zeros([f, int((3 / 2) * (c))], ) for", "m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] 
Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc']", "aux = list(Data.keys())[l] rowNAME = str(aux) + ',' + ' ' rowSYUN =", ") \\\\times 10^{\\\\infty} $ & ') else: aux.write('$ ( ' + str(float(C[o, [int(p", "= \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2 - aux[m - 1]) aux[m -", "Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else:", "= str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and units are", "10: naux = n[l] + 1; xaux = x[l][0]; uxaux = ux[l][0] yaux", "ux): if type(x) is float: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x)", "m in range(0, len(Data.keys())): if len(Data.keys()) == 1: # 1 var rowDAUN =", "def write_to_clipboard(output): process = subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE) process.communicate(output.encode('utf-8')) def read_from_clipboard(): return", "= 0 for l in range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers", "+ 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram. ############################################################################### def WolframEx(MAT,", "Database does not exist!') Data = {} return Data ########################################################################################## ########################################################################################## # Store", "a ProjectMaker variable. 
########################################################## def dispu(var): try: aux1 = ''; aux2 = '';", "extractor.ids ########################################################################################## ########################################################################################## # Round uncertainty functions. ########################################################### def NumPow(X): Y = np.around(np.log10(abs(X)));", "LoadVar(ppath,database) print(' Creating label in database dictionary.') Data[str(varname)] = vardata # Searching the", "metadata.') print('The number of events is ' + str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3)", "aux.write('EL=ErrorListPlot[{\\n'); p = 0 for o in range(0, f): if o == int(f-1):", "* (l + 1) - 3)]] = B[:, [0]] C[:, [int((3 / 2)", "numpy as np import csv import re import os import ast import math", "B[:, [2]] with open(ppath + 'export_TeX' + '.txt', 'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n')", "aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0, int((1 -", "ALL variables in a database. ###################################################### def LoadVar(ppath, database): file = ppath +", "* c)): aux.write('c|') aux.write('} \\\\hline\\n') # Headings. 
for n in range(0, int(c /", "list(Data.keys())[l] rowNAME = str(aux) + ',' + ' ' + '\\n' rowSYUN =", "# Headers of data table for l in range(0, len(Data.keys())): if len(Data.keys()) ==", "+ ',' + ' ' rowSYUN = rowSYUN + ',' + Data[list(Data.keys())[l]]['sym'] +", "',' + ' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) print('", "+ str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ &", "'{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') def disp(var): try: aux1 =", "1): aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p +", "GoldenRatio]\\n\\n') else: print(' Exporter can not export the selected data to Wolfram: bad", "= aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the", "+ 2)]])) + ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( '", "{} elif l == 1: for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni']", "+ Data[list(Data.keys())[l]]['sym'] + \\ ',' + Data[list(Data.keys())[l]]['uni'] with open(ppath + database + '.csv',", "in range(0, max(aux1, len(vardata['dat']))): for m in range(0, len(Data.keys())): if len(Data.keys()) == 1:", "str(aux1[1]) + '(' + str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']')", "events is ' + str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3) for m in range(0,", "l in range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of data table", "########################################################################################## # Disp a ProjectMaker variable. 
########################################################## def dispu(var): try: aux1 = '';", "and units are stored.') elif l == len(Data.keys()) - 1: aux = list(Data.keys())[l]", "rowNAME = str(aux) + ',' + ' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ','", "+'}') else: aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p", "str(aux) + ',' + ' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\", "Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for m in range(0,int(len(row)/2)): if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2]", "[int(p + 2)]])) + ' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$", "database is modified you must refresh Data variable.' aux2 = 'The very first", "csv.reader(csvfile, delimiter=',') q = list(spamreader) print('# Loading ' + str(file) + '.') aux", "',' + Data[list(Data.keys())[l]]['sym'] + \\ ',' + Data[list(Data.keys())[l]]['uni'] with open(ppath + database +", "print(aux1 + aux2 + '\\n' + aux3 + '\\n') def write_to_clipboard(output): process =", "',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN", "c = Y.shape for l in range(0, f): if Y[l][2] == 10: naux", "for l in range(0, f): if Y[l][2] == 10: naux = n[l] +", "'x' + separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. 
aux.write('data={\\n'); p = 0", "+ \\ ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( ' +", "if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' +", "1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN =", "return Y def UncRound(x, ux): if type(x) is float: x = np.array([[x]]) ux", "str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]]))", "scipy as sc from scipy import stats import sys import time from termcolor", "= IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids - set(vars(math)) return extractor.ids ########################################################################################## ########################################################################################## #", "return extractor.ids ########################################################################################## ########################################################################################## # Round uncertainty functions. ########################################################### def NumPow(X): Y =", "Headers of data table for l in range(0, len(Data.keys())): if len(Data.keys()) == 1:", "+ 1 print(' Load file success.') if len(Data) is 0: print(' Nothing to", "UNI[1] + ')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n') else: print(' Exporter can not export", "from subprocess import DEVNULL, STDOUT, check_call from sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations =", "= str(aux) + ',' + ' ' + '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) +", "aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing data and uncertainty row by row for l", "same lenght. 
So no printing.') print('Variables to print metadata.') print('The number of events", "+ ')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$ ' + SYM[n] + '", "2), 3): if p == int(3 * c / 2 - 3): if", "'{}'.format('-').rjust(12,'-') aux3 = aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have", "aux4 = aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') def", "+ '\\n' else: rowDAUN = rowDAUN + ',' + \\ '' + ','", "+ str(float(MAT[l, [int(p + 2)]])) +'}') else: aux.write('{' + str(float(MAT[l, [int(p + 0)]]))", "Data = {} if os.path.isfile(file) is True: with open(file, newline='') as csvfile: spamreader", "SetUp(): print('Setting Current Path is needed. Set it writting:') print('CURRENTPATH = \"your/path\"\\n') aux1", "import sys import time from termcolor import colored, cprint import subprocess from subprocess", "a database. ###################################################### def LoadVar(ppath, database): file = ppath + str(database) + '.csv';", "\\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\ '' +", ": ' + var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 =", "n = NumPow(ux) Y = np.concatenate((x / (10 ** n), ux / (10", "ux); aux1 = aux1[0] print(' ' + str(aux1[1]) + '(' + str(aux1[2]) +", "rowNAME = str(aux) + ',' + ' ' + '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym'])", "import ast import math import sympy as sp import scipy as sc from", "iterative. 
for o in range(0, f): for p in range(0, int(3 * c", "+ ') x 10[' + str(aux1[0]) + ']') except: aux1 = UncRound(x, ux);", "[int(p + 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ ') else: aux.write('$", "Data = {} return Data ########################################################################################## ########################################################################################## # Store ONE variable in a", "-np.inf: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm '", "0 for l in range(0, f): if l == int(f - 1): aux.write('{'", "int((1 - 1 / 2) * c)): aux.write('c|') aux.write('} \\\\hline\\n') # Headings. for", "import re import os import ast import math import sympy as sp import", "are stored.') ########################################################################################## ########################################################################################## # Disp a ProjectMaker variable. ########################################################## def dispu(var): try:", "- 1] + 1 print(' Load file success.') if len(Data) is 0: print('", "else: aux = list(Data.keys())[l] rowNAME = rowNAME + ',' + str(aux) + ','", "0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) + '},\\n') aux.write('};\\n\\n') # Errorbars.", "aux1[0] print(' ' + str(aux1[1]) + '(' + str(aux1[2]) + ') x 10['", "print('Status Failure.') def disp(var): try: aux1 = ''; aux2 = ''; aux3 =", "3)]])) + ']},\\n') aux.write('}];\\n\\n') # Final activities: caption and ending enviroment. 
aux.write('LP =", "= UncRound(x, ux) print(' ' + str(aux1[1]) + '(' + str(aux1[2]) + ')", "+ '.txt', 'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in range(0, int((1", "'/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram. ############################################################################### def", "+ '\\n') except: print('Status Failure.') def disp(var): try: aux1 = ''; aux2 =", "as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in range(0, int((1 - 1 /", "',' + \\ str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux", "colored, cprint import subprocess from subprocess import DEVNULL, STDOUT, check_call from sympy.parsing.sympy_parser import", "###################################################### def LoadVar(ppath, database): file = ppath + str(database) + '.csv'; Data =", "float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2 - aux[m -", "aux1 = UncRound(x, ux); aux1 = aux1[0] print(' ' + str(aux1[1]) + '('", "+ str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']') ########################################################################################## ########################################################################################## #", "label in database dictionary.') Data[str(varname)] = vardata # Searching the biggest vector in", "Set it writting:') print('CURRENTPATH = \"your/path\"\\n') aux1 = 'Each time your database is", "C = np.zeros([f, int((3 / 2) * (c))], ) for l in range(1,", "= rowSYUN + ',' + \\ str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ 
str(Data[list(Data.keys())[l]]['uni']) +", "+ str(float(MAT[o, [int(p + 1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) +", "LoadVar(ppath,\"Data\")' print(aux1 + aux2 + '\\n' + aux3 + '\\n') def write_to_clipboard(output): process", "np.around(10*yaux)/10), axis=0) return Y def UncPrint(x, ux): try: aux1 = UncRound(x, ux) print('", "str(aux1[0]) + ']') except: aux1 = UncRound(x, ux); aux1 = aux1[0] print(' '", "\\ '' + ',' + \\ '' elif m == len(Data.keys()) - 1:", "for l in range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of data", "+ \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN) else: if m == 0:", "in range(0, f): if o == int(f-1): aux.write('{{' + str(float(MAT[o, [int(p + 0)]]))", "1; xaux = x[l][0]; uxaux = ux[l][0] yaux = np.array([xaux, uxaux]) Y[l] =", "preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram. ############################################################################### def WolframEx(MAT, CAP, SYM, UNI, ppath): f,", "** Y > abs(X)); return Y def UncRound(x, ux): if type(x) is float:", "preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram. 
############################################################################### def WolframEx(MAT, CAP, SYM, UNI,", "[int(p + 1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) + ']},\\n') aux.write('}];\\n\\n')", "'preview_TeX' + '.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n')", "+ '(' + str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']') except:", "= MAT.shape if c==4: with open(ppath + 'export_WMT' + '.txt', 'a') as aux:", "aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') ########################################################################################## def SetUp():", "n)), axis=1) Y = np.concatenate((n, np.around(10 * Y) / 10), axis=1) # Correction", "########################################################################################## # Importing packages. #################################################################### import numpy as np import csv import re", "[int(p + 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) +'}') else: aux.write('{'", "= ' ' ########################################################################################## ########################################################################################## # Variable extractor class and function. ################################################# class", "len(Data.keys()) == 1: aux = list(Data.keys())[l] rowNAME = str(aux) + ',' + '", "int(c / 2)): if n == c / 2 - 1: aux.write('$ '", "and function. 
################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids = set() def visit_Name(self, node):", "Data = {} else: print(' Database does not exist!') Data = {} return", "print(aux1); print(aux3); print(aux2); print(aux3) for m in range(0, len(var[0]['dat'])): aux4 = '' for", "dispu(var): try: aux1 = ''; aux2 = ''; aux3 = '' for l", "transformations = standard_transformations + (implicit_multiplication,) separ = '--------------------------------------------' sectspa = ' ' ##########################################################################################", "'\\n') except: print('Status Failure.') ########################################################################################## def SetUp(): print('Setting Current Path is needed. Set", "+ ')} $ & ') # All rows and cols iterative. for o", "Y = Y - (10 ** Y > abs(X)); return Y def UncRound(x,", "f): if Y[l][2] == 10: naux = n[l] + 1; xaux = x[l][0];", "+ str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{' + str(int(C[o,", "len(Data.keys())): if len(Data.keys()) == 1: aux = list(Data.keys())[l] rowNAME = str(aux) + ','", "########################################################################################## # Load ALL variables in a database. 
###################################################### def LoadVar(ppath, database): file", "rowDAUN = \\ '' + ',' + \\ '' elif m == len(Data.keys())", "+ 'preview_TeX' + '.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H]", "+ str(float(MAT[o, [int(p + 2)]])) + '},ErrorBar[' + str(float(MAT[o, [int(p + 1)]])) +", "for l in range(0, max(aux1, len(vardata['dat']))): for m in range(0, len(Data.keys())): if len(Data.keys())", "\\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2 - aux[m", "Current Path is needed. Set it writting:') print('CURRENTPATH = \"your/path\"\\n') aux1 = 'Each", "+ ' ' + '\\n' rowSYUN = rowSYUN + ',' + \\ str(Data[list(Data.keys())[l]]['sym'])", "+ ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN + ',' + \\", "len(var[0]['dat']) except: print('Variables have not the same lenght. So no printing.') print('Variables to", "in range(0, len(Data.keys())): if len(Data.keys()) == 1: # 1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l]))", "print(' Variable name(s) are stored.') else: if l == 0: aux = list(Data.keys())[l]", "1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) + ']},\\n') aux.write('}];\\n\\n') # Final", "else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2 - aux[m - 1])", "aux1 = 0 for l in range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) #", "# -*- coding: UTF-8 -*- ########################################################################################## # Importing packages. 
#################################################################### import numpy as", "= np.concatenate((n, np.around(10 * Y) / 10), axis=1) # Correction if exact decimal", "\\\\hline\\n') else: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] +", "\\\\pm ' \\ + str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times", "open(ppath + 'preview_TeX' + '.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n')", "########################################################################################## ########################################################################################## # Export table to LaTeX document. ######################################################## def TableToTeX(MAT, CAP, SYM,", "UNI[n] + ')} $ & ') # All rows and cols iterative. for", "n), ux / (10 ** n)), axis=1) Y = np.concatenate((n, np.around(10 * Y)", "',' + str(float(MAT[o, [int(p + 2)]])) + '},ErrorBar[' + str(float(MAT[o, [int(p + 1)]]))", "+ ' ' + '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni'])", "variables in a database. ###################################################### def LoadVar(ppath, database): file = ppath + str(database)", "uncertainty functions. 
########################################################### def NumPow(X): Y = np.around(np.log10(abs(X))); Y = Y - (10", "/ 2) * (l + 1) - 3)]] = B[:, [0]] C[:, [int((3", "data table for l in range(0, len(Data.keys())): if len(Data.keys()) == 1: aux =", "1: aux = list(Data.keys())[l] rowNAME = rowNAME + ',' + str(aux) + ','", "range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif l == 1: for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym']", "no printing.') print('Variables to print metadata.') print('The number of events is ' +", "') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment. aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling", "l in range(1, c, 2): B = UncRound(MAT[:, [l - 1]], MAT[:, [l]])", "for m in range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif l == 1: for m", "l == 0: for m in range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif l ==", "(10 ** n)), axis=1) Y = np.concatenate((n, np.around(10 * Y) / 10), axis=1)", "import sympy as sp import scipy as sc from scipy import stats import", "for n in range(0, int(c / 2)): if n == c / 2", "+ ' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$ ( ' +", "else: if m == 0: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN =", "def UncRound(x, ux): if type(x) is float: x = np.array([[x]]) ux = np.array([[ux]])", "try: aux1 = UncRound(x, ux) print(' ' + str(aux1[1]) + '(' + str(aux1[2])", "' ' + '\\n' rowSYUN = rowSYUN + ',' + \\ str(Data[list(Data.keys())[l]]['sym']) +", "termcolor import colored, cprint import subprocess from subprocess import DEVNULL, STDOUT, check_call from", "import (parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations + (implicit_multiplication,) separ = '--------------------------------------------' sectspa =", "printing.') print('Variables to print metadata.') print('The number of events is ' + 
str(len(var[0]['dat'])))", "' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' \\ + str(float(C[o,", "\"' + SYM[1] + '(' + UNI[1] + ')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n')", "print('Status Failure.') ########################################################################################## def SetUp(): print('Setting Current Path is needed. Set it writting:')", "import os import ast import math import sympy as sp import scipy as", "= rowDAUN + ',' + \\ '' + ',' + \\ '' +", "Points. aux.write('data={\\n'); p = 0 for l in range(0, f): if l ==", "def __init__(self): self.ids = set() def visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN): extractor =", "+ ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable name(s) are stored.') else:", "aux3 + '\\n') def write_to_clipboard(output): process = subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE) process.communicate(output.encode('utf-8'))", "= aux1 + 'Var[' + str(l+1) + '] - Units : ' +", "+ '\\n' aux.write(rowDAUN) else: if m == 0: if l <= len((Data[list(Data.keys())[m]]['dat'])) -", "+ '\\n' print(' Variable name(s) are stored.') else: if l == 0: aux", "# Headings. for n in range(0, int(c / 2)): if n == c", "in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4)", "1] = aux[m - 1] + 1 print(' Load file success.') if len(Data)", "' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$ ( ' + str(float(C[o,", "to load.') Data = {} else: print(' Database does not exist!') Data =", "l in range(0,len(var)): aux1 = aux1 + 'Var[' + str(l+1) + '] -", "float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2 - aux[m -", "Final activities: caption and ending enviroment. 
aux.write('LP = ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP,", "+ 3)]])) + ']},\\n') aux.write('}];\\n\\n') # Final activities: caption and ending enviroment. aux.write('LP", "have not the same lenght. So no printing.') print('Variables to print metadata.') print('The", "np.zeros(len(q[0]) - 2, ) for l in range(0, len(q)): row = q[l]; nam", "3): if p == int(3 * c / 2 - 3): if C[o,", "l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l]))", "aux1 = 'Each time your database is modified you must refresh Data variable.'", ") \\\\times 10^{\\\\infty} $ ') else: aux.write('$ ( ' + str(float(C[o, [int(p +", "+ \\ ' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$ ( '", "+ str(file) + '.') aux = np.zeros(len(q[0]) - 2, ) for l in", "+ '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') ########################################################################################## def SetUp(): print('Setting", "str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3) for m in range(0, len(var[0]['dat'])): aux4 = ''", "'a') as aux: aux.write(separ + 'x' + separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') #", "- 1): aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p", "+ '(' + UNI[0] + ')\", \"' + SYM[1] + '(' + UNI[1]", "aux[m - 1] = aux[m - 1] + 1 print(' Load file success.')", "\\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN = rowDAUN + ',' + \\ ''", "+ ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\ '' + ',' +", "except: print('Status Failure.') def disp(var): try: aux1 = ''; aux2 = ''; aux3", "+ ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN =", "= list(Data.keys())[l] rowNAME = rowNAME + 
',' + str(aux) + ',' + '", "time importing database is compulsury. This can be done as:' aux3 = 'Data", "is 0: print(' Nothing to load.') Data = {} else: print(' Database does", "preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm ' + ppath + 'preview_TeX.tex') #os.system('open -a Preview.app '", "* c / 2 - 3): if C[o, [int(p)]] == -np.inf: aux.write('$ (", "Variable symbols and units are stored.') elif l == len(Data.keys()) - 1: aux", "+ 'Var[' + str(l+1) + '] - Symbol: ' + var[l][\"sym\"] + '\\n'", "ppath + 'preview_TeX.tex') #os.system('open -a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf')", "+ str(aux1[1]) + '(' + str(aux1[2]) + ') x 10[' + str(aux1[0]) +", "implicit_multiplication) transformations = standard_transformations + (implicit_multiplication,) separ = '--------------------------------------------' sectspa = ' '", "[int(p + 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ & ') else:", "Writing data and uncertainty row by row for l in range(0, max(aux1, len(vardata['dat']))):", "'' + ',' + \\ '' elif m == len(Data.keys()) - 1: if", "os.system('rm preview_TeX.aux') os.system('rm ' + ppath + 'preview_TeX.tex') #os.system('open -a Preview.app ' +", "+ str(aux1[0]) + ']') ########################################################################################## ########################################################################################## # Export table to LaTeX document. ########################################################", "'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m", "activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## #", "+ str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{\\\\infty} $ ')", "row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l", "in range(0, int((1 - 1 / 2) * c)): aux.write('c|') aux.write('} \\\\hline\\n') #", "ux): try: aux1 = UncRound(x, ux) print(' ' + str(aux1[1]) + '(' +", "= LoadVar(ppath,database) print(' Creating label in database dictionary.') Data[str(varname)] = vardata # Searching", "if c==4: with open(ppath + 'export_WMT' + '.txt', 'a') as aux: aux.write(separ +", "Y = np.around(np.log10(abs(X))); Y = Y - (10 ** Y > abs(X)); return", "# Final activities: caption and ending enviroment. aux.write('LP = ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n')", "Searching the biggest vector in Data aux1 = 0 for l in range(0,", "+ UNI[0] + ')\", \"' + SYM[1] + '(' + UNI[1] + ')\"},AspectRatio", "xaux = x[l][0]; uxaux = ux[l][0] yaux = np.array([xaux, uxaux]) Y[l] = np.concatenate((naux,", "= 0 for l in range(0, f): if l == int(f - 1):", "x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is int: x = np.array([[x]])", "np.concatenate((x / (10 ** n), ux / (10 ** n)), axis=1) Y =", "= np.concatenate((x / (10 ** n), ux / (10 ** n)), axis=1) Y", "\\ l - 2 - aux[m - 1]) if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l", "1) - 1)]] = B[:, [2]] with open(ppath + 'export_TeX' + '.txt', 'a')", "$ & ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n')", "+ var[l][\"sym\"] + '\\n' aux1 = aux1 + 'Var[' + str(l+1) + ']", "table for l in range(0, len(Data.keys())): if len(Data.keys()) == 1: aux = list(Data.keys())[l]", "extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids - set(vars(math)) return extractor.ids ########################################################################################## ##########################################################################################", "variable. ########################################################## def dispu(var): try: aux1 = ''; aux2 = ''; aux3 =", "Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for m in", "10[' + str(aux1[0]) + ']') except: aux1 = UncRound(x, ux); aux1 = aux1[0]", "= rowNAME + ',' + str(aux) + ',' + ' ' rowSYUN =", "1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) + ']}') else: aux.write('{{' +", "VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids - set(vars(math)) return extractor.ids ##########################################################################################", "if l == 0: for m in range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif l", "+ 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 + '{}'.format('-').rjust(12,'-') aux3 = aux3 + '{}'.format('-').rjust(12,'-') try:", "'(' + UNI[1] + ')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n') else: print(' Exporter can", "\\\\mathrm{(' + UNI[n] + ')} $ & ') # All rows and cols", "= \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. 
aux.write('data={\\n'); p = 0 for l in range(0, f):", "= np.array([[x]]) ux = np.array([[ux]]) elif type(x) is np.ndarray: try: x.shape[1] except: x", "+ \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and units are stored.') elif l ==", "import scipy as sc from scipy import stats import sys import time from", "var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12)", "aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0, int((1 - 1 /", "Y def UncRound(x, ux): if type(x) is float: x = np.array([[x]]) ux =", "+ ',' + \\ '' + ',' + \\ '' print(' Variable data", "enviroment. aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview LaTeX table. 
###################################################################", "int(f - 1): aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l,", "x 10[' + str(aux1[0]) + ']') ########################################################################################## ########################################################################################## # Export table to LaTeX", "rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN) else: if", "- 1: aux = list(Data.keys())[l] rowNAME = rowNAME + ',' + str(aux) +", "except: aux1 = UncRound(x, ux); aux1 = aux1[0] print(' ' + str(aux1[1]) +", "def visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids", "len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of data table for l in", "as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in", "np.zeros([f, int((3 / 2) * (c))], ) for l in range(1, c, 2):", "+ 2)]])) + \\ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '}", "= '' for l in range(0,len(var)): aux1 = aux1 + 'Var[' + str(l+1)", "- 1: rowDAUN = \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN", "= standard_transformations + (implicit_multiplication,) separ = '--------------------------------------------' sectspa = ' ' ########################################################################################## 
##########################################################################################", "'\\n') except: print('Status Failure.') def disp(var): try: aux1 = ''; aux2 = '';", "m in range(0,int(len(row)/2)): if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] =", "def WolframEx(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape if c==4: with", "os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram. ############################################################################### def WolframEx(MAT, CAP, SYM,", "1 / GoldenRatio]\\n\\n') else: print(' Exporter can not export the selected data to", "' + var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2", "' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ & ') aux.write('\\\\\\\\", "= rowSYUN + ',' + Data[list(Data.keys())[l]]['sym'] + \\ ',' + Data[list(Data.keys())[l]]['uni'] with open(ppath", "x[l][0]; uxaux = ux[l][0] yaux = np.array([xaux, uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0)", "* c / 2 - 2), 3): if p == int(3 * c", "np.array([[x]]) ux = np.array([[ux]]) elif type(x) is np.ndarray: try: x.shape[1] except: x =", "for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,)", "range(0, f): if Y[l][2] == 10: naux = n[l] + 1; xaux =", "extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids - set(vars(math)) return extractor.ids ########################################################################################## ########################################################################################## # Round uncertainty", "Loading ' + str(file) + '.') aux = np.zeros(len(q[0]) - 2, 
) for", "str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux = list(Data.keys())[l] rowNAME", "o == int(f-1): aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o,", "database. ###################################################### def StoreVar(vardata, varname, ppath, database): Data = LoadVar(ppath,database) print(' Creating label", "+ 'export_TeX' + '.txt', 'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in", "= np.array([[x]]) ux = np.array([[ux]]) elif type(x) is int: x = np.array([[x]]) ux", "+ aux3 + '\\n') def write_to_clipboard(output): process = subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE)", "[2]] with open(ppath + 'export_TeX' + '.txt', 'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|')", "# Preview LaTeX table. ################################################################### def PreviewTableTeX(MAT, CAP, SYM, UNI, ppath): f, c", "in Data aux1 = 0 for l in range(0, len(Data.keys())): aux1 = max(len(Data[list(Data.keys())[l]]['dat']),", "+ 1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) + ']}') else: aux.write('{{'", "TableToTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape C = np.zeros([f, int((3", "Load ALL variables in a database. 
###################################################### def LoadVar(ppath, database): file = ppath", "= subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE) process.communicate(output.encode('utf-8')) def read_from_clipboard(): return subprocess.check_output( 'pbpaste', env={'LANG':", "type(x) is int: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is np.ndarray:", "C[:, [int((3 / 2) * (l + 1) - 3)]] = B[:, [0]]", "= str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable name(s) are", "= '' for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3", "aux.write('Show[LP, LL, Frame -> True, FrameLabel -> {\"' + SYM[0] + '(' +", "len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ','", "range(0, len(Data.keys())): if len(Data.keys()) == 1: aux = list(Data.keys())[l] rowNAME = str(aux) +", "int(3 * c / 2 - 3): if C[o, [int(p)]] == -np.inf: aux.write('$", "+ ',' + \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and units are stored.') elif", "self.ids.add(node.id) def VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids - set(vars(math)) return", "Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y def UncPrint(x, ux): try: aux1 =", "2)]])) + '},ErrorBar[' + str(float(MAT[o, [int(p + 1)]])) + ',' + str(float(MAT[o, [int(p", "else: aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o, [int(p +", "UNI[0] + ')\", \"' + SYM[1] + '(' + UNI[1] + ')\"},AspectRatio ->", "+ 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm ' + ppath +", "Data[list(Data.keys())[l]]['uni'] with open(ppath + database + '.csv', 'w') as aux: aux.write(rowNAME) aux.write(rowSYUN) #", 
"is float: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is int: x", "2, ) for l in range(0, len(q)): row = q[l]; nam = q[0]", "must refresh Data variable.' aux2 = 'The very first time importing database is", "0: print(' Nothing to load.') Data = {} else: print(' Database does not", "+ ',' + ' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni'])", "Y = np.concatenate((n, np.around(10 * Y) / 10), axis=1) # Correction if exact", "disp(var): try: aux1 = ''; aux2 = ''; aux3 = '' for l", "',' + \\ '' elif m == len(Data.keys()) - 1: if l <=", "- 1]], MAT[:, [l]]) C[:, [int((3 / 2) * (l + 1) -", "print('CURRENTPATH = \"your/path\"\\n') aux1 = 'Each time your database is modified you must", "not the same lenght. So no printing.') print('Variables to print metadata.') print('The number", "Data[list(Data.keys())[l]]['sym'] + \\ ',' + Data[list(Data.keys())[l]]['uni'] with open(ppath + database + '.csv', 'w')", "2)]])) +'}') else: aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l,", "row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for m in range(0,int(len(row)/2)): if", "B[:, [1]] C[:, [int((3 / 2) * (l + 1) - 1)]] =", "int(f-1): aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o, [int(p +", "aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $')", "'] - Symbol: ' + var[l][\"sym\"] + '\\n' aux1 = aux1 + 'Var['", "round. 
f, c = Y.shape for l in range(0, f): if Y[l][2] ==", "1: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN + ',' +", "l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12)", "2) * (l + 1) - 1)]] = B[:, [2]] with open(ppath +", "= rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) +", "SYM, UNI, ppath): f, c = MAT.shape if c==4: with open(ppath + 'export_WMT'", "in range(0, f): if l == int(f - 1): aux.write('{' + str(float(MAT[l, [int(p", "rowDAUN + ',' + \\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN", "node): self.ids.add(node.id) def VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids - set(vars(math))", "'(' + UNI[0] + ')\", \"' + SYM[1] + '(' + UNI[1] +", "2) * c)): aux.write('c|') aux.write('} \\\\hline\\n') # Headings. for n in range(0, int(c", "'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in range(0, int((1 - 1", "elif type(x) is np.ndarray: try: x.shape[1] except: x = x[:, np.newaxis] ux =", "as csvfile: spamreader = csv.reader(csvfile, delimiter=',') q = list(spamreader) print('# Loading ' +", "= str(aux) + ',' + ' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' +", "vardata # Searching the biggest vector in Data aux1 = 0 for l", "'\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat'])", "np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y def UncPrint(x, ux): try: aux1 = UncRound(x, ux)", "+ ']},\\n') aux.write('}];\\n\\n') # Final activities: caption and ending enviroment. 
aux.write('LP = ListPlot[data];\\n')", "c / 2 - 1: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{('", "'.csv', 'w') as aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing data and uncertainty row by", "ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame -> True, FrameLabel -> {\"' +", "document. ######################################################## def TableToTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape C", "= {} if os.path.isfile(file) is True: with open(file, newline='') as csvfile: spamreader =", "-> 1 / GoldenRatio]\\n\\n') else: print(' Exporter can not export the selected data", "'},\\n') aux.write('};\\n\\n') # Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p = 0 for o in range(0, f):", "x[:, np.newaxis] ux = ux[:, np.newaxis] n = NumPow(ux) Y = np.concatenate((x /", "str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable name(s) are stored.')", "3): if C[o, [int(p)]] == -np.inf: aux.write('$ ( ' + str(float(C[o, [int(p +", "l in range(0, f): if Y[l][2] == 10: naux = n[l] + 1;", "l == int(f - 1): aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ','", "print('Setting Current Path is needed. Set it writting:') print('CURRENTPATH = \"your/path\"\\n') aux1 =", "table to LaTeX document. 
######################################################## def TableToTeX(MAT, CAP, SYM, UNI, ppath): f, c", "f, c = MAT.shape if c==4: with open(ppath + 'export_WMT' + '.txt', 'a')", "elif type(x) is int: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is", "\\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux = list(Data.keys())[l] rowNAME = rowNAME + ','", "ppath, database): Data = LoadVar(ppath,database) print(' Creating label in database dictionary.') Data[str(varname)] =", "+ \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\ '' + ',' + \\ ''", "elif m == len(Data.keys()) - 1: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN", "with open(ppath + 'export_TeX' + '.txt', 'a') as aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for", "aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the same", "stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log') os.system('rm preview_TeX.aux') os.system('rm ' + ppath + 'preview_TeX.tex') #os.system('open", "aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in range(0, int((1 - 1 / 2) *", "aux.write('}];\\n\\n') # Final activities: caption and ending enviroment. aux.write('LP = ListPlot[data];\\n') aux.write('LL =", "= csv.reader(csvfile, delimiter=',') q = list(spamreader) print('# Loading ' + str(file) + '.')", "/ 2 - 2), 3): if p == int(3 * c / 2", "len(Data.keys())): if len(Data.keys()) == 1: # 1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\", "/ 2) * c)): aux.write('c|') aux.write('} \\\\hline\\n') # Headings. for n in range(0,", "table. 
################################################################### def PreviewTableTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape C", "try: aux1 = ''; aux2 = ''; aux3 = '' for l in", "can be done as:' aux3 = 'Data = LoadVar(ppath,\"Data\")' print(aux1 + aux2 +", "done as:' aux3 = 'Data = LoadVar(ppath,\"Data\")' print(aux1 + aux2 + '\\n' +", "caption and ending enviroment. aux.write('LP = ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame", "str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable name(s) are stored.') else: if l == 0:", "C[:, [int((3 / 2) * (l + 1) - 2)]] = B[:, [1]]", "'': Data[str(nam[2*m])]['unc'][l - 2] = \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\", "Store ONE variable in a database. ###################################################### def StoreVar(vardata, varname, ppath, database): Data", "-a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ##########################################################################################", "- 2 - aux[m - 1]) if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l - 2]", "+ \\ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ ')", "str(aux) + ',' + ' ' + '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ','", "(c))], ) for l in range(1, c, 2): B = UncRound(MAT[:, [l -", "\\\\ \\\\mathrm{(' + UNI[n] + ')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$ '", "bad selection.') ########################################################################################## ########################################################################################## # Load ALL variables in a database. 
###################################################### def", "DEVNULL, STDOUT, check_call from sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations + (implicit_multiplication,)", "'.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for", "stored.') elif l == len(Data.keys()) - 1: aux = list(Data.keys())[l] rowNAME = rowNAME", "/ (10 ** n)), axis=1) Y = np.concatenate((n, np.around(10 * Y) / 10),", "' + '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n'", "2) * (l + 1) - 3)]] = B[:, [0]] C[:, [int((3 /", "'] - Units : ' + var[l][\"uni\"] + '\\n' aux2 = aux2 +", "- Units : ' + var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12)", "- 1)]] = B[:, [2]] with open(ppath + 'preview_TeX' + '.tex', 'w') as", "elif l == len(Data.keys()) - 1: aux = list(Data.keys())[l] rowNAME = rowNAME +", "' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram.", "$ & ') aux.write('\\\\\\\\ \\\\hline\\n') # Final activities: caption and ending enviroment. 
aux.write('\\\\end{tabular}\\n\\\\caption{'", "\\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN = rowDAUN", "+ 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) +'}') else: aux.write('{' +", "= max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of data table for l in range(0, len(Data.keys())):", "UncRound(MAT[:, [l - 1]], MAT[:, [l]]) C[:, [int((3 / 2) * (l +", "aux4 = '' for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4)", "== 1: for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1] Data[str(nam[2*m])]['dat']", "str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux = list(Data.keys())[l] rowNAME = rowNAME + ',' +", "+ ' \\\\pm ' \\ + str(float(C[o, [int(p + 2)]])) + \\ '", "+ '\\n') except: print('Status Failure.') ########################################################################################## def SetUp(): print('Setting Current Path is needed.", "for m in range(0, len(var[0]['dat'])): aux4 = '' for l in range(0,len(var)): aux4", "in range(0,len(var)): aux1 = aux1 + 'Var[' + str(l+1) + '] - Symbol:", "of events is ' + str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3) for m in", "str(aux1[0]) + ']') ########################################################################################## ########################################################################################## # Export table to LaTeX document. 
######################################################## def", "is np.ndarray: try: x.shape[1] except: x = x[:, np.newaxis] ux = ux[:, np.newaxis]", "ux = np.array([[ux]]) elif type(x) is int: x = np.array([[x]]) ux = np.array([[ux]])", "str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = \\ '' + ','", "[int(p + 1)]])) + ' \\\\pm ' \\ + str(float(C[o, [int(p + 2)]]))", "+ (implicit_multiplication,) separ = '--------------------------------------------' sectspa = ' ' ########################################################################################## ########################################################################################## # Variable", "np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2 - aux[m - 1]) if row[2*m+1] != '':", "else: print(' Database does not exist!') Data = {} return Data ########################################################################################## ##########################################################################################", "for m in range(0, int((1 - 1 / 2) * c)): aux.write('c|') aux.write('}", "\\ '' + ',' + \\ '' print(' Variable data and uncertainty are", "+ str(l+1) + '] - Units : ' + var[l][\"uni\"] + '\\n' aux2", "[int(p + 2)]])) + '},\\n') aux.write('};\\n\\n') # Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p = 0 for", "selection.') ########################################################################################## ########################################################################################## # Load ALL variables in a database. 
###################################################### def LoadVar(ppath,", "$ ') else: aux.write('$ ( ' + str(float(C[o, [int(p + 1)]])) + '", "cprint import subprocess from subprocess import DEVNULL, STDOUT, check_call from sympy.parsing.sympy_parser import (parse_expr,standard_transformations,", "- 2 - aux[m - 1]) aux[m - 1] = aux[m - 1]", "np.ndarray: try: x.shape[1] except: x = x[:, np.newaxis] ux = ux[:, np.newaxis] n", "[int((3 / 2) * (l + 1) - 3)]] = B[:, [0]] C[:,", "+ 'export_WMT' + '.txt', 'a') as aux: aux.write(separ + 'x' + separ +", "= UncRound(MAT[:, [l - 1]], MAT[:, [l]]) C[:, [int((3 / 2) * (l", "aux: aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in range(0, int((1 - 1 / 2)", "Headings. for n in range(0, int(c / 2)): if n == c /", "name(s) are stored.') else: if l == 0: aux = list(Data.keys())[l] rowNAME =", "np.array([[ux]]) elif type(x) is int: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x)", "' + str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' + str(float(C[o, [int(p", "+ str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']') except: aux1 =", "+ '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n') except: print('Status", "str(int(C[o, [int(p)]])) + '} $ ') else: if C[o, [int(p)]] == -np.inf: aux.write('$", "Variable name(s) are stored.') else: if l == 0: aux = list(Data.keys())[l] rowNAME", "[int((3 / 2) * (l + 1) - 1)]] = B[:, [2]] with", "\\\\ \\\\mathrm{(' + UNI[n] + ')} $ & ') # All rows and", "#os.system('open -a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf') ##########################################################################################", "+ SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $ & ')", "aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 = aux4 + 
'{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n') except:", "+ ' ' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable", "= B[:, [0]] C[:, [int((3 / 2) * (l + 1) - 2)]]", "\\ str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN + ','", "range(0, f): if o == int(f-1): aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) +", "True: with open(file, newline='') as csvfile: spamreader = csv.reader(csvfile, delimiter=',') q = list(spamreader)", "else: rowDAUN = rowDAUN + ',' + \\ '' + ',' + \\", "'\\n' aux.write(rowDAUN) else: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN +", "Variable data and uncertainty are stored.') ########################################################################################## ########################################################################################## # Disp a ProjectMaker variable.", "+ 2)]])) + '},\\n') aux.write('};\\n\\n') # Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p = 0 for o", "<reponame>marromlam/quick-memos # -*- coding: UTF-8 -*- ########################################################################################## # Importing packages. 
#################################################################### import numpy", "+ UNI[n] + ')} $ & ') # All rows and cols iterative.", "Units : ' + var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3", "'\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 =", "n in range(0, int(c / 2)): if n == c / 2 -", "')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n') else: print(' Exporter can not export the selected", "= \"your/path\"\\n') aux1 = 'Each time your database is modified you must refresh", "= np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y def UncPrint(x, ux): try: aux1 = UncRound(x,", "= row[2*m+1] Data[str(nam[2*m])]['dat'] = np.zeros(len(q)-2,) Data[str(nam[2*m])]['unc'] = np.zeros(len(q)-2,) else: for m in range(0,int(len(row)/2)):", "needed. Set it writting:') print('CURRENTPATH = \"your/path\"\\n') aux1 = 'Each time your database", "- 1: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN + ','", "in a database. ###################################################### def LoadVar(ppath, database): file = ppath + str(database) +", "aux.write('} \\\\hline\\n') # Headings. for n in range(0, int(c / 2)): if n", "+ '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the same lenght.", "-> True, FrameLabel -> {\"' + SYM[0] + '(' + UNI[0] + ')\",", "Y - (10 ** Y > abs(X)); return Y def UncRound(x, ux): if", "Y = np.concatenate((x / (10 ** n), ux / (10 ** n)), axis=1)", "for o in range(0, f): if o == int(f-1): aux.write('{{' + str(float(MAT[o, [int(p", "\"your/path\"\\n') aux1 = 'Each time your database is modified you must refresh Data", "+ ',' + \\ '' print(' Variable data and uncertainty are stored.') ##########################################################################################", "# Wolfram. 
############################################################################### def WolframEx(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape", "the biggest vector in Data aux1 = 0 for l in range(0, len(Data.keys())):", "& ') # All rows and cols iterative. for o in range(0, f):", "Data[str(varname)] = vardata # Searching the biggest vector in Data aux1 = 0", "'' + ',' + \\ '' + '\\n' aux.write(rowDAUN) else: if l <=", "in range(0, int(c / 2)): if n == c / 2 - 1:", "and uncertainty row by row for l in range(0, max(aux1, len(vardata['dat']))): for m", "= str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' aux.write(rowDAUN) else: if m", "= 'Each time your database is modified you must refresh Data variable.' aux2", "') x 10[' + str(aux1[0]) + ']') except: aux1 = UncRound(x, ux); aux1", "print('Variables have not the same lenght. So no printing.') print('Variables to print metadata.')", "+ ',' + ' ' + '\\n' rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' +", "os.system('rm preview_TeX.pdf') ########################################################################################## ########################################################################################## # Wolfram. ############################################################################### def WolframEx(MAT, CAP, SYM, UNI, ppath):", "str(float(MAT[l, [int(p + 2)]])) + '},\\n') aux.write('};\\n\\n') # Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p = 0", "'\\n' aux.write(rowDAUN) else: if m == 0: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1:", "+ '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview LaTeX table. 
################################################################### def PreviewTableTeX(MAT, CAP, SYM,", "[int(p + 2)]])) +'}') else: aux.write('{' + str(float(MAT[l, [int(p + 0)]])) + ','", "= {} else: print(' Database does not exist!') Data = {} return Data", "str(aux) + ',' + ' ' + '\\n' rowSYUN = rowSYUN + ','", "' + str(aux1[1]) + '(' + str(aux1[2]) + ') x 10[' + str(aux1[0])", "= float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2 - aux[m", "str(l+1) + '] - Units : ' + var[l][\"uni\"] + '\\n' aux2 =", "+ '{}'.format('-').rjust(12,'-') aux3 = aux3 + '{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables", "l in range(0, max(aux1, len(vardata['dat']))): for m in range(0, len(Data.keys())): if len(Data.keys()) ==", "row for l in range(0, max(aux1, len(vardata['dat']))): for m in range(0, len(Data.keys())): if", "aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame -> True, FrameLabel -> {\"' + SYM[0]", "',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable name(s) are stored.') else: if", "function. 
################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids = set() def visit_Name(self, node): self.ids.add(node.id)", "\\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' print(' Variable name(s) are stored.') else: if l ==", "aux.write(rowDAUN) else: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN = rowDAUN + ','", "= ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame -> True, FrameLabel -> {\"' + SYM[0] +", "\\ '' elif m == len(Data.keys()) - 1: if l <= len((Data[list(Data.keys())[m]]['dat'])) -", "aux4 + '{}'.format(str(var[l][\"unc\"][m])).rjust(12) print(aux4) print(aux3 + '\\n') except: print('Status Failure.') def disp(var): try:", "') else: if C[o, [int(p)]] == -np.inf: aux.write('$ ( ' + str(float(C[o, [int(p", "+ SYM[1] + '(' + UNI[1] + ')\"},AspectRatio -> 1 / GoldenRatio]\\n\\n') else:", "not export the selected data to Wolfram: bad selection.') ########################################################################################## ########################################################################################## # Load", "for l in range(0, len(q)): row = q[l]; nam = q[0] if l", "row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l - 2] = \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\", "for l in range(0, len(Data.keys())): if len(Data.keys()) == 1: aux = list(Data.keys())[l] rowNAME", "if type(x) is float: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is", "ux = np.array([[ux]]) elif type(x) is np.ndarray: try: x.shape[1] except: x = x[:,", "+ '.csv', 'w') as aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing data and uncertainty row", "len(vardata['dat']))): for m in range(0, len(Data.keys())): if len(Data.keys()) == 1: # 1 var", "+ aux2 + '\\n' + aux3 + '\\n') def write_to_clipboard(output): process = subprocess.Popen(", "- 1 / 2) * c)): aux.write('c|') aux.write('} \\\\hline\\n') # Headings. 
for n", "/ (10 ** n), ux / (10 ** n)), axis=1) Y = np.concatenate((n,", "if Y[l][2] == 10: naux = n[l] + 1; xaux = x[l][0]; uxaux", "\\n\\\\centering\\n') aux.write('\\\\begin{tabular}{|') for m in range(0, int((1 - 1 / 2) * c)):", "== len(var[0]['dat']) except: print('Variables have not the same lenght. So no printing.') print('Variables", "lenght. So no printing.') print('Variables to print metadata.') print('The number of events is", "+ 1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) + ']},\\n') aux.write('}];\\n\\n') #", "functions. ########################################################### def NumPow(X): Y = np.around(np.log10(abs(X))); Y = Y - (10 **", "+ \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) else: rowDAUN = rowDAUN + ',' + \\ '' +", "1: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' + UNI[n] + ')}", "'(' + str(aux1[2]) + ') x 10[' + str(aux1[0]) + ']') except: aux1", "aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|') for m in range(0, int((1", "type(x) is np.ndarray: try: x.shape[1] except: x = x[:, np.newaxis] ux = ux[:,", "print('The number of events is ' + str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3) for", "################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self): self.ids = set() def visit_Name(self, node): self.ids.add(node.id) def", "print('# Loading ' + str(file) + '.') aux = np.zeros(len(q[0]) - 2, )", "UncPrint(x, ux): try: aux1 = UncRound(x, ux) print(' ' + str(aux1[1]) + '('", "is needed. 
Set it writting:') print('CURRENTPATH = \"your/path\"\\n') aux1 = 'Each time your", "writting:') print('CURRENTPATH = \"your/path\"\\n') aux1 = 'Each time your database is modified you", "B[:, [2]] with open(ppath + 'preview_TeX' + '.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n')", "ending enviroment. aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath + 'preview_TeX.tex'], stdout=DEVNULL, stderr=STDOUT) os.system('rm preview_TeX.log')", "# Errorbars. aux.write('EL=ErrorListPlot[{\\n'); p = 0 for o in range(0, f): if o", "# Final activities: caption and ending enviroment. aux.write('\\\\end{tabular}}\\n\\\\caption{'+CAP+'}\\n\\\\end{table}\\n\\\\end{document}\\n') # Comppiling check_call(['/usr/local/texlive/2017/bin/x86_64-darwin/pdflatex', ppath +", "1: aux = list(Data.keys())[l] rowNAME = str(aux) + ',' + ' ' +", "STDOUT, check_call from sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations + (implicit_multiplication,) separ", "def PreviewTableTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape C = np.zeros([f,", "[int(p + 0)]])) + ',' + str(float(MAT[o, [int(p + 2)]])) + '},ErrorBar[' +", "+ '},ErrorBar[' + str(float(MAT[o, [int(p + 1)]])) + ',' + str(float(MAT[o, [int(p +", "+ ' \\\\pm ' + str(float(C[o, [int(p + 2)]])) + ' ) \\\\times", "' \\ + str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{\\\\infty}", "\\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ & ') aux.write('\\\\\\\\ \\\\hline\\n') #", "database dictionary.') Data[str(varname)] = vardata # Searching the biggest vector in Data aux1", "/ 10), axis=1) # Correction if exact decimal in round. 
f, c =", "in range(0, len(q)): row = q[l]; nam = q[0] if l == 0:", "symbols and units are stored.') elif l == len(Data.keys()) - 1: aux =", "Nothing to load.') Data = {} else: print(' Database does not exist!') Data", "return Data ########################################################################################## ########################################################################################## # Store ONE variable in a database. ###################################################### def", "+ 2)]])) + ' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$ (", "+ '.csv'; Data = {} if os.path.isfile(file) is True: with open(file, newline='') as", "print(aux3) for m in range(0, len(var[0]['dat'])): aux4 = '' for l in range(0,len(var)):", "Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'], \\ l - 2 - aux[m - 1]) aux[m", "number of events is ' + str(len(var[0]['dat']))) print(aux1); print(aux3); print(aux2); print(aux3) for m", "== 0: for m in range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif l == 1:", "as:' aux3 = 'Data = LoadVar(ppath,\"Data\")' print(aux1 + aux2 + '\\n' + aux3", "== 1: aux = list(Data.keys())[l] rowNAME = str(aux) + ',' + ' '", "abs(X)); return Y def UncRound(x, ux): if type(x) is float: x = np.array([[x]])", "= Y.shape for l in range(0, f): if Y[l][2] == 10: naux =", "= ppath + str(database) + '.csv'; Data = {} if os.path.isfile(file) is True:", "- set(vars(math)) return extractor.ids ########################################################################################## ########################################################################################## # Round uncertainty functions. 
########################################################### def NumPow(X):", "the selected data to Wolfram: bad selection.') ########################################################################################## ########################################################################################## # Load ALL variables", "Wolfram: bad selection.') ########################################################################################## ########################################################################################## # Load ALL variables in a database. ######################################################", "if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\", "########################################################################################## ########################################################################################## # Variable extractor class and function. ################################################# class IdentifierExtractor(ast.NodeVisitor): def __init__(self):", "2)]])) + ' ) \\\\times 10^{\\\\infty} $ & ') else: aux.write('$ ( '", "# Correction if exact decimal in round. 
f, c = Y.shape for l", "######################################################## def TableToTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape C =", "== 1: # 1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) + \\ ',' + str(float(Data[list(Data.keys())[m]]['unc'][l]))", "= B[:, [2]] with open(ppath + 'preview_TeX' + '.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n')", "'--------------------------------------------' sectspa = ' ' ########################################################################################## ########################################################################################## # Variable extractor class and function.", "very first time importing database is compulsury. This can be done as:' aux3", "str(float(C[o, [int(p + 1)]])) + ' \\\\pm ' \\ + str(float(C[o, [int(p +", "+ 1; xaux = x[l][0]; uxaux = ux[l][0] yaux = np.array([xaux, uxaux]) Y[l]", "\\ '' + ',' + \\ '' + '\\n' aux.write(rowDAUN) else: if l", "= np.zeros(len(q)-2,) else: for m in range(0,int(len(row)/2)): if row[2*m+0] != '': Data[str(nam[2*m])]['dat'][l-2] =", "StoreVar(vardata, varname, ppath, database): Data = LoadVar(ppath,database) print(' Creating label in database dictionary.')", "'{}'.format('-').rjust(12,'-') try: len(var[l]['dat']) == len(var[0]['dat']) except: print('Variables have not the same lenght. So", "aux1 = aux1 + 'Var[' + str(l+1) + '] - Symbol: ' +", "+ '.txt', 'a') as aux: aux.write(separ + 'x' + separ + '\\n\\n$PlotTheme =", "= 0 for o in range(0, f): if o == int(f-1): aux.write('{{' +", "# All rows and cols iterative. 
for o in range(0, f): for p", "/ 2 - 3): if C[o, [int(p)]] == -np.inf: aux.write('$ ( ' +", "as sp import scipy as sc from scipy import stats import sys import", "+ '\\n' + aux3 + '\\n') def write_to_clipboard(output): process = subprocess.Popen( 'pbcopy', env={'LANG':", "-> {\"' + SYM[0] + '(' + UNI[0] + ')\", \"' + SYM[1]", "+ ']') except: aux1 = UncRound(x, ux); aux1 = aux1[0] print(' ' +", "import DEVNULL, STDOUT, check_call from sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations +", "']}') else: aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' + str(float(MAT[o, [int(p", "2)]])) + \\ ' ) \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $", "units are stored.') elif l == len(Data.keys()) - 1: aux = list(Data.keys())[l] rowNAME", "are stored.') else: if l == 0: aux = list(Data.keys())[l] rowNAME = str(aux)", "+ ppath + 'preview_TeX.tex') #os.system('open -a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open", "\\\\mathrm{(' + UNI[n] + ')} $') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$ ' +", "= aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 + '{}'.format('-').rjust(12,'-') aux3 = aux3 +", "np.concatenate((n, np.around(10 * Y) / 10), axis=1) # Correction if exact decimal in", "- Units : ' + var[l][\"uni\"] + '\\n' aux2 = aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15)", "10), axis=1) # Correction if exact decimal in round. f, c = Y.shape", "decimal in round. 
f, c = Y.shape for l in range(0, f): if", "visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids -", "to Wolfram: bad selection.') ########################################################################################## ########################################################################################## # Load ALL variables in a database.", "UNI, ppath): f, c = MAT.shape if c==4: with open(ppath + 'export_WMT' +", "str(float(MAT[l, [int(p + 0)]])) + ',' + str(float(MAT[l, [int(p + 2)]])) + '},\\n')", "(parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations + (implicit_multiplication,) separ = '--------------------------------------------' sectspa = '", "2 - 2), 3): if p == int(3 * c / 2 -", "+ '\\n' rowSYUN = rowSYUN + ',' + \\ str(Data[list(Data.keys())[l]]['sym']) + ',' +", "q[0] if l == 0: for m in range(0,int(len(row)/2)): Data[str(row[2*m])] = {} elif", "aux1 + 'Var[' + str(l+1) + '] - Symbol: ' + var[l][\"sym\"] +", "+ 1)]])) + ' \\\\pm ' + str(float(C[o, [int(p + 2)]])) + '", "rowSYUN = str(Data[list(Data.keys())[l]]['sym']) + ',' + \\ str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and units", "aux.write(rowSYUN) # Writing data and uncertainty row by row for l in range(0,", "* (l + 1) - 1)]] = B[:, [2]] with open(ppath + 'export_TeX'", "= '' for l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(12) aux4 =", "1) - 3)]] = B[:, [0]] C[:, [int((3 / 2) * (l +", "+ ',' + str(float(MAT[o, [int(p + 3)]])) + ']}') else: aux.write('{{' + str(float(MAT[o,", "'preview_TeX.tex') #os.system('open -a Preview.app ' + '/Users/marcos/Documents/Python/ProjectMaker/' + 'preview_TeX.pdf') os.system('open preview_TeX.pdf') os.system('rm preview_TeX.pdf')", "\\\\pm ' + str(float(C[o, [int(p + 2)]])) + ' ) \\\\times 10^{' +", "are stored.') elif l == 
len(Data.keys()) - 1: aux = list(Data.keys())[l] rowNAME =", "if row[2*m+1] != '': Data[str(nam[2*m])]['unc'][l - 2] = \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] =", "- 2)]] = B[:, [1]] C[:, [int((3 / 2) * (l + 1)", ") \\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ ') else: if C[o,", "'}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview LaTeX table. ################################################################### def PreviewTableTeX(MAT, CAP, SYM, UNI,", "'Var[' + str(l+1) + '] - Units : ' + var[l][\"uni\"] + '\\n'", "' ' ########################################################################################## ########################################################################################## # Variable extractor class and function. ################################################# class IdentifierExtractor(ast.NodeVisitor):", "+ 'Var[' + str(l+1) + '] - Units : ' + var[l][\"uni\"] +", "sympy as sp import scipy as sc from scipy import stats import sys", "for l in range(0,len(var)): aux1 = aux1 + 'Var[' + str(l+1) + ']", "This can be done as:' aux3 = 'Data = LoadVar(ppath,\"Data\")' print(aux1 + aux2", "' rowSYUN = rowSYUN + ',' + Data[list(Data.keys())[l]]['sym'] + \\ ',' + Data[list(Data.keys())[l]]['uni']", "return Y def UncPrint(x, ux): try: aux1 = UncRound(x, ux) print(' ' +", "and ending enviroment. 
aux.write('\\\\end{tabular}\\n\\\\caption{' + CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview LaTeX", "type(x) is float: x = np.array([[x]]) ux = np.array([[ux]]) elif type(x) is int:", "if os.path.isfile(file) is True: with open(file, newline='') as csvfile: spamreader = csv.reader(csvfile, delimiter=',')", "'.csv'; Data = {} if os.path.isfile(file) is True: with open(file, newline='') as csvfile:", "MAT[:, [l]]) C[:, [int((3 / 2) * (l + 1) - 3)]] =", "enviroment. aux.write('LP = ListPlot[data];\\n') aux.write('LL = ListLinePlot[data];\\n') aux.write('Show[LP, LL, Frame -> True, FrameLabel", "l == 1: for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni'] = row[2*m+1]", "{} return Data ########################################################################################## ########################################################################################## # Store ONE variable in a database. 
######################################################", "Data[str(nam[2*m])]['dat'][l-2] = float(row[2*m+0]) else: Data[str(nam[2*m])]['dat'] = \\ np.delete(Data[str(nam[2*m])]['dat'], \\ l - 2 -", "aux1 = aux1[0] print(' ' + str(aux1[1]) + '(' + str(aux1[2]) + ')", "aux3 = '' for l in range(0,len(var)): aux1 = aux1 + 'Var[' +", "\\ + str(float(C[o, [int(p + 2)]])) + \\ ' ) \\\\times 10^{' +", "database): Data = LoadVar(ppath,database) print(' Creating label in database dictionary.') Data[str(varname)] = vardata", "ux = ux[:, np.newaxis] n = NumPow(ux) Y = np.concatenate((x / (10 **", "= B[:, [1]] C[:, [int((3 / 2) * (l + 1) - 1)]]", "Y.shape for l in range(0, f): if Y[l][2] == 10: naux = n[l]", "sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations + (implicit_multiplication,) separ = '--------------------------------------------' sectspa", "uncertainty are stored.') ########################################################################################## ########################################################################################## # Disp a ProjectMaker variable. ########################################################## def dispu(var):", "def NumPow(X): Y = np.around(np.log10(abs(X))); Y = Y - (10 ** Y >", "rowDAUN + ',' + \\ '' + ',' + \\ '' + '\\n'", "/ 2 - 1: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{(' +", "[int(p + 1)]])) + ' \\\\pm ' + str(float(C[o, [int(p + 2)]])) +", "def LoadVar(ppath, database): file = ppath + str(database) + '.csv'; Data = {}", "rowSYUN = rowSYUN + ',' + Data[list(Data.keys())[l]]['sym'] + \\ ',' + Data[list(Data.keys())[l]]['uni'] with", "p = 0 for l in range(0, f): if l == int(f -", "+ ' \\\\ \\\\mathrm{(' + UNI[n] + ')} $ & ') # All", "# Importing packages. 
#################################################################### import numpy as np import csv import re import", "= aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(15) aux3 = aux3 + '{}'.format('-').rjust(15,'-') try: len(var[l]['dat']) == len(var[0]['dat'])", "2)]] = B[:, [1]] C[:, [int((3 / 2) * (l + 1) -", "',' + ' ' rowSYUN = rowSYUN + ',' + Data[list(Data.keys())[l]]['sym'] + \\", "variable.' aux2 = 'The very first time importing database is compulsury. This can", "range(0, len(Data.keys())): if len(Data.keys()) == 1: # 1 var rowDAUN = str(float(Data[list(Data.keys())[m]]['dat'][l])) +", "$') aux.write('\\\\\\\\ \\\\hline \\\\hline\\n') else: aux.write('$ ' + SYM[n] + ' \\\\ \\\\mathrm{('", "- Symbol: ' + var[l][\"sym\"] + '\\n' aux1 = aux1 + 'Var[' +", "from sympy.parsing.sympy_parser import (parse_expr,standard_transformations, implicit_multiplication) transformations = standard_transformations + (implicit_multiplication,) separ = '--------------------------------------------'", "CAP + '}\\n\\\\end{table}\\n\\n\\n\\n') ########################################################################################## ########################################################################################## # Preview LaTeX table. ################################################################### def PreviewTableTeX(MAT, CAP,", "l in range(0,len(var)): aux4 = aux4 + '{}'.format(str(var[l][\"dat\"][m])).rjust(15) print(aux4) print(aux3 + '\\n') except:", "as aux: aux.write(rowNAME) aux.write(rowSYUN) # Writing data and uncertainty row by row for", "########################################################################################## ########################################################################################## # Store ONE variable in a database. 
###################################################### def StoreVar(vardata, varname,", "+ ',' + \\ str(Data[list(Data.keys())[l]]['uni']) + '\\n' else: aux = list(Data.keys())[l] rowNAME =", "x.shape[1] except: x = x[:, np.newaxis] ux = ux[:, np.newaxis] n = NumPow(ux)", "+ 'x' + separ + '\\n\\n$PlotTheme = \"Classic\";\\nNeeds[\"ErrorBarPlots`\"];\\n\\n') # Points. aux.write('data={\\n'); p =", "################################################################### def PreviewTableTeX(MAT, CAP, SYM, UNI, ppath): f, c = MAT.shape C =", "1] + 1 print(' Load file success.') if len(Data) is 0: print(' Nothing", "########################################################################################## # Round uncertainty functions. ########################################################### def NumPow(X): Y = np.around(np.log10(abs(X))); Y =", "max(aux1, len(vardata['dat']))): for m in range(0, len(Data.keys())): if len(Data.keys()) == 1: # 1", "= ux[l][0] yaux = np.array([xaux, uxaux]) Y[l] = np.concatenate((naux, np.around(10*yaux)/10), axis=0) return Y", "aux.write(rowDAUN) else: if m == 0: if l <= len((Data[list(Data.keys())[m]]['dat'])) - 1: rowDAUN", "!= '': Data[str(nam[2*m])]['unc'][l - 2] = \\ float(row[2*m+1]) else: Data[str(nam[2*m])]['unc'] = \\ np.delete(Data[str(nam[2*m])]['unc'],", "stored.') ########################################################################################## ########################################################################################## # Disp a ProjectMaker variable. 
########################################################## def dispu(var): try: aux1", "if o == int(f-1): aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ',' +", "def VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN)) extractor.ids = extractor.ids - set(vars(math)) return extractor.ids", "{} else: print(' Database does not exist!') Data = {} return Data ##########################################################################################", "import time from termcolor import colored, cprint import subprocess from subprocess import DEVNULL,", "max(len(Data[list(Data.keys())[l]]['dat']), aux1) # Headers of data table for l in range(0, len(Data.keys())): if", "\\\\times 10^{' + str(int(C[o, [int(p)]])) + '} $ ') else: if C[o, [int(p)]]", "rowNAME + ',' + str(aux) + ',' + ' ' rowSYUN = rowSYUN", "except: x = x[:, np.newaxis] ux = ux[:, np.newaxis] n = NumPow(ux) Y", "a database. ###################################################### def StoreVar(vardata, varname, ppath, database): Data = LoadVar(ppath,database) print(' Creating", "self.ids = set() def visit_Name(self, node): self.ids.add(node.id) def VariableExtractor(FUN): extractor = IdentifierExtractor() extractor.visit(ast.parse(FUN))", "+ 3)]])) + ']}') else: aux.write('{{' + str(float(MAT[o, [int(p + 0)]])) + ','", "+ '\\n') def write_to_clipboard(output): process = subprocess.Popen( 'pbcopy', env={'LANG': 'en_US.UTF-8'}, stdin=subprocess.PIPE) process.communicate(output.encode('utf-8')) def", "c)): aux.write('c|') aux.write('} \\\\hline\\n') # Headings. 
for n in range(0, int(c / 2)):", "= aux2 + 'd({})'.format(var[l][\"sym\"]).rjust(12) aux2 = aux2 + 'u({})'.format(var[l][\"sym\"]).rjust(12) aux3 = aux3 +", "of data table for l in range(0, len(Data.keys())): if len(Data.keys()) == 1: aux", "os import ast import math import sympy as sp import scipy as sc", "str(float(Data[list(Data.keys())[m]]['dat'][l])) + ',' + \\ str(float(Data[list(Data.keys())[m]]['unc'][l])) + '\\n' else: rowDAUN = rowDAUN +", "+ '.tex', 'w') as aux: aux.write('\\\\documentclass[varwidth=true,border=10pt,convert={size=640x}]{standalone}\\n') aux.write('\\\\usepackage{graphicx,float}\\n') aux.write('\\\\usepackage[utf8]{inputenc}') aux.write('\\\\usepackage[T1]{fontenc}\\n') aux.write('\\\\begin{document}\\n') aux.write('\\\\begin{table}[H] \\n\\\\centering\\n') aux.write('\\\\resizebox{12cm}{!}{\\\\begin{tabular}{|')", "str(Data[list(Data.keys())[l]]['uni']) print(' Variable symbols and units are stored.') elif l == len(Data.keys()) -", "np import csv import re import os import ast import math import sympy", "o in range(0, f): if o == int(f-1): aux.write('{{' + str(float(MAT[o, [int(p +", "str(float(MAT[o, [int(p + 1)]])) + ',' + str(float(MAT[o, [int(p + 3)]])) + ']}')", "in range(0, f): for p in range(0, int(3 * c / 2 -", "elif l == 1: for m in range(0,int(len(row)/2)): Data[str(nam[2*m])]['sym'] = row[2*m+0] Data[str(nam[2*m])]['uni'] =", "range(0, len(var[0]['dat'])): aux4 = '' for l in range(0,len(var)): aux4 = aux4 +", "'\\n' rowSYUN = rowSYUN + ',' + \\ str(Data[list(Data.keys())[l]]['sym']) + ',' + \\", "separ = '--------------------------------------------' sectspa = ' ' ########################################################################################## ########################################################################################## # Variable extractor class" ]
[ "VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name", "async def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key: Optional[UUID] = None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]:", "uuid import UUID from loguru import logger from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams,", "stored procedure is due # to the fact that the object doesn't exist.", "@property def table(self) -> str: return \"vaccine_availability_timeslot\" @property def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return", "is due # to the fact that the object doesn't exist. return []", "await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if db_rows is None: # We", "= await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if db_rows is None: #", ") if db_rows is None: # We are assuming that any error on", "= \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property def table(self) -> str:", "raise NotImplementedError(\"Get multi is not available for timeslots\") async def get_by_vaccine_availability_id( self, vaccine_availability_id:", "create_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self, ) ->", "return \"vaccine_availability_timeslot\" @property def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def create_response_schema( self,", 
"db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def create_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return", "delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property def table(self) -> str: return \"vaccine_availability_timeslot\"", ") from app.services.base import BaseService from app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class", "app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ]", "VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def get_multi(", "typing import List, Optional, Type, Union from uuid import UUID from loguru import", "\"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property def table(self) -> str: return \"vaccine_availability_timeslot\" @property def", "VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import BaseService from app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, )", "Optional[UUID] = None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await", "import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ):", "that the object doesn't exist. 
return [] if ret_value == -1: raise InternalDatabaseError(f\"Failed", "ret_value, db_rows = await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if db_rows is", "\"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\"", "NotImplementedError(\"Get multi is not available for timeslots\") async def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID,", "\"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\"", "update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property", "import logger from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base", "not available for timeslots\") async def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key: Optional[UUID] =", "the stored procedure is due # to the fact that the object doesn't", "( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import BaseService from app.services.exceptions import", "return [] if ret_value == -1: raise InternalDatabaseError(f\"Failed to execute {procedure_name}\") return [VaccineAvailabilityTimeslotResponse(**o)", 
"VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is not", "= \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if", "def update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self, )", "InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name = \"vaccine_availability_timeslots_Read\"", "\"id\" @property def table(self) -> str: return \"vaccine_availability_timeslot\" @property def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]:", "auth_key=auth_key, ) if db_rows is None: # We are assuming that any error", "@property def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def create_response_schema( self, ) ->", "is not available for timeslots\") async def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key: Optional[UUID]", "-> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id},", "-> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get", ") class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, 
VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter", "VaccineAvailabilityTimeslotResponse @property def create_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema(", "timeslots\") async def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key: Optional[UUID] = None ) ->", "from app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest,", "\"vaccine_availability_timeslot\" @property def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def create_response_schema( self, )", "Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi", "-> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is not available for timeslots\") async def get_by_vaccine_availability_id(", ") -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise", "due # to the fact that the object doesn't exist. 
return [] if", "self, vaccine_availability_id: UUID, auth_key: Optional[UUID] = None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\"", "import List, Optional, Type, Union from uuid import UUID from loguru import logger", "table(self) -> str: return \"vaccine_availability_timeslot\" @property def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property", "VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name =", "= None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await self._db.sproc_fetch_all(", "error on the stored procedure is due # to the fact that the", "def create_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self, )", "update_procedure_id_parameter = \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property def table(self) ->", "if db_rows is None: # We are assuming that any error on the", "return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def", "doesn't exist. 
return [] if ret_value == -1: raise InternalDatabaseError(f\"Failed to execute {procedure_name}\")", "update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self, ) ->", "= \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter =", "procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if db_rows is None: # We are assuming", "from app.services.base import BaseService from app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService(", "get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is not available for timeslots\")", "exist. return [] if ret_value == -1: raise InternalDatabaseError(f\"Failed to execute {procedure_name}\") return", "VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\"", "read_procedure_id_parameter = \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name", "BaseService from app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams,", "available for timeslots\") async def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key: Optional[UUID] = None", ") -> 
Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\":", "create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter", "Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key,", "= \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter =", "loguru import logger from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from", "assuming that any error on the stored procedure is due # to the", "the fact that the object doesn't exist. 
return [] if ret_value == -1:", "( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name", "] ): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name =", "-> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def create_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams", "get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key: Optional[UUID] = None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name =", "procedure is due # to the fact that the object doesn't exist. return", "ret_value == -1: raise InternalDatabaseError(f\"Failed to execute {procedure_name}\") return [VaccineAvailabilityTimeslotResponse(**o) for o in", "read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter", "None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await self._db.sproc_fetch_all( procname=procedure_name,", "fact that the object doesn't exist. 
return [] if ret_value == -1: raise", "@property def update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self,", "auth_key: Optional[UUID] = None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows =", "Type, Union from uuid import UUID from loguru import logger from app.schemas.vaccine_availability import", "\"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if db_rows", "Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async", "if ret_value == -1: raise InternalDatabaseError(f\"Failed to execute {procedure_name}\") return [VaccineAvailabilityTimeslotResponse(**o) for o", "app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import BaseService from", "Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def create_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property", "class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter =", "return VaccineAvailabilityTimeslotResponse @property def create_response_schema( self, 
) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def", "= \"id\" @property def table(self) -> str: return \"vaccine_availability_timeslot\" @property def db_response_schema(self) ->", "== -1: raise InternalDatabaseError(f\"Failed to execute {procedure_name}\") return [VaccineAvailabilityTimeslotResponse(**o) for o in db_rows]", "def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def create_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]:", "to the fact that the object doesn't exist. return [] if ret_value ==", "): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\"", "import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import BaseService from app.services.exceptions", "\"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property def table(self) -> str: return", "that any error on the stored procedure is due # to the fact", "logger from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import", "delete_procedure_id_parameter = \"id\" @property def table(self) -> str: return \"vaccine_availability_timeslot\" @property def db_response_schema(self)", "for timeslots\") async def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key: Optional[UUID] = None )", "-> 
Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest", "UUID from loguru import logger from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest,", "is None: # We are assuming that any error on the stored procedure", "import BaseService from app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse,", "return VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is", "VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import BaseService from app.services.exceptions import ( InternalDatabaseError,", ") -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return", "are assuming that any error on the stored procedure is due # to", "self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is not available for timeslots\") async", "VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\"", "str: return \"vaccine_availability_timeslot\" @property def db_response_schema(self) -> 
Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def create_response_schema(", ") -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is not available for timeslots\") async def", "\"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property def table(self)", "import UUID from loguru import logger from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse,", "We are assuming that any error on the stored procedure is due #", "[] if ret_value == -1: raise InternalDatabaseError(f\"Failed to execute {procedure_name}\") return [VaccineAvailabilityTimeslotResponse(**o) for", "\"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\"", "self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]: return VaccineAvailabilityTimeslotUpdateRequest async def get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]:", "List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is not available for timeslots\") async def get_by_vaccine_availability_id( self,", "BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name = \"vaccine_availability_timeslots_Read\" read_procedure_id_parameter = \"id\" create_procedure_name", "from uuid import UUID from loguru import logger from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest,", "db_rows is None: # We are assuming that any error on the stored", "the object doesn't 
exist. return [] if ret_value == -1: raise InternalDatabaseError(f\"Failed to", "object doesn't exist. return [] if ret_value == -1: raise InternalDatabaseError(f\"Failed to execute", "Union from uuid import UUID from loguru import logger from app.schemas.vaccine_availability import (", "def table(self) -> str: return \"vaccine_availability_timeslot\" @property def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse", "from loguru import logger from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, )", "async def get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is not available", "parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if db_rows is None: # We are assuming that", "multi is not available for timeslots\") async def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key:", "= \"id\" create_procedure_name = \"vaccine_availability_timeslots_Create\" update_procedure_name = \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name =", "def get_by_vaccine_availability_id( self, vaccine_availability_id: UUID, auth_key: Optional[UUID] = None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name", "VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import BaseService from app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest,", "None: # We are assuming that any error on the stored procedure is", "procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows = await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, 
auth_key=auth_key, )", "<gh_stars>10-100 from typing import List, Optional, Type, Union from uuid import UUID from", "= \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property def table(self) -> str: return \"vaccine_availability_timeslot\" @property", "vaccine_availability_id: UUID, auth_key: Optional[UUID] = None ) -> Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value,", "Optional, Type, Union from uuid import UUID from loguru import logger from app.schemas.vaccine_availability", "from app.schemas.vaccine_availability import ( VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import BaseService", "self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if db_rows is None: # We are", "# to the fact that the object doesn't exist. 
return [] if ret_value", "app.services.base import BaseService from app.services.exceptions import ( InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[", "vaccine_availability_id}, auth_key=auth_key, ) if db_rows is None: # We are assuming that any", "self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotUpdateRequest]:", "# We are assuming that any error on the stored procedure is due", "from typing import List, Optional, Type, Union from uuid import UUID from loguru", "-> str: return \"vaccine_availability_timeslot\" @property def db_response_schema(self) -> Type[VaccineAvailabilityTimeslotResponse]: return VaccineAvailabilityTimeslotResponse @property def", "db_rows = await self._db.sproc_fetch_all( procname=procedure_name, parameters={\"parentID\": vaccine_availability_id}, auth_key=auth_key, ) if db_rows is None:", "@property def create_response_schema( self, ) -> Type[VaccineAvailabilityTimeslotCreateSprocParams]: return VaccineAvailabilityTimeslotCreateSprocParams @property def update_response_schema( self,", "on the stored procedure is due # to the fact that the object", "VaccineAvailabilityTimeslotCreateRequest, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotUpdateRequest, ) from app.services.base import BaseService from app.services.exceptions import (", "InternalDatabaseError, InvalidAuthenticationKeyForRequest, ) class VaccineAvailabilityTimeslotService( BaseService[ VaccineAvailabilityTimeslotResponse, VaccineAvailabilityTimeslotCreateSprocParams, VaccineAvailabilityTimeslotUpdateRequest, ] ): read_procedure_name =", "List, Optional, Type, Union from uuid import UUID from loguru import logger from", "UUID, auth_key: Optional[UUID] = None ) -> 
Optional[List[VaccineAvailabilityTimeslotResponse]]: procedure_name = \"vaccine_availability_timeslots_ReadByParent\" ret_value, db_rows", "def get_multi( self, ) -> List[VaccineAvailabilityTimeslotResponse]: raise NotImplementedError(\"Get multi is not available for", "= \"vaccine_availability_timeslots_Update\" update_procedure_id_parameter = \"id\" delete_procedure_name = \"vaccine_availability_timeslots_Delete\" delete_procedure_id_parameter = \"id\" @property def", "any error on the stored procedure is due # to the fact that" ]
[ "import os print(os.name) print(os.uname()) print(os.environ) print(os.environ.get('PATH')) p = os.path.join('.', 'test_dir') print(p) os.mkdir(p) os.rmdir(p)" ]
[ "os, subprocess def execute_shell_process(message, command): print(message) env_copy = os.environ.copy() output = subprocess.run(command, env=env_copy,", "command): print(message) env_copy = os.environ.copy() output = subprocess.run(command, env=env_copy, shell=True) if output.returncode ==", "import os, subprocess def execute_shell_process(message, command): print(message) env_copy = os.environ.copy() output = subprocess.run(command,", "os.environ.copy() output = subprocess.run(command, env=env_copy, shell=True) if output.returncode == 0: print(\"Success!\") else: print(\"Oops!", "env_copy = os.environ.copy() output = subprocess.run(command, env=env_copy, shell=True) if output.returncode == 0: print(\"Success!\")", "subprocess.run(command, env=env_copy, shell=True) if output.returncode == 0: print(\"Success!\") else: print(\"Oops! Please try again.\")", "= os.environ.copy() output = subprocess.run(command, env=env_copy, shell=True) if output.returncode == 0: print(\"Success!\") else:", "def execute_shell_process(message, command): print(message) env_copy = os.environ.copy() output = subprocess.run(command, env=env_copy, shell=True) if", "print(message) env_copy = os.environ.copy() output = subprocess.run(command, env=env_copy, shell=True) if output.returncode == 0:", "output = subprocess.run(command, env=env_copy, shell=True) if output.returncode == 0: print(\"Success!\") else: print(\"Oops! Please", "subprocess def execute_shell_process(message, command): print(message) env_copy = os.environ.copy() output = subprocess.run(command, env=env_copy, shell=True)", "= subprocess.run(command, env=env_copy, shell=True) if output.returncode == 0: print(\"Success!\") else: print(\"Oops! Please try", "execute_shell_process(message, command): print(message) env_copy = os.environ.copy() output = subprocess.run(command, env=env_copy, shell=True) if output.returncode" ]
[ "LinearSVC from tpot.builtins import StackingEstimator # NOTE: Make sure that the outcome column", "column is labeled 'target' in the data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64)", "= \\ train_test_split(features, tpot_data['target'], random_state=None) # Average CV score on the training set", "sklearn.pipeline import make_pipeline, make_union from sklearn.svm import LinearSVC from tpot.builtins import StackingEstimator #", "make_union from sklearn.svm import LinearSVC from tpot.builtins import StackingEstimator # NOTE: Make sure", "score on the training set was: 0.7222949874965342 exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True,", "that the outcome column is labeled 'target' in the data file tpot_data =", "from sklearn.svm import LinearSVC from tpot.builtins import StackingEstimator # NOTE: Make sure that", "was: 0.7222949874965342 exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)),", "exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False,", "sklearn.model_selection import train_test_split from sklearn.pipeline import make_pipeline, make_union from sklearn.svm import LinearSVC from", "penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\", penalty=\"l2\", tol=1e-05) ) exported_pipeline.fit(training_features, training_target) results = exported_pipeline.predict(testing_features)", "pd from sklearn.linear_model import SGDClassifier from sklearn.model_selection import train_test_split from sklearn.pipeline import make_pipeline,", "from tpot.builtins import 
StackingEstimator # NOTE: Make sure that the outcome column is", "import LinearSVC from tpot.builtins import StackingEstimator # NOTE: Make sure that the outcome", "Make sure that the outcome column is labeled 'target' in the data file", "import SGDClassifier from sklearn.model_selection import train_test_split from sklearn.pipeline import make_pipeline, make_union from sklearn.svm", "training_features, testing_features, training_target, testing_target = \\ train_test_split(features, tpot_data['target'], random_state=None) # Average CV score", "fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\", penalty=\"l2\", tol=1e-05) ) exported_pipeline.fit(training_features,", "from sklearn.model_selection import train_test_split from sklearn.pipeline import make_pipeline, make_union from sklearn.svm import LinearSVC", "random_state=None) # Average CV score on the training set was: 0.7222949874965342 exported_pipeline =", "# Average CV score on the training set was: 0.7222949874965342 exported_pipeline = make_pipeline(", "StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\", penalty=\"l2\", tol=1e-05)", "loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\", penalty=\"l2\", tol=1e-05) ) exported_pipeline.fit(training_features, training_target) results =", "sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target', axis=1) training_features, testing_features, training_target, testing_target = \\ train_test_split(features,", "sklearn.svm import LinearSVC from tpot.builtins import StackingEstimator # NOTE: Make sure that the", "import make_pipeline, make_union from sklearn.svm import LinearSVC from tpot.builtins 
import StackingEstimator # NOTE:", "0.7222949874965342 exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1,", "testing_target = \\ train_test_split(features, tpot_data['target'], random_state=None) # Average CV score on the training", "on the training set was: 0.7222949874965342 exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0,", "train_test_split from sklearn.pipeline import make_pipeline, make_union from sklearn.svm import LinearSVC from tpot.builtins import", "the data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target', axis=1) training_features,", "# NOTE: Make sure that the outcome column is labeled 'target' in the", "outcome column is labeled 'target' in the data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR',", "SGDClassifier from sklearn.model_selection import train_test_split from sklearn.pipeline import make_pipeline, make_union from sklearn.svm import", "= tpot_data.drop('target', axis=1) training_features, testing_features, training_target, testing_target = \\ train_test_split(features, tpot_data['target'], random_state=None) #", "features = tpot_data.drop('target', axis=1) training_features, testing_features, training_target, testing_target = \\ train_test_split(features, tpot_data['target'], random_state=None)", "labeled 'target' in the data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features =", "learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\", penalty=\"l2\", tol=1e-05) ) exported_pipeline.fit(training_features, training_target) results", "axis=1) 
training_features, testing_features, training_target, testing_target = \\ train_test_split(features, tpot_data['target'], random_state=None) # Average CV", "CV score on the training set was: 0.7222949874965342 exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1,", "np import pandas as pd from sklearn.linear_model import SGDClassifier from sklearn.model_selection import train_test_split", "eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\", penalty=\"l2\", tol=1e-05) )", "sklearn.linear_model import SGDClassifier from sklearn.model_selection import train_test_split from sklearn.pipeline import make_pipeline, make_union from", "StackingEstimator # NOTE: Make sure that the outcome column is labeled 'target' in", "<reponame>nachovazquez98/COVID-19_Paper import numpy as np import pandas as pd from sklearn.linear_model import SGDClassifier", "import numpy as np import pandas as pd from sklearn.linear_model import SGDClassifier from", "\\ train_test_split(features, tpot_data['target'], random_state=None) # Average CV score on the training set was:", "the training set was: 0.7222949874965342 exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\",", "set was: 0.7222949874965342 exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\",", "tpot.builtins import StackingEstimator # NOTE: Make sure that the outcome column is labeled", "tpot_data.drop('target', axis=1) training_features, testing_features, training_target, testing_target = \\ train_test_split(features, tpot_data['target'], random_state=None) # Average", "train_test_split(features, tpot_data['target'], 
random_state=None) # Average CV score on the training set was: 0.7222949874965342", "sure that the outcome column is labeled 'target' in the data file tpot_data", "in the data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target', axis=1)", "dtype=np.float64) features = tpot_data.drop('target', axis=1) training_features, testing_features, training_target, testing_target = \\ train_test_split(features, tpot_data['target'],", "training_target, testing_target = \\ train_test_split(features, tpot_data['target'], random_state=None) # Average CV score on the", "from sklearn.linear_model import SGDClassifier from sklearn.model_selection import train_test_split from sklearn.pipeline import make_pipeline, make_union", "pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target', axis=1) training_features, testing_features, training_target, testing_target = \\", "import pandas as pd from sklearn.linear_model import SGDClassifier from sklearn.model_selection import train_test_split from", "testing_features, training_target, testing_target = \\ train_test_split(features, tpot_data['target'], random_state=None) # Average CV score on", "tpot_data['target'], random_state=None) # Average CV score on the training set was: 0.7222949874965342 exported_pipeline", "= pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target', axis=1) training_features, testing_features, training_target, testing_target =", "l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\", penalty=\"l2\", tol=1e-05) ) exported_pipeline.fit(training_features, training_target)", "as pd from sklearn.linear_model import SGDClassifier from sklearn.model_selection import train_test_split from sklearn.pipeline import", "= make_pipeline( 
StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\",", "import train_test_split from sklearn.pipeline import make_pipeline, make_union from sklearn.svm import LinearSVC from tpot.builtins", "import StackingEstimator # NOTE: Make sure that the outcome column is labeled 'target'", "from sklearn.pipeline import make_pipeline, make_union from sklearn.svm import LinearSVC from tpot.builtins import StackingEstimator", "make_pipeline, make_union from sklearn.svm import LinearSVC from tpot.builtins import StackingEstimator # NOTE: Make", "training set was: 0.7222949874965342 exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\",", "file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target', axis=1) training_features, testing_features, training_target,", "numpy as np import pandas as pd from sklearn.linear_model import SGDClassifier from sklearn.model_selection", "'target' in the data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target',", "as np import pandas as pd from sklearn.linear_model import SGDClassifier from sklearn.model_selection import", "pandas as pd from sklearn.linear_model import SGDClassifier from sklearn.model_selection import train_test_split from sklearn.pipeline", "data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target', axis=1) training_features, testing_features,", "the outcome column is labeled 'target' in the data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE',", "Average CV score on the training set was: 0.7222949874965342 
exported_pipeline = make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0,", "tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features = tpot_data.drop('target', axis=1) training_features, testing_features, training_target, testing_target", "NOTE: Make sure that the outcome column is labeled 'target' in the data", "make_pipeline( StackingEstimator(estimator=SGDClassifier(alpha=0.0, eta0=0.1, fit_intercept=True, l1_ratio=0.0, learning_rate=\"constant\", loss=\"hinge\", penalty=\"elasticnet\", power_t=10.0)), LinearSVC(C=0.1, dual=False, loss=\"squared_hinge\", penalty=\"l2\",", "is labeled 'target' in the data file tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64) features" ]
[ "ccxt import time import re from .db import Database from .exchange import build_exchange,", "use since as an exclusive limit # we ask again for the last", "time import re from .db import Database from .exchange import build_exchange, timestamps_to_seconds, secs_to_millis", "ccxt.NetworkError as err: print(f'ERROR: {err}') data = [] return data def fetch_and_save(self): data", "data, it was not closed yet and contained partial data if since is", "from datetime import datetime import ccxt import time import re from .db import", "self.symbol = symbol self.db = Database(exchange_name, self.symbol, db_base_path) self.sleep_time = 45 def fetch_ohlcv(self):", "candle in case the last time we # got that data, it was", "we ask again for the last candle in case the last time we", "fetch_ohlcv(self): since = self.db.newest_timestamp() # some exchanges use since as an exclusive limit", "symbol self.db = Database(exchange_name, self.symbol, db_base_path) self.sleep_time = 45 def fetch_ohlcv(self): since =", "self.db.add(data) for datum in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum) print() def", "again for the last candle in case the last time we # got", "Database(exchange_name, self.symbol, db_base_path) self.sleep_time = 45 def fetch_ohlcv(self): since = self.db.newest_timestamp() # some", "45 def fetch_ohlcv(self): since = self.db.newest_timestamp() # some exchanges use since as an", "build_exchange, timestamps_to_seconds, secs_to_millis class Updater: def __init__(self, exchange_name, symbol, db_base_path): self.exchange = build_exchange(exchange_name)", "seconds, ccxt uses millis since = secs_to_millis(since) try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since)", "datetime import ccxt import time import re from .db import Database from .exchange", "in case the last time we # got that data, it was not", "datum in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') 
print(date, datum) print() def run(self): print(f'sleep:", "len(data) > 0: self.db.add(data) for datum in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date,", "[] return data def fetch_and_save(self): data = self.fetch_ohlcv() if len(data) > 0: self.db.add(data)", "class Updater: def __init__(self, exchange_name, symbol, db_base_path): self.exchange = build_exchange(exchange_name) self.symbol = symbol", "limit # we ask again for the last candle in case the last", "closed yet and contained partial data if since is not None: since -=", "None: since -= 1 # we store timestamps in seconds, ccxt uses millis", "1 # we store timestamps in seconds, ccxt uses millis since = secs_to_millis(since)", "= self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError as err: print(f'ERROR: {err}') data =", "= [] return data def fetch_and_save(self): data = self.fetch_ohlcv() if len(data) > 0:", "from .db import Database from .exchange import build_exchange, timestamps_to_seconds, secs_to_millis class Updater: def", "it was not closed yet and contained partial data if since is not", "fetch_and_save(self): data = self.fetch_ohlcv() if len(data) > 0: self.db.add(data) for datum in data:", "since is not None: since -= 1 # we store timestamps in seconds,", "that data, it was not closed yet and contained partial data if since", "# we store timestamps in seconds, ccxt uses millis since = secs_to_millis(since) try:", "the last candle in case the last time we # got that data,", "as an exclusive limit # we ask again for the last candle in", "ccxt uses millis since = secs_to_millis(since) try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data)", "def fetch_and_save(self): data = self.fetch_ohlcv() if len(data) > 0: self.db.add(data) for datum in", "partial data if since is not None: since -= 1 # we store", "= 
datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum) print() def run(self): print(f'sleep: {self.sleep_time}') while True: self.fetch_and_save()", "self.db = Database(exchange_name, self.symbol, db_base_path) self.sleep_time = 45 def fetch_ohlcv(self): since = self.db.newest_timestamp()", "# got that data, it was not closed yet and contained partial data", "data = self.fetch_ohlcv() if len(data) > 0: self.db.add(data) for datum in data: date", "data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum) print() def run(self): print(f'sleep: {self.sleep_time}') while", "for the last candle in case the last time we # got that", "import re from .db import Database from .exchange import build_exchange, timestamps_to_seconds, secs_to_millis class", "since=since) timestamps_to_seconds(data) except ccxt.NetworkError as err: print(f'ERROR: {err}') data = [] return data", "datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum) print() def run(self): print(f'sleep: {self.sleep_time}') while True: self.fetch_and_save() time.sleep(self.sleep_time)", "secs_to_millis(since) try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError as err: print(f'ERROR:", "an exclusive limit # we ask again for the last candle in case", "self.sleep_time = 45 def fetch_ohlcv(self): since = self.db.newest_timestamp() # some exchanges use since", "exchange_name, symbol, db_base_path): self.exchange = build_exchange(exchange_name) self.symbol = symbol self.db = Database(exchange_name, self.symbol,", "as err: print(f'ERROR: {err}') data = [] return data def fetch_and_save(self): data =", "millis since = secs_to_millis(since) try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError", "contained partial data if since is not None: since -= 1 # we", "# we ask again 
for the last candle in case the last time", "= Database(exchange_name, self.symbol, db_base_path) self.sleep_time = 45 def fetch_ohlcv(self): since = self.db.newest_timestamp() #", "__init__(self, exchange_name, symbol, db_base_path): self.exchange = build_exchange(exchange_name) self.symbol = symbol self.db = Database(exchange_name,", "in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum) print() def run(self): print(f'sleep: {self.sleep_time}')", "timestamps_to_seconds, secs_to_millis class Updater: def __init__(self, exchange_name, symbol, db_base_path): self.exchange = build_exchange(exchange_name) self.symbol", "case the last time we # got that data, it was not closed", "timestamps in seconds, ccxt uses millis since = secs_to_millis(since) try: data = self.exchange.fetch_ohlcv(self.symbol,", "build_exchange(exchange_name) self.symbol = symbol self.db = Database(exchange_name, self.symbol, db_base_path) self.sleep_time = 45 def", "some exchanges use since as an exclusive limit # we ask again for", "db_base_path): self.exchange = build_exchange(exchange_name) self.symbol = symbol self.db = Database(exchange_name, self.symbol, db_base_path) self.sleep_time", "= build_exchange(exchange_name) self.symbol = symbol self.db = Database(exchange_name, self.symbol, db_base_path) self.sleep_time = 45", "= self.db.newest_timestamp() # some exchanges use since as an exclusive limit # we", "last candle in case the last time we # got that data, it", "self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError as err: print(f'ERROR: {err}') data = []", "we store timestamps in seconds, ccxt uses millis since = secs_to_millis(since) try: data", "self.symbol, db_base_path) self.sleep_time = 45 def fetch_ohlcv(self): since = self.db.newest_timestamp() # some exchanges", "{err}') data = [] return data def fetch_and_save(self): data = self.fetch_ohlcv() if len(data)", "the last 
time we # got that data, it was not closed yet", "if len(data) > 0: self.db.add(data) for datum in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S')", "if since is not None: since -= 1 # we store timestamps in", "in seconds, ccxt uses millis since = secs_to_millis(since) try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m',", "since -= 1 # we store timestamps in seconds, ccxt uses millis since", "Updater: def __init__(self, exchange_name, symbol, db_base_path): self.exchange = build_exchange(exchange_name) self.symbol = symbol self.db", "we # got that data, it was not closed yet and contained partial", "def __init__(self, exchange_name, symbol, db_base_path): self.exchange = build_exchange(exchange_name) self.symbol = symbol self.db =", "since = secs_to_millis(since) try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError as", "0: self.db.add(data) for datum in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum) print()", "= self.fetch_ohlcv() if len(data) > 0: self.db.add(data) for datum in data: date =", "ask again for the last candle in case the last time we #", ".db import Database from .exchange import build_exchange, timestamps_to_seconds, secs_to_millis class Updater: def __init__(self,", "db_base_path) self.sleep_time = 45 def fetch_ohlcv(self): since = self.db.newest_timestamp() # some exchanges use", "= symbol self.db = Database(exchange_name, self.symbol, db_base_path) self.sleep_time = 45 def fetch_ohlcv(self): since", "try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError as err: print(f'ERROR: {err}')", "import ccxt import time import re from .db import Database from .exchange import", "data = [] return data def fetch_and_save(self): data = self.fetch_ohlcv() if len(data) >", "uses millis since = secs_to_millis(since) 
try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except", "was not closed yet and contained partial data if since is not None:", "print(f'ERROR: {err}') data = [] return data def fetch_and_save(self): data = self.fetch_ohlcv() if", "since = self.db.newest_timestamp() # some exchanges use since as an exclusive limit #", "return data def fetch_and_save(self): data = self.fetch_ohlcv() if len(data) > 0: self.db.add(data) for", "re from .db import Database from .exchange import build_exchange, timestamps_to_seconds, secs_to_millis class Updater:", "symbol, db_base_path): self.exchange = build_exchange(exchange_name) self.symbol = symbol self.db = Database(exchange_name, self.symbol, db_base_path)", "not closed yet and contained partial data if since is not None: since", "data def fetch_and_save(self): data = self.fetch_ohlcv() if len(data) > 0: self.db.add(data) for datum", "= secs_to_millis(since) try: data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError as err:", "data if since is not None: since -= 1 # we store timestamps", "datetime import datetime import ccxt import time import re from .db import Database", "import datetime import ccxt import time import re from .db import Database from", "import time import re from .db import Database from .exchange import build_exchange, timestamps_to_seconds,", "> 0: self.db.add(data) for datum in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum)", "since as an exclusive limit # we ask again for the last candle", "Database from .exchange import build_exchange, timestamps_to_seconds, secs_to_millis class Updater: def __init__(self, exchange_name, symbol,", "yet and contained partial data if since is not None: since -= 1", "is not None: since -= 1 # we store timestamps in seconds, ccxt", "timestamps_to_seconds(data) except ccxt.NetworkError as err: 
print(f'ERROR: {err}') data = [] return data def", "self.fetch_ohlcv() if len(data) > 0: self.db.add(data) for datum in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d", "# some exchanges use since as an exclusive limit # we ask again", "and contained partial data if since is not None: since -= 1 #", "-= 1 # we store timestamps in seconds, ccxt uses millis since =", ".exchange import build_exchange, timestamps_to_seconds, secs_to_millis class Updater: def __init__(self, exchange_name, symbol, db_base_path): self.exchange", "got that data, it was not closed yet and contained partial data if", "def fetch_ohlcv(self): since = self.db.newest_timestamp() # some exchanges use since as an exclusive", "exclusive limit # we ask again for the last candle in case the", "store timestamps in seconds, ccxt uses millis since = secs_to_millis(since) try: data =", "not None: since -= 1 # we store timestamps in seconds, ccxt uses", "data = self.exchange.fetch_ohlcv(self.symbol, timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError as err: print(f'ERROR: {err}') data", "= 45 def fetch_ohlcv(self): since = self.db.newest_timestamp() # some exchanges use since as", "except ccxt.NetworkError as err: print(f'ERROR: {err}') data = [] return data def fetch_and_save(self):", "secs_to_millis class Updater: def __init__(self, exchange_name, symbol, db_base_path): self.exchange = build_exchange(exchange_name) self.symbol =", "time we # got that data, it was not closed yet and contained", "for datum in data: date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum) print() def run(self):", "exchanges use since as an exclusive limit # we ask again for the", "from .exchange import build_exchange, timestamps_to_seconds, secs_to_millis class Updater: def __init__(self, exchange_name, symbol, db_base_path):", "import build_exchange, timestamps_to_seconds, secs_to_millis class Updater: def __init__(self, exchange_name, 
symbol, db_base_path): self.exchange =", "timeframe='1m', since=since) timestamps_to_seconds(data) except ccxt.NetworkError as err: print(f'ERROR: {err}') data = [] return", "import Database from .exchange import build_exchange, timestamps_to_seconds, secs_to_millis class Updater: def __init__(self, exchange_name,", "err: print(f'ERROR: {err}') data = [] return data def fetch_and_save(self): data = self.fetch_ohlcv()", "<reponame>luisparravicini/backtesting-learning from datetime import datetime import ccxt import time import re from .db", "self.db.newest_timestamp() # some exchanges use since as an exclusive limit # we ask", "date = datetime.fromtimestamp(datum[0]).strftime('%Y-%m-%d %H:%M:%S') print(date, datum) print() def run(self): print(f'sleep: {self.sleep_time}') while True:", "self.exchange = build_exchange(exchange_name) self.symbol = symbol self.db = Database(exchange_name, self.symbol, db_base_path) self.sleep_time =", "last time we # got that data, it was not closed yet and" ]
[ "in repl_kwargs: stack = inspect.stack() frame_info = stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] =", "= stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals # setup warnings if 'warnings'", "else: # single threaded if single_threaded: repl = Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown()", "cancel multi line statements') # NOQA # network embed if bind: single_threaded =", "if multi_session: return repl_server try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError as exception: exit('rlpython:", "rlpython.repl_server import ReplServer from rlpython.repl import Repl # debug mode if debug: logging.basicConfig(level=logging.DEBUG)", "not in repl_kwargs: repl_kwargs['warnings'] = [] if not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation", "frame_info.frame.f_locals # setup warnings if 'warnings' not in repl_kwargs: repl_kwargs['warnings'] = [] if", "multi_session: return repl_server try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError as exception: exit('rlpython: ERROR:", "in repl_kwargs: repl_kwargs['warnings'] = [] if not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation using", "repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError as exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() #", "# FIXME # single threaded if single_threaded: repl_server = ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK,", "threaded if single_threaded: repl = Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown() # multi threaded", "threaded else: repl_server = ReplServer( url='localhost:0', 
permissions=permissions, repl_domain=Repl.DOMAIN.LOCAL_NETWORK, ) repl_server.setup() port = repl_server.get_port()", "logging import os def embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False, **repl_kwargs): from", "finally: repl.shutdown() # multi threaded else: repl_server = ReplServer( url='localhost:0', permissions=permissions, repl_domain=Repl.DOMAIN.LOCAL_NETWORK, )", "import Repl # debug mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' # use", "# NOQA # network embed if bind: single_threaded = True # FIXME #", "single threaded: cancellation using CTRL-C will not work') # NOQA if single_threaded and", "try: repl.interact() finally: repl.shutdown() # multi threaded else: repl_server = ReplServer( url='localhost:0', permissions=permissions,", "repl_server.run_single_session(**repl_kwargs) except OSError as exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() # multi threaded", "start_frontend from rlpython.repl_server import ReplServer from rlpython.repl import Repl # debug mode if", "if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' # use namespace of caller instead of", "**repl_kwargs, ) if multi_session: return repl_server try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError as", "ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() # multi threaded else: raise NotImplementedError # local embed", "repl.shutdown() # multi threaded else: repl_server = ReplServer( url='localhost:0', permissions=permissions, repl_domain=Repl.DOMAIN.LOCAL_NETWORK, ) repl_server.setup()", "multi line statements') # NOQA # network embed if bind: single_threaded = True", "Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown() # multi threaded else: repl_server = ReplServer( 
url='localhost:0',", "import os def embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False, **repl_kwargs): from rlpython.frontend", "{}'.format(exception.args[1])) finally: repl_server.shutdown() # multi threaded else: raise NotImplementedError # local embed else:", "True # FIXME # single threaded if single_threaded: repl_server = ReplServer( url=bind, permissions=permissions,", "if 'warnings' not in repl_kwargs: repl_kwargs['warnings'] = [] if not started_from_cmd_line: repl_kwargs['warnings'].append('running single", "repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals # setup warnings if 'warnings' not in", "repl_kwargs['locals'] = frame_info.frame.f_locals # setup warnings if 'warnings' not in repl_kwargs: repl_kwargs['warnings'] =", "raise NotImplementedError # local embed else: # single threaded if single_threaded: repl =", "debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' # use namespace of caller instead of own", "bind: single_threaded = True # FIXME # single threaded if single_threaded: repl_server =", "# local embed else: # single threaded if single_threaded: repl = Repl(**repl_kwargs) try:", "threaded: Use \"!\" to cancel multi line statements') # NOQA # network embed", "work') # NOQA if single_threaded and not bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running single", "= inspect.stack() frame_info = stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals # setup", "return repl_server try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError as exception: exit('rlpython: ERROR: {}'.format(exception.args[1]))", "embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False, 
**repl_kwargs): from rlpython.frontend import start_frontend from", "cancellation using CTRL-C will not work') # NOQA if single_threaded and not bind", "repl_kwargs['warnings'].append('running single threaded: Use \"!\" to cancel multi line statements') # NOQA #", "repl_server = ReplServer( url='localhost:0', permissions=permissions, repl_domain=Repl.DOMAIN.LOCAL_NETWORK, ) repl_server.setup() port = repl_server.get_port() start_frontend(port) repl_server.run_single_session(**repl_kwargs)", "# debug mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' # use namespace of", "finally: repl_server.shutdown() # multi threaded else: raise NotImplementedError # local embed else: #", "single_threaded: repl = Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown() # multi threaded else: repl_server", "from rlpython.frontend import start_frontend from rlpython.repl_server import ReplServer from rlpython.repl import Repl #", "of own if nothing is set if 'globals' not in repl_kwargs and 'locals'", "= frame_info.frame.f_locals # setup warnings if 'warnings' not in repl_kwargs: repl_kwargs['warnings'] = []", "# multi threaded else: raise NotImplementedError # local embed else: # single threaded", "repl_kwargs and 'locals' not in repl_kwargs: stack = inspect.stack() frame_info = stack[1] repl_kwargs['globals']", "else: repl_server = ReplServer( url='localhost:0', permissions=permissions, repl_domain=Repl.DOMAIN.LOCAL_NETWORK, ) repl_server.setup() port = repl_server.get_port() start_frontend(port)", "frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals # setup warnings if 'warnings' not in repl_kwargs: repl_kwargs['warnings']", "single_threaded = True # FIXME # single threaded if single_threaded: repl_server = ReplServer(", "not work') # NOQA if single_threaded and not bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running", "# use namespace of caller instead of own if nothing is set 
if", "ReplServer from rlpython.repl import Repl # debug mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] =", "and 'locals' not in repl_kwargs: stack = inspect.stack() frame_info = stack[1] repl_kwargs['globals'] =", "repl_server.shutdown() # multi threaded else: raise NotImplementedError # local embed else: # single", "def embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False, **repl_kwargs): from rlpython.frontend import start_frontend", "<reponame>ukleinek/rlpython<gh_stars>1-10 import inspect import logging import os def embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False,", "of caller instead of own if nothing is set if 'globals' not in", "multi threaded else: raise NotImplementedError # local embed else: # single threaded if", "started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation using CTRL-C will not work') # NOQA if", "if single_threaded: repl = Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown() # multi threaded else:", "multi threaded else: repl_server = ReplServer( url='localhost:0', permissions=permissions, repl_domain=Repl.DOMAIN.LOCAL_NETWORK, ) repl_server.setup() port =", "# network embed if bind: single_threaded = True # FIXME # single threaded", "# single threaded if single_threaded: repl_server = ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs,", "OSError as exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() # multi threaded else: raise", ") if multi_session: return repl_server try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError as exception:", "= ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) 
if multi_session: return repl_server try:", "if nothing is set if 'globals' not in repl_kwargs and 'locals' not in", "exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() # multi threaded else: raise NotImplementedError # local", "multi_session=False, started_from_cmd_line=False, print=print, debug=False, **repl_kwargs): from rlpython.frontend import start_frontend from rlpython.repl_server import ReplServer", "inspect import logging import os def embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False,", "started_from_cmd_line=False, print=print, debug=False, **repl_kwargs): from rlpython.frontend import start_frontend from rlpython.repl_server import ReplServer from", "url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) if multi_session: return repl_server try: repl_server.setup() repl_server.print_bind_informations()", "if 'globals' not in repl_kwargs and 'locals' not in repl_kwargs: stack = inspect.stack()", "from rlpython.repl import Repl # debug mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True'", "Use \"!\" to cancel multi line statements') # NOQA # network embed if", "repl_server = ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) if multi_session: return repl_server", "else: raise NotImplementedError # local embed else: # single threaded if single_threaded: repl", "local embed else: # single threaded if single_threaded: repl = Repl(**repl_kwargs) try: repl.interact()", "frame_info = stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals # setup warnings if", "line statements') # NOQA # network embed if bind: single_threaded = True #", "logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' # use namespace of caller instead 
of own if", "ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) if multi_session: return repl_server try: repl_server.setup()", "import ReplServer from rlpython.repl import Repl # debug mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG']", "import logging import os def embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False, **repl_kwargs):", "try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError as exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown()", "= Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown() # multi threaded else: repl_server = ReplServer(", "will not work') # NOQA if single_threaded and not bind and not started_from_cmd_line:", "print=print, **repl_kwargs, ) if multi_session: return repl_server try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError", "and not bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: Use \"!\" to cancel", "exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() # multi threaded else: raise NotImplementedError #", "set if 'globals' not in repl_kwargs and 'locals' not in repl_kwargs: stack =", "and not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: Use \"!\" to cancel multi line statements')", "if not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation using CTRL-C will not work') #", "permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) if multi_session: return repl_server try: repl_server.setup() repl_server.print_bind_informations() 
repl_server.run_single_session(**repl_kwargs)", "statements') # NOQA # network embed if bind: single_threaded = True # FIXME", "started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: Use \"!\" to cancel multi line statements') # NOQA", "nothing is set if 'globals' not in repl_kwargs and 'locals' not in repl_kwargs:", "NOQA if single_threaded and not bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: Use", "stack = inspect.stack() frame_info = stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals #", "repl.interact() finally: repl.shutdown() # multi threaded else: repl_server = ReplServer( url='localhost:0', permissions=permissions, repl_domain=Repl.DOMAIN.LOCAL_NETWORK,", "import inspect import logging import os def embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print,", "warnings if 'warnings' not in repl_kwargs: repl_kwargs['warnings'] = [] if not started_from_cmd_line: repl_kwargs['warnings'].append('running", "print=print, debug=False, **repl_kwargs): from rlpython.frontend import start_frontend from rlpython.repl_server import ReplServer from rlpython.repl", "single_threaded: repl_server = ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) if multi_session: return", "threaded else: raise NotImplementedError # local embed else: # single threaded if single_threaded:", "except OSError as exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() # multi threaded else:", "# NOQA if single_threaded and not bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded:", "repl_kwargs['warnings'] = [] if not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation using CTRL-C will", "not started_from_cmd_line: 
repl_kwargs['warnings'].append('running single threaded: Use \"!\" to cancel multi line statements') #", "own if nothing is set if 'globals' not in repl_kwargs and 'locals' not", "if bind: single_threaded = True # FIXME # single threaded if single_threaded: repl_server", "single threaded: Use \"!\" to cancel multi line statements') # NOQA # network", "repl = Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown() # multi threaded else: repl_server =", "rlpython.repl import Repl # debug mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' #", "rlpython.frontend import start_frontend from rlpython.repl_server import ReplServer from rlpython.repl import Repl # debug", "is set if 'globals' not in repl_kwargs and 'locals' not in repl_kwargs: stack", "single threaded if single_threaded: repl_server = ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, )", "setup warnings if 'warnings' not in repl_kwargs: repl_kwargs['warnings'] = [] if not started_from_cmd_line:", "not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation using CTRL-C will not work') # NOQA", "os def embed(single_threaded=False, bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False, **repl_kwargs): from rlpython.frontend import", "from rlpython.repl_server import ReplServer from rlpython.repl import Repl # debug mode if debug:", "network embed if bind: single_threaded = True # FIXME # single threaded if", "# multi threaded else: repl_server = ReplServer( url='localhost:0', permissions=permissions, repl_domain=Repl.DOMAIN.LOCAL_NETWORK, ) repl_server.setup() port", "inspect.stack() frame_info = stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals # setup warnings", "'warnings' not in repl_kwargs: repl_kwargs['warnings'] = [] if not started_from_cmd_line: 
repl_kwargs['warnings'].append('running single threaded:", "repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) if multi_session: return repl_server try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except", "= [] if not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation using CTRL-C will not", "bind='', permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False, **repl_kwargs): from rlpython.frontend import start_frontend from rlpython.repl_server", "**repl_kwargs): from rlpython.frontend import start_frontend from rlpython.repl_server import ReplServer from rlpython.repl import Repl", "in repl_kwargs and 'locals' not in repl_kwargs: stack = inspect.stack() frame_info = stack[1]", "not in repl_kwargs: stack = inspect.stack() frame_info = stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals']", "CTRL-C will not work') # NOQA if single_threaded and not bind and not", "stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals # setup warnings if 'warnings' not", "single threaded if single_threaded: repl = Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown() # multi", "repl_kwargs['warnings'].append('running single threaded: cancellation using CTRL-C will not work') # NOQA if single_threaded", "embed else: # single threaded if single_threaded: repl = Repl(**repl_kwargs) try: repl.interact() finally:", "caller instead of own if nothing is set if 'globals' not in repl_kwargs", "# setup warnings if 'warnings' not in repl_kwargs: repl_kwargs['warnings'] = [] if not", "threaded: cancellation using CTRL-C will not work') # NOQA if single_threaded and not", "= True # FIXME # single threaded if single_threaded: repl_server = ReplServer( url=bind,", "repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except 
OSError as exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() # multi", "'locals' not in repl_kwargs: stack = inspect.stack() frame_info = stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals", "use namespace of caller instead of own if nothing is set if 'globals'", "[] if not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation using CTRL-C will not work')", "namespace of caller instead of own if nothing is set if 'globals' not", "to cancel multi line statements') # NOQA # network embed if bind: single_threaded", "os.environ['RLPYTHON_DEBUG'] = 'True' # use namespace of caller instead of own if nothing", "if single_threaded and not bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: Use \"!\"", "instead of own if nothing is set if 'globals' not in repl_kwargs and", "repl_server try: repl_server.setup() repl_server.print_bind_informations() repl_server.run_single_session(**repl_kwargs) except OSError as exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally:", "single_threaded and not bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: Use \"!\" to", "= 'True' # use namespace of caller instead of own if nothing is", "repl_kwargs: stack = inspect.stack() frame_info = stack[1] repl_kwargs['globals'] = frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals", "if single_threaded: repl_server = ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) if multi_session:", "mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' # use namespace of caller instead", "permissions='600', multi_session=False, started_from_cmd_line=False, print=print, debug=False, **repl_kwargs): from rlpython.frontend import start_frontend from rlpython.repl_server import", "repl_kwargs: 
repl_kwargs['warnings'] = [] if not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: cancellation using CTRL-C", "embed if bind: single_threaded = True # FIXME # single threaded if single_threaded:", "debug mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' # use namespace of caller", "import start_frontend from rlpython.repl_server import ReplServer from rlpython.repl import Repl # debug mode", "not in repl_kwargs and 'locals' not in repl_kwargs: stack = inspect.stack() frame_info =", "as exception: exit('rlpython: ERROR: {}'.format(exception.args[1])) finally: repl_server.shutdown() # multi threaded else: raise NotImplementedError", "FIXME # single threaded if single_threaded: repl_server = ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print,", "debug=False, **repl_kwargs): from rlpython.frontend import start_frontend from rlpython.repl_server import ReplServer from rlpython.repl import", "\"!\" to cancel multi line statements') # NOQA # network embed if bind:", "using CTRL-C will not work') # NOQA if single_threaded and not bind and", "Repl # debug mode if debug: logging.basicConfig(level=logging.DEBUG) os.environ['RLPYTHON_DEBUG'] = 'True' # use namespace", "= frame_info.frame.f_globals repl_kwargs['locals'] = frame_info.frame.f_locals # setup warnings if 'warnings' not in repl_kwargs:", "NOQA # network embed if bind: single_threaded = True # FIXME # single", "threaded if single_threaded: repl_server = ReplServer( url=bind, permissions=permissions, repl_domain=Repl.DOMAIN.NETWORK, print=print, **repl_kwargs, ) if", "NotImplementedError # local embed else: # single threaded if single_threaded: repl = Repl(**repl_kwargs)", "# single threaded if single_threaded: repl = Repl(**repl_kwargs) try: repl.interact() finally: repl.shutdown() #", "not bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: Use \"!\" to 
cancel multi", "'globals' not in repl_kwargs and 'locals' not in repl_kwargs: stack = inspect.stack() frame_info", "'True' # use namespace of caller instead of own if nothing is set", "bind and not started_from_cmd_line: repl_kwargs['warnings'].append('running single threaded: Use \"!\" to cancel multi line" ]
[ "i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def test_requiredTest2(self): kwargs", "{}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self): kwargs = {\"test\": \"255\"} extraArgs = [\"--verbose\", \"otherArg\"]", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs = { \"one.two.three\":", "CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs = {", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs = { \"one\":", "self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args = [\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {})", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"] args = [\"/usr/bin/whatever\"]", "def test_constructor(self): parser = CommandLineParser() self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [])", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"]", "parser.parse(args) def 
test_renameKeywordArguments(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"]", "args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self):", "\"one\": '1', \"two.three\": '1', } extraArgs = [\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self): kwargs =", "test_emptyArgs(self): args = [] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(),", "\"/usr/bin/whatever\") def test_complexKey(self): kwargs = { \"one.two.three\": '1', } extraArgs = [] args", "kwargs = { \"one\": '1', \"two\": \"2\", \"-d\": \"hello\", \"--ignore\": '5', } extraArgs", "args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {\"test\": \"255\"})", "args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self):", "= [\"one\", \"two\", \"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser() parser.parse(args)", "kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def test_requiredTest2(self): kwargs =", "args = [] parser = CommandLineParser() 
parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None)", "\"one\": '1', \"two\": \"2\", \"-d\": \"hello\", \"--ignore\": '5', } extraArgs = [] args", "= CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs =", "(i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args)", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\":", "CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs = {", "{\"--verbose\": 1} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verb\": \"1\"} extraArgs", "parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\": \"1\",", "= CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"} extraArgs = []", "= {\"verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) 
self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"--verbose\"] args = [\"/usr/bin/whatever\"]", "kwargs = {\"test\": \"255\"} extraArgs = [\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "kwargs = {\"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\", \"--verbose\", \"verb\", \"verbose\"] args =", "test_renameKeywordArguments(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "= CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {\"test\": \"255\"}) self.assertEqual(parser.getExtraArguments(), [\"verbose\",", "Cannot parse string to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self): kwargs", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verb\":", "TestCase from jsonconf import CommandLineParser class ConfigTests(TestCase): def setUp(self): pass def test_constructor(self): parser", "self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\"} 
extraArgs = [] args =", "kwargs = {\"--verbose\": \"hello\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\", \"--verbose\", \"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"]", "= CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def test_requiredTest2(self): kwargs = {\"--verbose\": 1} extraArgs", "test_requiredTest(self): kwargs = {} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() # Cannot", "parser.parse(args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"} extraArgs = [] args = [\"/usr/bin/whatever\"]", "self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\": \"1\", \"verb\": \"1\"} extraArgs", "self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs = {} extraArgs = [] args = [\"/usr/bin/whatever\"]", "{}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs = {} extraArgs = [\"-v\"]", "[\"verbose\"]) extraArgs = [\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "= [\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % 
(i[0], i[1]), kwargs.items()))", "args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args)", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\"} extraArgs =", "\"--verbose\", \"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items()))", "{}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args = [\"/usr/bin/whatever\"] parser = CommandLineParser()", "{\"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\", \"--verbose\", \"verb\", \"verbose\"]", "\"255\"} extraArgs = [\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "parse string to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self): kwargs =", "\"/usr/bin/whatever\") def test_extraArgs(self): extraArgs = [\"one\", \"two\", \"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs)", "\"1\", \"-v\": \"1\", \"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self):", "[\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) 
args.extend(extraArgs)", "kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs = { \"one.two.three\": '1', }", "CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self): kwargs = {\"--verbose\": \"1\"} extraArgs = []", "args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(),", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\",", "args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(),", "[]) kwargs = {\"-v\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "{\"-v\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "\"hello\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "\"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser", "= {} extraArgs = [\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "[\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser", "{}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args = [] parser = 
CommandLineParser()", "parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"])", "= [\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs)", "self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs = { \"one\": '1', \"two.three\": '1', } extraArgs", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\": \"1\"} extraArgs", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args = [] parser =", "i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() # Cannot parse string to int parser.requireKey(\"--verbose\",", "\"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"]", "test_renameExtraArguments(self): kwargs = {} extraArgs = [\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "= {} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs = [\"one\", \"two\", \"-d\", \"--ignore\"] args", "i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\")", "i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = 
CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse,", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args = [\"/usr/bin/whatever\"] parser", "{\"test\": \"255\"} extraArgs = [\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "[\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\",", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self): kwargs =", "int) self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"} extraArgs = []", "(i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def test_requiredTest2(self):", "kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self): kwargs = {\"--verbose\":", "\"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\": \"1\", \"verb\": \"1\"}", "test_keyArgs(self): kwargs = { \"one\": '1', \"two\": \"2\", \"-d\": \"hello\", \"--ignore\": '5', }", "self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "extraArgs = [\"-v\", \"--verbose\", \"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def 
test_requiredTest2(self): kwargs = {\"--verbose\": 1} extraArgs = []", "ConfigTests(TestCase): def setUp(self): pass def test_constructor(self): parser = CommandLineParser() self.assertTrue(parser is not None)", "def setUp(self): pass def test_constructor(self): parser = CommandLineParser() self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(),", "i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(),", "def test_complexKey(self): kwargs = { \"one.two.three\": '1', } extraArgs = [] args =", "import CommandLineParser class ConfigTests(TestCase): def setUp(self): pass def test_constructor(self): parser = CommandLineParser() self.assertTrue(parser", "= CommandLineParser() # Cannot parse string to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args)", "\"-d\": \"hello\", \"--ignore\": '5', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\")", "self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args", "to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"}", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args = [\"/usr/bin/whatever\"] parser =", "= [\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser() 
parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\")", "self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args = [] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {})", "# Cannot parse string to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self):", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs = {} extraArgs", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\", \"--verbose\": \"1\",", "unittest import TestCase from jsonconf import CommandLineParser class ConfigTests(TestCase): def setUp(self): pass def", "parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args", "test_complexKey(self): kwargs = { \"one.two.three\": '1', } extraArgs = [] args = [\"/usr/bin/whatever\"]", "= {\"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "'5', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "{ \"one\": '1', \"two\": \"2\", \"-d\": \"hello\", \"--ignore\": '5', } extraArgs = []", "\"two.three\": '1', } extraArgs = [\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "\"verb\"]) parser.parse(args) 
self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verb\": \"1\"} extraArgs = []", "self.assertRaises(Exception, parser.parse, args) def test_requiredTest2(self): kwargs = {\"--verbose\": 1} extraArgs = [] args", "\"hello\", \"--ignore\": '5', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs = {} extraArgs", "[\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser", "args.extend(extraArgs) parser = CommandLineParser() # Cannot parse string to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception,", "def test_singleArg(self): args = [\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [])", "= {\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\": \"1\", \"verb\": \"1\"} extraArgs = [] args", "= CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs =", "(i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() # Cannot parse string to int", "\"1\", \"--verbose\": \"1\", \"-v\": \"1\", \"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"]", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"] args", "\"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) 
self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self): kwargs = {\"test\": \"255\"} extraArgs", "CommandLineParser class ConfigTests(TestCase): def setUp(self): pass def test_constructor(self): parser = CommandLineParser() self.assertTrue(parser is", "parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verb\": \"1\"} extraArgs = [] args", "parser = CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def test_requiredTest2(self): kwargs = {\"--verbose\": 1}", "\"/usr/bin/whatever\") def test_keyArgs(self): kwargs = { \"one\": '1', \"two\": \"2\", \"-d\": \"hello\", \"--ignore\":", "is not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args =", "parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\": \"1\"} extraArgs =", "parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs", "parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\": \"1\"} extraArgs = [] args", "\"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\", \"--verbose\", \"verb\", 
\"verbose\"] args", "string to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self): kwargs = {\"--verbose\":", "{}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "\"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "kwargs = {\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs = { \"one\": '1',", "kwargs = {\"-v\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args", "class ConfigTests(TestCase): def setUp(self): pass def test_constructor(self): parser = CommandLineParser() self.assertTrue(parser is not", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\", \"--verbose\", \"verb\",", "\"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() # Cannot parse string", "[]) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs = [\"one\", \"two\", \"-d\", \"--ignore\"] args =", "not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args = []", 
"CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {\"test\": \"255\"}) self.assertEqual(parser.getExtraArguments(), [\"verbose\", \"otherArg\"])", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"--verbose\"] args =", "\"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(),", "} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "% (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() # Cannot parse string to", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\"} extraArgs", "self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\", \"--verbose\", \"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "= [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs)", "args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"]", "kwargs = { \"one.two.three\": '1', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs", "\"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) 
self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs = {} extraArgs = [\"-v\"] args = [\"/usr/bin/whatever\"]", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs = { \"one\":", "args) def test_requiredTest2(self): kwargs = {\"--verbose\": 1} extraArgs = [] args = [\"/usr/bin/whatever\"]", "= { \"one\": '1', \"two\": \"2\", \"-d\": \"hello\", \"--ignore\": '5', } extraArgs =", "kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs = { \"one\": '1', \"two.three\":", "kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {\"test\":", "} extraArgs = [\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self): kwargs", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\", \"--verbose\":", "def test_renameKeywordArguments(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "[\"-v\", \"--verbose\", \"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", 
"test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "\"/usr/bin/whatever\") def test_both(self): kwargs = { \"one\": '1', \"two.three\": '1', } extraArgs =", "{}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs = [\"one\", \"two\", \"-d\", \"--ignore\"]", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verb\": \"1\"} extraArgs =", "self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs = [\"one\", \"two\", \"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"]", "int) parser.parse(args) def test_renameKeywordArguments(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args =", "def test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs = { \"one.two.three\": '1', } extraArgs = []", "= { \"one\": '1', \"two.three\": '1', } extraArgs = [\"--test\", \"-v\"] args =", "[\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\")", "self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"} extraArgs", "[]) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args = [] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(),", "CommandLineParser() 
parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs = [\"one\",", "[\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser", "from unittest import TestCase from jsonconf import CommandLineParser class ConfigTests(TestCase): def setUp(self): pass", "args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\", int)", "self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args = [\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args)", "jsonconf import CommandLineParser class ConfigTests(TestCase): def setUp(self): pass def test_constructor(self): parser = CommandLineParser()", "setUp(self): pass def test_constructor(self): parser = CommandLineParser() self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(), {})", "[\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() #", "None) def test_singleArg(self): args = [\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(),", "[\"one\", \"two\", \"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(),", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\", \"--verbose\",", "args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), 
kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception,", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self): kwargs = {\"test\": \"255\"} extraArgs =", "\"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "= [\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self): kwargs = {\"test\": \"255\"}", "parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs", "i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args) def", "\"--ignore\": '5', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self):", "CommandLineParser() parser.parse(args) 
self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs = {}", "\"two\", \"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {})", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\": \"1\"}", "'1', \"two\": \"2\", \"-d\": \"hello\", \"--ignore\": '5', } extraArgs = [] args =", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\":", "= [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser()", "{ \"one\": '1', \"two.three\": '1', } extraArgs = [\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"]", "= [] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def", "= CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self): kwargs = {\"--verbose\": \"1\"} extraArgs =", "[]) kwargs = {\"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "= CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs =", "\"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() 
parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self):", "CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"} extraArgs = [] args", "i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs)", "\"-v\": \"1\", \"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "% (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"])", "\"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\": \"1\"} extraArgs = []", "\"2\", \"-d\": \"hello\", \"--ignore\": '5', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"] args =", "= CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs =", "= {\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "from jsonconf import CommandLineParser class ConfigTests(TestCase): def setUp(self): pass def test_constructor(self): parser =", "parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) 
self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args", "= CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args =", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs", "= {\"--verbose\": \"hello\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "[\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\",", "CommandLineParser() # Cannot parse string to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args) def", "kwargs = {\"--verbose\": 1} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs = { \"one\": '1', \"two\": \"2\",", "'1', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs = { \"one\": '1',", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"] args =", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), 
[\"verbose\"]) extraArgs = [\"--verbose\"]", "[\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.parse(args)", "self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verb\": \"1\"} extraArgs = [] args =", "self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\": \"1\", \"verb\":", "[\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self):", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs = {}", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "pass def test_constructor(self): parser = CommandLineParser() self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(),", "parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {\"test\": \"255\"}) self.assertEqual(parser.getExtraArguments(),", "args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(),", "args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\") 
parser.parse(args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"} extraArgs", "self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self): kwargs = {\"test\": \"255\"} extraArgs = [\"--verbose\",", "(i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(),", "test_singleArg(self): args = [\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(),", "extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items()))", "\"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs = {} extraArgs =", "args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def test_requiredTest2(self): kwargs = {\"--verbose\":", "i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() # Cannot parse", "[\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"verbose\")", "self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs = [\"one\", \"two\", \"-d\",", "= CommandLineParser() 
parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def", "extraArgs = [\"one\", \"two\", \"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser()", "kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def", "[]) def test_renameExtraArguments(self): kwargs = {} extraArgs = [\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "(i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self): kwargs", "= [\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items()))", "extraArgs = [\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items()))", "\"two\": \"2\", \"-d\": \"hello\", \"--ignore\": '5', } extraArgs = [] args = [\"/usr/bin/whatever\"]", "test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse, args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"} extraArgs =", "kwargs = {} extraArgs = [\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "parser = CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"} extraArgs =", "% (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) 
self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs)", "[]) kwargs = {\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\": \"1\", \"verb\": \"1\"} extraArgs =", "args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser =", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"--verbose\"] args", "import TestCase from jsonconf import CommandLineParser class ConfigTests(TestCase): def setUp(self): pass def test_constructor(self):", "extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs = { \"one\": '1', \"two.three\": '1', }", "kwargs = {} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "[\"verbose\"]) extraArgs = [\"-v\", \"--verbose\", \"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "= CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs =", "self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\": \"1\"} extraArgs = [] args =", "parser = CommandLineParser() # Cannot parse string to int parser.requireKey(\"--verbose\", int) self.assertRaises(Exception, parser.parse,", "[\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser", "extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs = { \"one.two.three\": '1', } extraArgs =", "self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def 
test_renameOtherArgs(self): kwargs = {\"test\": \"255\"} extraArgs = [\"--verbose\", \"otherArg\"] args", "\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser", "\"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) def test_renameOtherArgs(self): kwargs = {\"test\":", "extraArgs = [\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "(i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self): kwargs =", "CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs =", "extraArgs = [\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items()))", "[]) self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args = [\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(),", "CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self):", "[] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_singleArg(self):", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = 
{\"verbose\": \"1\"}", "extraArgs = [\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items()))", "args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(),", "[]) kwargs = {\"verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "= { \"one.two.three\": '1', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {})", "extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs = { \"one\": '1', \"two\": \"2\", \"-d\":", "1} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "test_extraArgs(self): extraArgs = [\"one\", \"two\", \"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser =", "'1', } extraArgs = [\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "= {\"test\": \"255\"} extraArgs = [\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() # Cannot parse string to int parser.requireKey(\"--verbose\", int)", "CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs = {", "[] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser", "{}) self.assertEqual(parser.getExtraArguments(), extraArgs) 
self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs = { \"one\": '1', \"two\":", "% (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def", "\"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"] args = [\"/usr/bin/whatever\"]", "\"1\", \"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "\"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs", "\"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\",", "parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"} extraArgs = [] args =", "= [\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs)", "def test_requiredTest(self): kwargs = {} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "def test_emptyArgs(self): args = [] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [])", "parser = CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self): kwargs = {\"--verbose\": 
\"1\"} extraArgs", "\"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "= [\"-v\", \"--verbose\", \"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "= [\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs)", "= CommandLineParser() self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def", "parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"]", "CommandLineParser() self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self):", "'1', \"two.three\": '1', } extraArgs = [\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_extraArgs(self): extraArgs = [\"one\", \"two\",", "i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args)", "= CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs", "def test_requiredTest2(self): kwargs = {\"--verbose\": 1} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "[\"verbose\"]) extraArgs = 
[\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "= CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs", "= [\"verb\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs)", "def test_both(self): kwargs = { \"one\": '1', \"two.three\": '1', } extraArgs = [\"--test\",", "\"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs)", "parser = CommandLineParser() self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None)", "\"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\":", "\"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"-v\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"]", "kwargs = {\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\": \"1\", \"verb\": \"1\"} extraArgs = []", "args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\",", "= {\"-v\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "[\"verbose\"]) def test_renameOtherArgs(self): kwargs = {\"test\": \"255\"} extraArgs = [\"--verbose\", \"otherArg\"] args =", "self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_both(self): kwargs = { \"one\": '1', \"two.three\": 
'1',", "args = [\"/usr/bin/whatever\"] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\")", "args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self):", "None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args = [] parser", "= {\"--verbose\": 1} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs = {} extraArgs =", "[\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs)", "\"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args)", "\"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(),", "{} extraArgs = [\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "{\"--verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "def test_extraArgs(self): extraArgs = [\"one\", \"two\", \"-d\", \"--ignore\"] args = [\"/usr/bin/whatever\"] args.extend(extraArgs) parser", 
"self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_keyArgs(self): kwargs = { \"one\": '1', \"two\": \"2\", \"-d\": \"hello\",", "i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self): kwargs = {\"--verbose\":", "{\"--verbose\": \"hello\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs = {} extraArgs = [] args", "[\"verbose\"]) extraArgs = [\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_emptyArgs(self): args = [] parser = CommandLineParser() parser.parse(args)", "{ \"one.two.three\": '1', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "parser.parse, args) def test_requiredTest2(self): kwargs = {\"--verbose\": 1} extraArgs = [] args =", "i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\",", "i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"),", "% (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self): kwargs", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs =", "parser.parse(args) self.assertEqual(parser.getKeywordArguments(), 
{}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"--verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\") parser.parse(args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"}", "[\"/usr/bin/whatever\"] args.extend(extraArgs) parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def", "= CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def", "def test_renameOtherArgs(self): kwargs = {\"test\": \"255\"} extraArgs = [\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"]", "{\"verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "kwargs = {\"verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\"", "parser.parse, args) def test_invalidConverter(self): kwargs = {\"--verbose\": \"1\"} extraArgs = [] args =", "self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0],", "CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs =", "test_both(self): kwargs = { \"one\": '1', \"two.three\": '1', } extraArgs = [\"--test\", \"-v\"]", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = 
{\"verb\": \"1\"}", "args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self): kwargs = {\"--verbose\": \"1\"}", "parser = CommandLineParser() parser.renameKeys(\"verbose\", [\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), [])", "test_requiredTest2(self): kwargs = {\"--verbose\": 1} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs = {} extraArgs = []", "kwargs = { \"one\": '1', \"two.three\": '1', } extraArgs = [\"--test\", \"-v\"] args", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verb\"] args", "{\"verbose\": \"1\", \"--verbose\": \"1\", \"-v\": \"1\", \"verb\": \"1\"} extraArgs = [] args =", "[\"-v\", \"--verbose\", \"verbose\", \"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\",", "{}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"-v\", \"--verbose\", \"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "self.assertEqual(parser.getKeywordArguments(), kwargs) self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs = { \"one.two.three\": '1',", "\"verb\"]) parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\"} extraArgs = []", "def test_keyArgs(self): kwargs = { \"one\": '1', \"two\": \"2\", 
\"-d\": \"hello\", \"--ignore\": '5',", "\"one.two.three\": '1', } extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_requiredTest(self): kwargs = {} extraArgs = [] args =", "\"--verbose\": \"1\", \"-v\": \"1\", \"verb\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "% (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def test_renameKeywordArguments(self):", "self.assertEqual(parser.getExtraArguments(), extraArgs) self.assertEqual(parser.getProgram(), \"/usr/bin/whatever\") def test_complexKey(self): kwargs = { \"one.two.three\": '1', } extraArgs", "parser.parse(args) self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\"} extraArgs = [] args", "CommandLineParser() parser.requireKey(\"verbose\") self.assertRaises(Exception, parser.parse, args) def test_requiredTest2(self): kwargs = {\"--verbose\": 1} extraArgs =", "\"1\") self.assertEqual(parser.getExtraArguments(), []) kwargs = {\"verbose\": \"1\"} extraArgs = [] args = [\"/usr/bin/whatever\"]", "\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser", "extraArgs = [\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items()))", "None) def test_emptyArgs(self): args = [] parser = CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(),", "CommandLineParser() parser.parse(args) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(), None) def test_singleArg(self): args = [\"/usr/bin/whatever\"]", "def 
test_invalidConverter(self): kwargs = {\"--verbose\": \"hello\"} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "self.assertEqual(parser.get(\"verbose\"), \"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs = {} extraArgs = [\"-v\"] args", "{}) self.assertEqual(parser.getExtraArguments(), [\"verbose\"]) extraArgs = [\"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" %", "\"verb\", \"verbose\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs)", "extraArgs = [\"--test\", \"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "\"/usr/bin/whatever\") def test_requiredTest(self): kwargs = {} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda", "test_constructor(self): parser = CommandLineParser() self.assertTrue(parser is not None) self.assertEqual(parser.getKeywordArguments(), {}) self.assertEqual(parser.getExtraArguments(), []) self.assertEqual(parser.getProgram(),", "\"%s=%s\" % (i[0], i[1]), kwargs.items())) args.extend(extraArgs) parser = CommandLineParser() parser.requireKey(\"--verbose\", int) parser.parse(args) def", "\"1\") self.assertEqual(parser.getExtraArguments(), []) def test_renameExtraArguments(self): kwargs = {} extraArgs = [\"-v\"] args =", "{} extraArgs = [] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i: \"%s=%s\" % (i[0], i[1]),", "def test_renameExtraArguments(self): kwargs = {} extraArgs = [\"-v\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda i:", "test_renameOtherArgs(self): kwargs = {\"test\": \"255\"} extraArgs = [\"--verbose\", \"otherArg\"] args = [\"/usr/bin/whatever\"] args.extend(map(lambda" ]
[ "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) expected.id =", "{}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self):", "('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID1'), ('othermarker', 'no link')])", "entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('marker2', 'val2'), ('cap', 'caption'), ('marker3', 'val3')])", "self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1',", "s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4]) self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen", "'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1',", "= s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs':", "'lang1') self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'}) def", "= [1, 3, 5] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd, [1,", "too!' 
expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no", "{}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3'] def test_multimarkers(): sfm_entry =", "def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'link:", "def test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id = 'id1' cldf_row =", "('sep', 'value'), ('marker2', 'value2')] expected = [ [('marker1', 'value1')], [('sep', 'value'), ('marker2', 'value2')]]", "expected = {} self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping = {'marker1': 'Column1',", "{'ID': 'id1', 'Column1': 'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('unknown',", "'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'link: OLDID2')]) expected = sfm.Entry([ ('linkmarker1', 'no", "self.assertEqual(even, []) self.assertEqual(odd, [1, 3, 5]) def test_all_elements_match_pred(self): def iseven(x): return x %", "[label 1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry", "'val1'), ('pc', 'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')])", "'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no", "'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1',", "'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(), sfm_entry) 
assert cldf_row['Gloss'] == ['abc',", "_ = self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'),", "expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')])", "sep = 'sep' input_markers = [ ('marker1', 'value1'), ('marker2', 'value2')] expected = [", "= sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry)", "expected) def test_split_groups_on_separator(self): sep = 'sep' input_markers = [ ('marker1', 'value1'), ('sep', 'value'),", "'value2')] expected = [ [('marker1', 'value1')], [('sep', 'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)),", "OLDID2'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1:", "sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id = 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None,", "sfm_entry.id = 'id1' sfm_entry.media_ids = ['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(),", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry)", "('othermarker', 'no link')]) original_entry.id = 'I have an ID, too!' 
expected = sfm.Entry([", "cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] == ['val1', 'val2',", "input_markers = [ ('marker1', 'value1'), ('marker2', 'value2')] expected = [ [('marker1', 'value1'), ('marker2',", "= sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id = 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping,", "1](NEWID1); link 2: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected)", "('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([", "'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self):", "'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3'] def test_multimarkers(): sfm_entry", "'no link')]) _ = self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1',", "['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids =", "original_entry.id = 'I have an ID, too!' 
expected = sfm.Entry([ ('linkmarker1', 'no link'),", "iseven(x): return x % 2 == 0 elements = [1, 2, 3, 4]", "'link: OLDID1'), ('othermarker', 'link: OLDID2')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([", "sfm_entry.entry_id = 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID':", "['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID':", "{}, set(), sfm_entry) assert cldf_row['Gloss'] == 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {},", "test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(),", "= 'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry)", "label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no", "gen = s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen = s.IDGenerator()", "('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1',", "{'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')])", "0 elements = [1, 3, 5] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [])", "{ 'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'} label_index = { 'NEWID1': 'label 1',", "sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' 
sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row =", "('cap', 'caption2'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected =", "class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping = {'marker1': 'Column1', 'marker2': 'Column2'} def test_map_id(self): sfm_entry", "['file1', 'file2']}) def test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'},", "'cap') _ = caption_finder(entry) expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry", "{} self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping = {'marker1': 'Column1', 'marker2': 'Column2'}", "sfm.Entry([ ('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry", "= [1, 2, 3, 4] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4])", "GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self):", "('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) original_entry.id = 'I have an ID, too!'", "'caption'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {}", "set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry", "1: OLDID1; link 2: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no", "caption_finder(entry) expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1',", "label_index = { 'NEWID1': 'label 1', 'NEWID2': 'label 2', 
'NEWID3': 'label 3'} link_markers", "1', 'NEWID2': 'label 2', 'NEWID3': 'label 3'} link_markers = {'linkmarker1', 'linkmarker2'} link_regex =", "% 2 == 0 elements = [2, 4, 6] even, odd = s.split_by_pred(iseven,", "{'ID': 'id1', 'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1'", "def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1', 'sense2']", "elements) self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd, []) def test_some_elements_match_pred(self): def iseven(x): return x", "('cap', 'caption'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected =", "def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no", "'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected", "'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2',", "= ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry, 'lang1') self.assertEqual( cldf_row,", "== 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] ==", "1](NEWID1); link 2: [label 2](NEWID2)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected)", "'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('marker2',", "('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'link: [label 2](NEWID2)'),", "'label 2', 'NEWID3': 'label 3'} link_markers = 
{'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) expected = sfm.Entry([", "== ['abc', 'def'] def test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row =", "expected) def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker',", "elements = [1, 3, 5] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd,", "[2, 4]) self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id", "= sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None,", "expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('cap',", "test_all_elements_match_pred(self): def iseven(x): return x % 2 == 0 elements = [2, 4,", "self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id =", "def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers = [ ('marker1', 'value1'), ('marker2', 'value2')] expected", "cldf_row['Gloss'] == 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss']", "clldutils.sfm as sfm class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers = [", "link 2: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def", "('pc', 'image-name'), ('cap', 'caption'), 
('marker2', 'val2')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry)", "'file2']}) def test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {},", "s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = { 'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions,", "= self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry = sfm.Entry([ ('marker1', 'val1'),", "'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'}, {}, set(), sfm_entry) assert cldf_row['See_Also'] == 'val1", "4, 6] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd, [])", "OLDID1000'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'),", "'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')]) sfm_entry.id = 'id1'", "[label 1](NEWID1)'), ('othermarker', 'no link')]) expected.id = 'I have an ID, too!' 
new_entry", "OLDID1'), ('othermarker', 'no link')]) _ = self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry =", "'Media_IDs': ['file1', 'file2']}) def test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge':", "link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID1'), ('othermarker', 'no link')]) expected =", "list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def iseven(x): return x % 2", "'Column2'} def test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping,", "'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id =", "'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 1](NEWID1)'), ('othermarker', 'no", "cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1',", "('linkmarker2', 'no link'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self):", "'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id = 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(),", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) _ =", "expected = sfm.Entry([ ('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no", "expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker',", "SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def 
iseven(x): return x % 2 == 0 elements =", "odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4]) self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase): def", "{'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3'] def test_multimarkers():", "first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase): def", "self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == ['abc', 'def'] def test_cf(): sfm_entry =", "self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Media_IDs': ['file1', 'file2']}) def test_gloss(): sfm_entry =", "link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) expected.id = 'I have an", "sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([", "def test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'),", "link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no", "= s.LinkProcessor( id_index, label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1', 'no", "4] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4]) self.assertEqual(odd, [1, 3]) class", "('linkmarker2', 'link 1: OLDID1; link 2: OLDID2'), ('othermarker', 'no link')]) expected = sfm.Entry([", "('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label 
1](NEWID1); link 2: [label 1](NEWID1)'), ('othermarker',", "'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) _ = self.link_processor(original_entry) self.assertEqual(original_entry, expected)", "sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row,", "assert cldf_row['Gloss'] == ['abc', 'def'] def test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')])", "= self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2',", "= self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "[label 1](NEWID1); link 2: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry,", "'link: OLDID2')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker',", "('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([", "('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) expected =", "class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('cap', 'caption'),", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) _ = self.link_processor(original_entry)", "link')]) new_entry = 
self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1', 'no", "self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'),", "self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry =", "sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping,", "= [ ('marker1', 'value1'), ('marker2', 'value2')] expected = [ [('marker1', 'value1'), ('marker2', 'value2')]]", "[ [('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) def test_split_groups_on_separator(self): sep =", "'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no", "'link 1: [label 1](NEWID1); link 2: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry =", "test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id = 'entry1' cldf_row =", "test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids = ['file1', 'file2'] cldf_row", "'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids =", "{}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Media_IDs': ['file1', 'file2']}) def", "expected) class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping = {'marker1': 'Column1', 'marker2': 'Column2'} def test_map_id(self):", "self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 
'Column2': 'value2'}) def", "sfm_entry) assert cldf_row['Gloss'] == ['abc', 'def'] def test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf',", "link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no", "set(), sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID':", "caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {} self.assertEqual(caption_finder.captions, expected) class", "self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'),", "test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')])", "cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1'})", "sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry, 'lang1') self.assertEqual(", "'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID': 'id1',", "[label 2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry,", "'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1);", "return x % 2 == 0 elements = [2, 4, 6] even, odd", "('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry,", "'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry,", "cldf_row, 
{'ID': 'id1', 'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id =", "= s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd, [1, 3, 5]) def test_all_elements_match_pred(self): def iseven(x):", "def test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {},", "[2, 4, 6] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd,", "self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'), ('cap', 'caption1'),", "'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry, 'lang1')", "'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def iseven(x):", "== 0 elements = [1, 3, 5] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even,", "('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry)", "'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id =", "import clldutils.sfm as sfm class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers =", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label", "[ ('marker1', 'value1'), ('marker2', 'value2')] expected = [ [('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual(", "= s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == 'abc\\tdef' cldf_row 
=", "('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'}, {}, set(), sfm_entry) assert cldf_row['See_Also'] ==", "{'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'),", "= sfm.Entry() sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row,", "'val1'), ('cf', 'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs']", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) expected = sfm.Entry([", "test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'}, {},", "'link: OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1',", "= sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('cap', 'caption'), ('marker2', 'val2')]) caption_finder = s.CaptionFinder(['pc'],", "'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = { 'image1-name': 'caption1',", "'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap')", "OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1', 'no", "sfm_entry) assert cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3'] def test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'),", "= { 'NEWID1': 'label 1', 'NEWID2': 'label 2', 'NEWID3': 'label 3'} link_markers =", "'id1'}) def 
test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id = 'id1' cldf_row", "'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self):", "'link: OLDID1'), ('othermarker', 'no link')]) original_entry.id = 'I have an ID, too!' expected", "an ID, too!' expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'),", "expected) def test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker',", "self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry", "'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id =", "'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == 'abc\\tdef'", "def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('marker2', 'val2'), ('cap', 'caption'),", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) expected.id", "'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry,", "= {'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index, label_index, link_markers, link_regex)", "original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) expected", "'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no", "'label 1', 'NEWID2': 
'label 2', 'NEWID3': 'label 3'} link_markers = {'linkmarker1', 'linkmarker2'} link_regex", "'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no", "sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('cap', 'caption'), ('marker2', 'val2')]) caption_finder = s.CaptionFinder(['pc'], 'cap')", "1: [label 1](NEWID1); link 2: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry)", "= caption_finder(entry) expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry = sfm.Entry([", "('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def iseven(x): return", "sfm class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers = [ ('marker1', 'value1'),", "set(), sfm_entry) assert cldf_row['Gloss'] == ['abc', 'def'] def test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'),", "'no link'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry", "3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id, '000001')", "link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'),", "'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids", "OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'link: [label", "'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: 
OLDID1'), ('othermarker', 'no link')]) expected", "s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable',", "too!' new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry = sfm.Entry([", "test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')])", "'NEWID2', 'OLDID3': 'NEWID3'} label_index = { 'NEWID1': 'label 1', 'NEWID2': 'label 2', 'NEWID3':", "new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'),", "def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1', 'sense2']", "sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')])", "sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'},", "new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'link: OLDID2'),", "cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3'] def test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2')])", "'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_same_marker_are_replaced(self):", "link 2: [label 2](NEWID2)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) 
self.assertEqual(new_entry, expected) def", "('cap', 'caption'), ('marker2', 'val2')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected =", "('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 2](NEWID2)'), ('othermarker',", "caption_finder(entry) expected = {} self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping = {'marker1':", "= {'marker1': 'Column1', 'marker2': 'Column2'} def test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id = 'id1'", "def test_find_caption(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('cap', 'caption'), ('marker2', 'val2')])", "odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd, []) def test_some_elements_match_pred(self): def", "s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == ['abc', 'def'] def test_cf():", "cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1',", "self.mapping = {'marker1': 'Column1', 'marker2': 'Column2'} def test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id =", "expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def iseven(x): return x % 2 == 0", "sfm_entry = sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping,", "link_markers = {'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index, label_index, link_markers,", "= s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen = s.IDGenerator() first_id", "1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) 
def test_links_in_same_marker_are_replaced(self): original_entry =", "have an ID, too!' new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self):", "{'ID': 'id1', 'Column1': 'value1', 'Media_IDs': ['file1', 'file2']}) def test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')])", "self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "def test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(),", "cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == ['abc', 'def']", "s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self):", "s import clldutils.sfm as sfm class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers", "test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None,", "first_id = gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen = s.IDGenerator() first_id = gen.next_id()", "'Column1', 'marker2': 'Column2'} def test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id = 'id1' cldf_row =", "test_split_groups_on_separator(self): sep = 'sep' input_markers = [ ('marker1', 'value1'), ('sep', 'value'), ('marker2', 'value2')]", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) new_entry =", "= gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 
'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self): id_index = {", "= s.IDGenerator() first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def", "{ 'NEWID1': 'label 1', 'NEWID2': 'label 2', 'NEWID3': 'label 3'} link_markers = {'linkmarker1',", "'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids", "'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) expected.id = 'I have", "'link: [label 2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry)", "5]) def test_all_elements_match_pred(self): def iseven(x): return x % 2 == 0 elements =", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry)", "['sense1', 'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1'", "test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id,", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID2'), ('othermarker', 'no", "_ = caption_finder(entry) expected = { 'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def", "sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID':", "def test_some_elements_match_pred(self): def iseven(x): return x % 2 == 0 elements = [1,", "'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'), ('cap',", 
"test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None,", "'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) original_entry.id = 'I have an", "= [ [('marker1', 'value1')], [('sep', 'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class", "def test_split_groups_on_separator(self): sep = 'sep' input_markers = [ ('marker1', 'value1'), ('sep', 'value'), ('marker2',", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) expected =", "link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) original_entry.id = 'I have an ID,", "cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'}, {}, set(), sfm_entry) assert cldf_row['See_Also'] == 'val1 ;", "gen = s.IDGenerator() first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002')", "'OLDID3': 'NEWID3'} label_index = { 'NEWID1': 'label 1', 'NEWID2': 'label 2', 'NEWID3': 'label", "sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'), ('cap', 'caption2'),", "self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('marker2', 'val2'),", "test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2:", "0 elements = [2, 4, 6] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2,", "link')]) expected.id = 'I have an ID, too!' 
new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected)", "[label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_same_marker_are_replaced(self): original_entry", "('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = { 'image1-name':", "'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc',", "{}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry =", "('marker2', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(", "= self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1', 'no", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'link: OLDID2')]) expected = sfm.Entry([", "even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4]) self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase):", "'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] == ['val1',", "'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1',", "2: OLDID2'), ('othermarker', 'no link')]) expected = 
sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link", "= s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Media_IDs':", "elements = [1, 2, 3, 4] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2,", "'link 1: OLDID1; link 2: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1',", "caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected)", "OLDID1'), ('othermarker', 'link: OLDID2')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label", "def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no", "self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'),", "test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'link: OLDID2')])", "'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(", "expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) new_entry", "sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(),", "('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def", "cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')])", "set(), sfm_entry) assert 
cldf_row['Gloss'] == 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(),", "= 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def", "assert cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3'] def test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf',", "= 'id1' sfm_entry.media_ids = ['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry)", "= sfm.Entry([ ('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')])", "'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1',", "[1, 3, 5] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd, [1, 3,", "class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def iseven(x): return x % 2 == 0 elements", "self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link:", "% 2 == 0 elements = [1, 2, 3, 4] even, odd =", "= 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1',", "5] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd, [1, 3, 5]) def", "'id1', 'Column1': 'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')])", "'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1':", "[]) def test_some_elements_match_pred(self): def iseven(x): return x % 2 == 0 elements =", "== 0 
elements = [1, 2, 3, 4] even, odd = s.split_by_pred(iseven, elements)", "('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) _ = self.link_processor(original_entry) self.assertEqual(original_entry,", "'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1',", "link_regex = r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index, label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry", "'value'), ('marker2', 'value2')] expected = [ [('marker1', 'value1')], [('sep', 'value'), ('marker2', 'value2')]] self.assertEqual(", "'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row,", "self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id = gen.next_id()", "'PRE000001') self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self): id_index = { 'OLDID1': 'NEWID1', 'OLDID2':", "'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {} self.assertEqual(caption_finder.captions, expected)", "= {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc',", "cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == 'abc\\tdef' cldf_row", "def test_sequence_counts_up(self): gen = s.IDGenerator() first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, '000001')", "= [2, 4, 6] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4, 6])", "setUp(self): id_index = { 'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2', 
'OLDID3': 'NEWID3'} label_index = {", "= 'sep' input_markers = [ ('marker1', 'value1'), ('sep', 'value'), ('marker2', 'value2')] expected =", "expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) new_entry", "expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) _", "def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link", "'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1':", "class LinkProcessing(unittest.TestCase): def setUp(self): id_index = { 'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'}", "def iseven(x): return x % 2 == 0 elements = [1, 2, 3,", "'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index, label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self):", "'marker2': 'Column2'} def test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None,", "('marker1', 'val1'), ('pc', 'image-name'), ('cap', 'caption'), ('marker2', 'val2')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _", "[('marker1', 'value1')], [('sep', 'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def", "s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd, [1, 3, 5]) def test_all_elements_match_pred(self): def iseven(x): return", "def test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'},", "('linkmarker2', 'no link'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "expected) def 
test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'), ('cap', 'caption1'), ('marker2',", "gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen = s.IDGenerator('PRE')", "id_index = { 'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'} label_index = { 'NEWID1':", "x % 2 == 0 elements = [2, 4, 6] even, odd =", "{'ID': 'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id", "'NEWID2': 'label 2', 'NEWID3': 'label 3'} link_markers = {'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b'", "new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'),", "[1, 2, 3, 4] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4]) self.assertEqual(odd,", "('cap', 'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'],", "def test_no_element_matches_pred(self): def iseven(x): return x % 2 == 0 elements = [1,", "'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {},", "sfm_entry.id = 'id1' sfm_entry.entry_id = 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry)", "'caption2'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {", "('marker2', 'value2')] expected = [ [('marker1', 'value1')], [('sep', 'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep,", "link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 1](NEWID1)'), ('othermarker', 'no link')])", "{}, set(), sfm_entry) 
self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self):", "sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'})", "s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs':", "self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1:", "'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids = ['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {},", "1](NEWID1)'), ('othermarker', 'no link')]) expected.id = 'I have an ID, too!' new_entry =", "('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) original_entry.id = 'I have", "('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping = {'marker1': 'Column1', 'marker2': 'Column2'} def test_map_id(self): sfm_entry =", "'id1', 'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id", "sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids = ['file1', 'file2'] cldf_row =", "expected) def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1;", "def setUp(self): self.mapping = {'marker1': 'Column1', 'marker2': 'Column2'} def test_map_id(self): sfm_entry = sfm.Entry()", "ID, too!' 
expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker',", "'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1':", "gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen = s.IDGenerator() first_id = gen.next_id() second_id =", "'val3'] def test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf':", "'link 1: OLDID1; link 2: OLDID2'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1',", "test_find_caption(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('cap', 'caption'), ('marker2', 'val2')]) caption_finder", "import pydictionaria.sfm2cldf as s import clldutils.sfm as sfm class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep", "1: OLDID1; link 2: OLDID2'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no", "['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1',", "have an ID, too!' 
expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label", "def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids = ['file1', 'file2']", "'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def iseven(x): return x", "expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'link: OLDID2')])", "test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')])", "'sep' input_markers = [ ('marker1', 'value1'), ('sep', 'value'), ('marker2', 'value2')] expected = [", "('pc', 'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry)", "= sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids = ['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None,", "sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping,", "'entry1'}) def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1',", "'link: OLDID1'), ('othermarker', 'no link')]) _ = self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry", "'label 3'} link_markers = {'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index,", "s.IDGenerator('PRE') first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase):", "'val1'), ('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 
'See_Also'}, {}, set(), sfm_entry) assert cldf_row['See_Also']", "expected = [ [('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) def test_split_groups_on_separator(self):", "class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers = [ ('marker1', 'value1'), ('marker2',", "'I have an ID, too!' expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "'link: OLDID1000'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')])", "'value1'), ('sep', 'value'), ('marker2', 'value2')] expected = [ [('marker1', 'value1')], [('sep', 'value'), ('marker2',", "{}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2',", "self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Media_IDs': ['file1', 'file2']})", "def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link", "self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd, []) def test_some_elements_match_pred(self): def iseven(x): return x %", "'image-name'), ('marker2', 'val2'), ('cap', 'caption'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ =", "= s.IDGenerator('PRE') first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002') class", "'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 2](NEWID2)'), ('othermarker', 'no", "iseven(x): return x % 2 == 0 elements = [1, 3, 5] even,", "('cf', 'val2;val3')]) cldf_row 
= s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] ==", "self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'),", "OLDID2')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'link:", "entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'),", "('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry,", "self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link", "s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'})", "sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'}, {}, set(),", "sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry =", "{}, set(), sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'],", "[ ('marker1', 'value1'), ('sep', 'value'), ('marker2', 'value2')] expected = [ [('marker1', 'value1')], [('sep',", "OLDID1'), ('othermarker', 'no link')]) original_entry.id = 'I have an ID, too!' 
expected =", "sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'})", "('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) expected.id = 'I", "OLDID1; link 2: OLDID2'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'),", "link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link", "second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self): id_index =", "link'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry =", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID2'), ('othermarker',", "('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'link: OLDID2')]) expected = sfm.Entry([ ('linkmarker1',", "0 elements = [1, 2, 3, 4] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even,", "self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id = gen.next_id() second_id = gen.next_id()", "'Column1': 'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1'", "'Column1': 'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')]) sfm_entry.id", "[label 2](NEWID2)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry", "('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) 
def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([", "= { 'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([", "self.assertEqual(new_entry, expected) def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1:", "= s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Entry_ID':", "link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def", "'I have an ID, too!' new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def", "'caption'), ('marker2', 'val2')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {'image-name':", "= sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'}, {}, set(), sfm_entry)", "sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id", "{'marker1': 'Column1', 'marker2': 'Column2'} def test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id = 'id1' cldf_row", "['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1',", "self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping = {'marker1': 'Column1', 'marker2': 'Column2'} def", "OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label", "'sense2']}) def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) 
sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1',", "gen = s.IDGenerator('PRE') first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002')", "def test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen", "{}, set(), sfm_entry) assert cldf_row['Gloss'] == ['abc', 'def'] def test_cf(): sfm_entry = sfm.Entry([('cf',", "sep = 'sep' input_markers = [ ('marker1', 'value1'), ('sep', 'value'), ('marker2', 'value2')] expected", "% 2 == 0 elements = [1, 3, 5] even, odd = s.split_by_pred(iseven,", "MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('cap', 'caption'), ('marker2',", "def test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'},", "self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']})", "OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2', 'link:", "expected = { 'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry =", "= s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Column2':", "link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID2'), ('othermarker', 'no link')]) expected =", "6] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd, []) def", "'000002') def test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id,", "self.assertEqual(even, [2, 4]) 
self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen = s.IDGenerator()", "('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {} self.assertEqual(caption_finder.captions,", "test_some_elements_match_pred(self): def iseven(x): return x % 2 == 0 elements = [1, 2,", "'value1'}) def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id = 'entry1'", "s.IDGenerator() first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self):", "s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3'] def", "original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) original_entry.id", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected =", "== 0 elements = [2, 4, 6] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even,", "set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self):", "'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id", "link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'link:", "link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'),", "'value1'), ('marker2', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry)", 
"('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) def test_split_groups_on_separator(self): sep = 'sep' input_markers =", "2: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self):", "'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) expected.id = 'I have an ID, too!'", "link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')])", "= gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self):", "new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'),", "'value1'), ('marker2', 'value2')] expected = [ [('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)),", "sfm_entry = sfm.Entry() sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry)", "'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id =", "list(s.group_by_separator(sep, input_markers)), expected) def test_split_groups_on_separator(self): sep = 'sep' input_markers = [ ('marker1', 'value1'),", "= {} self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping = {'marker1': 'Column1', 'marker2':", "test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen =", "= s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {} 
self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase):", "self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1',", "elements) self.assertEqual(even, [2, 4]) self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen =", "test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('marker2', 'val2'), ('cap', 'caption'), ('marker3',", "('marker2', 'value2')] expected = [ [('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected)", "def test_all_elements_match_pred(self): def iseven(x): return x % 2 == 0 elements = [2,", "['abc', 'def'] def test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable',", "= ['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID':", "self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def iseven(x): return x %", "link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'link: OLDID2')]) expected = sfm.Entry([ ('linkmarker1', 'no link'),", "even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd, [1, 3, 5]) def test_all_elements_match_pred(self):", "test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row", "'no link')]) expected = sfm.Entry([ 
('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'),", "self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link", "link'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'),", "{}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']}) def", "'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'),", "original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID2'),", "'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')])", "_ = caption_finder(entry) expected = {} self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping", "= s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1'}) def", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry =", "link')]) expected = sfm.Entry([ ('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker',", "link 2: OLDID2'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "'link: [label 1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self):", "test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers = [ 
('marker1', 'value1'), ('marker2', 'value2')] expected =", "expected) def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1;", "input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def iseven(x): return x % 2 ==", "('marker2', 'val2'), ('cap', 'caption'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry)", "'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'} label_index = { 'NEWID1': 'label 1', 'NEWID2': 'label 2',", "def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no", "1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry =", "sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(),", "= s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == ['abc', 'def'] def", "'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')]) sfm_entry.id =", "('othermarker', 'no link')]) expected.id = 'I have an ID, too!' new_entry = self.link_processor(original_entry)", "'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected)", "'NEWID1', 'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'} label_index = { 'NEWID1': 'label 1', 'NEWID2': 'label", "ID, too!' 
new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry =", "{'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3'] def test_multimarkers(): sfm_entry = sfm.Entry([('cf',", "'no link'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no", "= [ ('marker1', 'value1'), ('sep', 'value'), ('marker2', 'value2')] expected = [ [('marker1', 'value1')],", "'value1')], [('sep', 'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self):", "= s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self): sfm_entry =", "def iseven(x): return x % 2 == 0 elements = [2, 4, 6]", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 2](NEWID2)'),", "'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker',", "'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')]) sfm_entry.id = 'id1' cldf_row", "def test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no", "3, 4] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4]) self.assertEqual(odd, [1, 3])", "= s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def", "'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1',", 
"test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')])", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 1](NEWID1)'),", "'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id", "('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry = sfm.Entry([", "'NEWID3': 'label 3'} link_markers = {'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor(", "test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row", "sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(), sfm_entry) assert", "'value1'), ('unknown', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry)", "first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen", "('pc', 'image-name'), ('marker2', 'val2'), ('cap', 'caption'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _", "'id1', 'Column1': 'value1', 'Media_IDs': ['file1', 'file2']}) def test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row", "{'ID': 'id1'}) def test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id = 'id1'", "3'} link_markers = {'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index, label_index,", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), 
('othermarker', 'link: OLDID2')]) expected =", "= gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen = s.IDGenerator() first_id = gen.next_id() second_id", "('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry = self.link_processor(original_entry)", "3, 5] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd, [1, 3, 5])", "'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no", "('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([", "'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "= caption_finder(entry) expected = {} self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase): def setUp(self): self.mapping =", "sfm_entry) assert cldf_row['Gloss'] == 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(), sfm_entry)", "sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry =", "def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no", "import unittest import pydictionaria.sfm2cldf as s import clldutils.sfm as sfm class SplitMarkersWithSeparators(unittest.TestCase): def", "expected = [ [('marker1', 'value1')], [('sep', 'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected)", "test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'), ('pc',", "'id1' 
sfm_entry.entry_id = 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row,", "expected.id = 'I have an ID, too!' new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) class", "= 'id1' sfm_entry.sense_ids = ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry,", "2', 'NEWID3': 'label 3'} link_markers = {'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor =", "'val1'), ('pc', 'image-name'), ('marker2', 'val2'), ('cap', 'caption'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap')", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([", "'Column1': 'value1', 'Media_IDs': ['file1', 'file2']}) def test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row =", "an ID, too!' new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry", "'link: OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'link:", "2](NEWID2)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry =", "= self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Media_IDs': ['file1', 'file2']}) def test_gloss(): sfm_entry = sfm.Entry([('ge',", "{'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge':", "2, 3, 4] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, 
[2, 4]) self.assertEqual(odd, [1,", "('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID2'), ('othermarker', 'no link')])", "6]) self.assertEqual(odd, []) def test_some_elements_match_pred(self): def iseven(x): return x % 2 == 0", "test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(), sfm_entry)", "cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self): sfm_entry", "self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "self.mapping, {}, set(), sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1',", "sfm_entry.media_ids = ['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row,", "x % 2 == 0 elements = [1, 3, 5] even, odd =", "2 == 0 elements = [1, 3, 5] even, odd = s.split_by_pred(iseven, elements)", "{'linkmarker1', 'linkmarker2'} link_regex = r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index, label_index, link_markers, link_regex) def", "= sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss']", "('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def", "[]) self.assertEqual(odd, [1, 3, 5]) def test_all_elements_match_pred(self): def iseven(x): return x % 2", "= 'sep' input_markers = [ ('marker1', 'value1'), ('marker2', 'value2')] expected = [ [('marker1',", "sfm.Entry() sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID':", 
"self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id", "original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'link: OLDID2')]) expected", "('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1',", "= r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index, label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry =", "sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('marker2', 'val2'), ('cap', 'caption'), ('marker3', 'val3')]) caption_finder =", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID1'), ('othermarker',", "link_regex) def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker',", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID1'), ('othermarker', 'no", "link 2: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "2: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link", "= self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected)", "sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id = 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {},", "= [ [('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, 
input_markers)), expected) def test_split_groups_on_separator(self): sep", "sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry", "= sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected =", "== ['val1', 'val2', 'val3'] def test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row", "expected) def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker',", "new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'),", "[ [('marker1', 'value1')], [('sep', 'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase):", "s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd, []) def test_some_elements_match_pred(self): def iseven(x): return", "'val2')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions,", "= sfm.Entry([ ('marker1', 'val1'), ('pc', 'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'), ('cap',", "'lang1'}) def test_map_media_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids = ['file1',", "self.link_processor = s.LinkProcessor( id_index, label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1',", "original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) expected", "[2, 4, 6]) self.assertEqual(odd, []) def 
test_some_elements_match_pred(self): def iseven(x): return x % 2", "gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id = gen.next_id()", "link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 2](NEWID2)'), ('othermarker', 'no link')])", "'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) def test_split_groups_on_separator(self): sep = 'sep' input_markers = [", "'id1' sfm_entry.media_ids = ['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(", "expected) def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker',", "def test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001')", "s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {} self.assertEqual(caption_finder.captions, expected) class MapSfmToCldf(unittest.TestCase): def", "('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker',", "set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry", "'val2', 'val3'] def test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None,", "('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'),", "s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self): sfm_entry = sfm.Entry([('marker1',", "def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'), 
('unknown', 'value2')]) sfm_entry.id = 'id1' cldf_row =", "'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1':", "set(), sfm_entry) self.assertEqual(cldf_row, {'ID': 'id1'}) def test_map_columns(self): sfm_entry = sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')])", "link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')])", "'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'} label_index = { 'NEWID1': 'label 1', 'NEWID2':", "self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'),", "= 'I have an ID, too!' expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "= 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1',", "'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no", "('marker1', 'val1'), ('pc', 'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'), ('cap', 'caption2'), ('marker3',", "gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self): id_index = { 'OLDID1':", "test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2:", "link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1', 'no", "2 == 0 elements = [2, 4, 6] even, odd = s.split_by_pred(iseven, elements)", "sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'ge': 'Gloss'}, {}, set(), sfm_entry) assert 
cldf_row['Gloss'] ==", "test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {},", "('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry", "('othermarker', 'link: OLDID2')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_ignore_regex_matches_that_are_not_in_the_index(self): original_entry = sfm.Entry([", "s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Column2': 'value2'})", "= self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) _ = self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self):", "_ = caption_finder(entry) expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry =", "('pc', 'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')]) caption_finder", "('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1',", "link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected)", "s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Media_IDs': ['file1',", "2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected)", "x % 2 == 0 elements 
= [1, 2, 3, 4] even, odd", "= gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id =", "('marker1', 'value1'), ('sep', 'value'), ('marker2', 'value2')] expected = [ [('marker1', 'value1')], [('sep', 'value'),", "new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'),", "def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.entry_id = 'entry1' cldf_row", "('linkmarker2', 'link 1: OLDID1; link 2: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) original_entry.id =", "{'ge': 'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == ['abc', 'def'] def test_cf(): sfm_entry", "def test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no", "odd = s.split_by_pred(iseven, elements) self.assertEqual(even, []) self.assertEqual(odd, [1, 3, 5]) def test_all_elements_match_pred(self): def", "self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link:", "= ['sense1', 'sense2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID':", "cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry", "as s import clldutils.sfm as sfm class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep'", "def setUp(self): id_index = { 'OLDID1': 'NEWID1', 
'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'} label_index =", "expected) def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('marker2', 'val2'), ('cap',", "'value2')] expected = [ [('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) def", "s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen = s.IDGenerator() first_id =", "OLDID1; link 2: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'),", "'image1-name'), ('cap', 'caption1'), ('marker2', 'val2'), ('pc', 'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')]) caption_finder =", "= s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd, []) def test_some_elements_match_pred(self): def iseven(x):", "= 'id1' sfm_entry.entry_id = 'entry1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(", "'no link')]) original_entry.id = 'I have an ID, too!' 
expected = sfm.Entry([ ('linkmarker1',", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry", "link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def", "= s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert cldf_row['Entry_IDs'] == ['val1', 'val2', 'val3']", "new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_single_link_is_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'),", "2 == 0 elements = [1, 2, 3, 4] even, odd = s.split_by_pred(iseven,", "self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id = gen.next_id() second_id", "self.assertEqual(odd, [1, 3, 5]) def test_all_elements_match_pred(self): def iseven(x): return x % 2 ==", "('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry =", "4, 6]) self.assertEqual(odd, []) def test_some_elements_match_pred(self): def iseven(x): return x % 2 ==", "s.LinkProcessor( id_index, label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'),", "('marker2', 'val2'), ('pc', 'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _", "2: [label 2](NEWID2)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self):", "('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker',", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 
'no link')]) new_entry =", "LinkProcessing(unittest.TestCase): def setUp(self): id_index = { 'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'} label_index", "('marker1', 'val1'), ('pc', 'image-name'), ('marker2', 'val2'), ('cap', 'caption'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'],", "input_markers = [ ('marker1', 'value1'), ('sep', 'value'), ('marker2', 'value2')] expected = [ [('marker1',", "link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'),", "self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) def test_split_groups_on_separator(self): sep = 'sep' input_markers = [ ('marker1',", "= sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {},", "('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1',", "'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids =", "'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID2'), ('othermarker', 'no link')]) expected", "link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'),", "('othermarker', 'link: OLDID2')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'),", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'link: OLDID2')]) new_entry =", "('linkmarker2', 'link 1: [label 1](NEWID1); link 2: [label 2](NEWID2)'), ('othermarker', 'no link')]) new_entry", "= caption_finder(entry) expected = { 'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self):", 
"test_sequence_counts_up(self): gen = s.IDGenerator() first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id,", "[1, 3, 5]) def test_all_elements_match_pred(self): def iseven(x): return x % 2 == 0", "self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry = sfm.Entry([ ('marker1', 'val1'), ('pc',", "= self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "assert cldf_row['Gloss'] == 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'}, {}, set(), sfm_entry) assert", "1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry =", "even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4, 6]) self.assertEqual(odd, []) def test_some_elements_match_pred(self):", "[label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry", "cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1', 'value1'),", "('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'no link'), ('othermarker',", "expected = {'image-name': 'caption'} self.assertEqual(caption_finder.captions, expected) def test_find_multiple_captions(self): entry = sfm.Entry([ ('marker1', 'val1'),", "'000001') def test_sequence_counts_up(self): gen = s.IDGenerator() first_id = gen.next_id() second_id = gen.next_id() self.assertEqual(first_id,", "'no link')]) 
expected.id = 'I have an ID, too!' new_entry = self.link_processor(original_entry) self.assertEqual(new_entry,", "self.assertEqual(first_id, '000001') def test_sequence_counts_up(self): gen = s.IDGenerator() first_id = gen.next_id() second_id = gen.next_id()", "'link: OLDID1000'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry", "original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: OLDID1; link 2: OLDID1'),", "'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1', 'val1'),", "r'\\bOLDID\\d+\\b' self.link_processor = s.LinkProcessor( id_index, label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([", "as sfm class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers = [ ('marker1',", "self.assertEqual(odd, []) def test_some_elements_match_pred(self): def iseven(x): return x % 2 == 0 elements", "test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')])", "OLDID1000'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self): original_entry =", "SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep = 'sep' input_markers = [ ('marker1', 'value1'), ('marker2', 'value2')]", "= sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry", "'image-name'), ('cap', 'caption'), ('marker2', 'val2')]) caption_finder = 
s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected", "= { 'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2', 'OLDID3': 'NEWID3'} label_index = { 'NEWID1': 'label", "original_entry = sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected", "s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1'}) def test_map_entry_id(self):", "'def'] def test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf':", "second_id = gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id", "= s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1',", "'NEWID1': 'label 1', 'NEWID2': 'label 2', 'NEWID3': 'label 3'} link_markers = {'linkmarker1', 'linkmarker2'}", "{'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self): sfm_entry =", "1: [label 1](NEWID1); link 2: [label 2](NEWID2)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry)", "sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.media_ids = ['file1', 'file2'] cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping,", "('marker2', 'val2')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = {'image-name': 'caption'}", "class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id, '000001') def", "'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected)", "pydictionaria.sfm2cldf as s import clldutils.sfm as sfm 
class SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self): sep =", "link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_links_in_same_marker_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'no", "set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1'}) def test_map_entry_id(self): sfm_entry = sfm.Entry([('marker1',", "'val2'), ('cap', 'caption'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected", "id_index, label_index, link_markers, link_regex) def test_entries_without_links_dont_change(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1:", "('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_dont_mutate_original_entry(self):", "'value1', 'Sense_IDs': ['sense1', 'sense2']}) def test_map_language_id(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1'", "'000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen = s.IDGenerator('PRE') first_id = gen.next_id() second_id =", "[label 1](NEWID1); link 2: [label 2](NEWID2)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry,", "'value1'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) def test_split_groups_on_separator(self): sep = 'sep' input_markers", "sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row = s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'}, {}, set(), sfm_entry) assert", "'cap') _ = caption_finder(entry) expected = {} self.assertEqual(caption_finder.captions, expected) class 
MapSfmToCldf(unittest.TestCase): def setUp(self):", "new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase): def test_find_caption(self): entry = sfm.Entry([ ('marker1',", "setUp(self): self.mapping = {'marker1': 'Column1', 'marker2': 'Column2'} def test_map_id(self): sfm_entry = sfm.Entry() sfm_entry.id", "sfm.Entry([('marker1', 'value1'), ('marker2', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(),", "[1, 3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id = gen.next_id() self.assertEqual(first_id,", "expected) def test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker',", "3, 5]) def test_all_elements_match_pred(self): def iseven(x): return x % 2 == 0 elements", "self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'),", "1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry =", "= gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, '000001') self.assertEqual(second_id, '000002') def test_adding_prefix(self): gen =", "return x % 2 == 0 elements = [1, 3, 5] even, odd", "cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry, 'lang1') self.assertEqual( cldf_row, {'ID': 'id1', 'Column1':", "'cap') _ = caption_finder(entry) expected = { 'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected)", "'value1', 'Media_IDs': ['file1', 'file2']}) def test_gloss(): sfm_entry = sfm.Entry([('ge', 'abc\\tdef')]) cldf_row = s.sfm_entry_to_cldf_row(None,", 
"['val1', 'val2', 'val3'] def test_multimarkers(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2')]) cldf_row =", "4]) self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self): gen = s.IDGenerator() first_id =", "set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Media_IDs': ['file1', 'file2']}) def test_gloss():", "OLDID1'), ('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'),", "'NEWID3'} label_index = { 'NEWID1': 'label 1', 'NEWID2': 'label 2', 'NEWID3': 'label 3'}", "self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Column2': 'value2'}) def test_ignore_unexpected_sfm_markers(self): sfm_entry = sfm.Entry([('marker1',", "caption_finder = s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = { 'image1-name': 'caption1', 'image2-name':", "('linkmarker2', 'link: [label 1](NEWID1)'), ('othermarker', 'no link')]) expected.id = 'I have an ID,", "gen.next_id() second_id = gen.next_id() self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self): id_index", "'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_same_link_twice_in_the_same_marker(self): original_entry = sfm.Entry([ ('linkmarker1',", "('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label", "caption_finder(entry) expected = { 'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry", "cldf_row['Gloss'] == ['abc', 'def'] def test_cf(): sfm_entry = sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row", "'link 1: [label 1](NEWID1); link 2: [label 2](NEWID2)'), ('othermarker', 'no link')]) new_entry =", "link')]) _ = 
self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry = sfm.Entry([ ('linkmarker1', 'no", "'value1', 'Entry_ID': 'entry1'}) def test_map_sense_ids(self): sfm_entry = sfm.Entry([('marker1', 'value1')]) sfm_entry.id = 'id1' sfm_entry.sense_ids", "self.assertEqual(first_id, 'PRE000001') self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self): id_index = { 'OLDID1': 'NEWID1',", "original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) expected", "self.assertEqual(new_entry, expected) def test_links_in_different_markers_are_replaced(self): original_entry = sfm.Entry([ ('linkmarker1', 'link: OLDID2'), ('linkmarker2', 'link: OLDID1'),", "entry = sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('cap', 'caption'), ('marker2', 'val2')]) caption_finder =", "'Gloss'}, {}, set(), sfm_entry) assert cldf_row['Gloss'] == 'abc\\tdef' cldf_row = s.sfm_entry_to_cldf_row('ExampleTable', {'ge': 'Gloss'},", "sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) original_entry.id = 'I", "iseven(x): return x % 2 == 0 elements = [2, 4, 6] even,", "sfm.Entry([('cf', 'val1'), ('cf', 'val2;val3')]) cldf_row = s.sfm_entry_to_cldf_row('EntryTable', {'cf': 'Entry_IDs'}, {}, {'Entry_IDs'}, sfm_entry) assert", "{ 'image1-name': 'caption1', 'image2-name': 'caption2'} self.assertEqual(caption_finder.captions, expected) def test_captions_need_to_be_adjacent(self): entry = sfm.Entry([ ('marker1',", "self.mapping, {}, set(), sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Entry_ID': 'entry1'}) def", "def iseven(x): return x % 2 == 0 elements = [1, 3, 5]", "link'), ('linkmarker2', 'link: OLDID1'), ('othermarker', 'no link')]) _ = self.link_processor(original_entry) self.assertEqual(original_entry, expected) def", "('linkmarker2', 'link: 
OLDID1'), ('othermarker', 'link: OLDID2')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "'val1'), ('pc', 'image-name'), ('cap', 'caption'), ('marker2', 'val2')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ =", "test_no_element_matches_pred(self): def iseven(x): return x % 2 == 0 elements = [1, 3,", "link')]) expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1000'), ('othermarker', 'no link')])", "= self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2',", "= sfm.Entry([('marker1', 'value1'), ('unknown', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {},", "sfm_entry) self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Media_IDs': ['file1', 'file2']}) def test_gloss(): sfm_entry", "return x % 2 == 0 elements = [1, 2, 3, 4] even,", "self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link: OLDID1'),", "expected = sfm.Entry([ ('linkmarker1', 'no link'), ('linkmarker2', 'link 1: [label 1](NEWID1); link 2:", "input_markers)), expected) def test_split_groups_on_separator(self): sep = 'sep' input_markers = [ ('marker1', 'value1'), ('sep',", "= s.sfm_entry_to_cldf_row(None, {'cf': 'See_Also'}, {}, set(), sfm_entry) assert cldf_row['See_Also'] == 'val1 ; val2'", "('othermarker', 'no link')]) _ = self.link_processor(original_entry) self.assertEqual(original_entry, expected) def test_carry_over_attributes(self): original_entry = sfm.Entry([", "'val2'), ('pc', 'image2-name'), ('cap', 'caption2'), ('marker3', 'val3')]) caption_finder = s.CaptionFinder(['pc'], 'cap') _ =", "('marker1', 'value1'), ('marker2', 'value2')] expected = [ [('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual( 
list(s.group_by_separator(sep,", "elements = [2, 4, 6] even, odd = s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4,", "= sfm.Entry([ ('marker1', 'val1'), ('pc', 'image-name'), ('marker2', 'val2'), ('cap', 'caption'), ('marker3', 'val3')]) caption_finder", "'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self): id_index = { 'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2', 'OLDID3':", "('unknown', 'value2')]) sfm_entry.id = 'id1' cldf_row = s.sfm_entry_to_cldf_row(None, self.mapping, {}, set(), sfm_entry) self.assertEqual(", "= 'I have an ID, too!' new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) class MediaCaptionExtraction(unittest.TestCase):", "link')]) original_entry.id = 'I have an ID, too!' expected = sfm.Entry([ ('linkmarker1', 'no", "[label 1](NEWID1)'), ('othermarker', 'no link')]) new_entry = self.link_processor(original_entry) self.assertEqual(new_entry, expected) def test_only_process_links_in_specified_markers(self): original_entry", "= s.CaptionFinder(['pc'], 'cap') _ = caption_finder(entry) expected = { 'image1-name': 'caption1', 'image2-name': 'caption2'}", "[('marker1', 'value1'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) def test_split_groups_on_separator(self): sep = 'sep'", "('othermarker', 'no link')]) expected = sfm.Entry([ ('linkmarker1', 'link: [label 2](NEWID2)'), ('linkmarker2', 'link: [label", "self.assertEqual( cldf_row, {'ID': 'id1', 'Column1': 'value1', 'Sense_IDs': ['sense1', 'sense2'], 'Language_ID': 'lang1'}) def test_map_media_ids(self):", "'sep' input_markers = [ ('marker1', 'value1'), ('marker2', 'value2')] expected = [ [('marker1', 'value1'),", "= s.split_by_pred(iseven, elements) self.assertEqual(even, [2, 4]) self.assertEqual(odd, [1, 3]) class GenerateSequentialIDs(unittest.TestCase): def test_sequence_starts_with_one(self):", "unittest import pydictionaria.sfm2cldf as s import clldutils.sfm as sfm class 
SplitMarkersWithSeparators(unittest.TestCase): def test_lump_everything_together_if_seperator_isnt_found(self):", "elements) self.assertEqual(even, []) self.assertEqual(odd, [1, 3, 5]) def test_all_elements_match_pred(self): def iseven(x): return x", "[('sep', 'value'), ('marker2', 'value2')]] self.assertEqual( list(s.group_by_separator(sep, input_markers)), expected) class SplitListByPredicate(unittest.TestCase): def test_no_element_matches_pred(self): def", "self.assertEqual(second_id, 'PRE000002') class LinkProcessing(unittest.TestCase): def setUp(self): id_index = { 'OLDID1': 'NEWID1', 'OLDID2': 'NEWID2'," ]
[ "fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) ,", "ymin=-L; ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y = np.meshgrid(np.linspace(xmin, xmax, 40),", "2 VARIABLE NORMAL DISTIBUTION ##------------------------------------------- import matplotlib.pyplot as plt import numpy as np", "FROM DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE FUNCTION def N(x,", "1.1*np.max(N(X, Y)) , '.') plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT # plt.axis('equal')", "ymax, 40)) #SURFACE PLOT fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS);", "POINTS SAMPLED FROM DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE FUNCTION", "out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER)", "#FONT SIZE CMAP='hsv' #'RdYlBu' #normal distribution param ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5;", "1000).T # DEFINE FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1 (SMALLER)", "fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X,", "Y, N(X, Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) , '.') plt.show(); #SCATTER PLOT", "#GENERATE POINTS SAMPLED FROM DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE", "CMAP='hsv' #'RdYlBu' #normal distribution param ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON", "#MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L x,y = 
np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X,", "Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) , '.') plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.')", "#MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC #GENERATE POINTS SAMPLED FROM DISTRIBUTION xp, yp", "as np #USER INPUTS FUNC=2 FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu' #normal distribution param", "u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC #GENERATE POINTS SAMPLED FROM DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2),", "METRIC #GENERATE POINTS SAMPLED FROM DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T #", "ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y = np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin,", "L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y =", "fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) , '.') plt.show();", "= np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax, 40)) #SURFACE PLOT fig, ax = plt.subplots(subplot_kw={\"projection\":", "#COVARIANCE METRIC #GENERATE POINTS SAMPLED FROM DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T", "#'RdYlBu' #normal distribution param ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION", "xp, yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5)", "40), np.linspace(ymin, ymax, 40)) #SURFACE PLOT fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS);", "rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE 
METRIC #GENERATE POINTS", "xmax=L; ymin=-L; ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y = np.meshgrid(np.linspace(xmin, xmax,", "ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y), cmap=CMAP) ax.scatter(xp, yp,", "(DENSER) X, Y = np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax, 40)) #SURFACE PLOT fig,", "np.linspace(ymin, ymax, 40)) #SURFACE PLOT fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y',", "fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y,", "xmin=-L; xmax=L; ymin=-L; ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y = np.meshgrid(np.linspace(xmin,", "FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu' #normal distribution param ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV", "x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y = np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax,", "#[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC #GENERATE POINTS SAMPLED", "PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT # plt.axis('equal') plt.contour(X, Y, N(X, Y), 20, cmap=CMAP);", "surf=ax.plot_surface(X, Y, N(X, Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) , '.') plt.show(); #SCATTER", "out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L x,y =", "ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); 
ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X,", "'.') plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT # plt.axis('equal') plt.contour(X, Y, N(X,", "#SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT # plt.axis('equal') plt.contour(X, Y, N(X, Y), 20,", "ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy]", "import numpy as np #USER INPUTS FUNC=2 FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu' #normal", "Y)) , '.') plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT # plt.axis('equal') plt.contour(X,", "= np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return", "#STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC #GENERATE", "Y = np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax, 40)) #SURFACE PLOT fig, ax =", "as plt import numpy as np #USER INPUTS FUNC=2 FS=18 #FONT SIZE CMAP='hsv'", "y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L x,y", "out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20))", "= plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y),", "CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) 
#COVARIANCE METRIC #GENERATE POINTS SAMPLED FROM DISTRIBUTION", "ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y))", "40)) #SURFACE PLOT fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)',", "NORMAL DISTIBUTION ##------------------------------------------- import matplotlib.pyplot as plt import numpy as np #USER INPUTS", "yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2)))", "N(X, Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) , '.') plt.show(); #SCATTER PLOT plt.plot(xp,", "np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y = np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax, 40)) #SURFACE", "sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC", "X, Y = np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax, 40)) #SURFACE PLOT fig, ax", "#normal distribution param ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]])", "distribution param ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN", "np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out", "DEFINE FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) 
return out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L;", "ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y), cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) , '.')", "SAMPLED FROM DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE FUNCTION def", "cmap=CMAP) ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) , '.') plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR", "FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L;", "#MESH-2 (DENSER) X, Y = np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax, 40)) #SURFACE PLOT", "INPUTS FUNC=2 FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu' #normal distribution param ux=0.5; uy=0.0 sx=2.0;", "param ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR", "\"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y), cmap=CMAP) ax.scatter(xp,", "(SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y", "xmax, 40), np.linspace(ymin, ymax, 40)) #SURFACE PLOT fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x',", "DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2), s, 1000).T # DEFINE FUNCTION def N(x, y):", "FUNC=2 FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu' #normal distribution param ux=0.5; uy=0.0 sx=2.0; sy=1.0", "## 2 VARIABLE NORMAL DISTIBUTION ##------------------------------------------- import matplotlib.pyplot as plt import numpy as", "plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X, Y, N(X, Y), 
cmap=CMAP)", "#SURFACE PLOT fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS)", "PLOT fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"}) ax.set_xlabel('x', fontsize=FS); ax.set_ylabel('y', fontsize=FS); ax.set_zlabel('p(x,y)', fontsize=FS) surf=ax.plot_surface(X,", "plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT # plt.axis('equal') plt.contour(X, Y, N(X, Y),", "N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L", "ax.scatter(xp, yp, 1.1*np.max(N(X, Y)) , '.') plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT", "RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC #GENERATE POINTS SAMPLED FROM", "#USER INPUTS FUNC=2 FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu' #normal distribution param ux=0.5; uy=0.0", "= np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2 (DENSER) X, Y = np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax, 40))", "yp, 1.1*np.max(N(X, Y)) , '.') plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT #", "np.meshgrid(np.linspace(xmin, xmax, 40), np.linspace(ymin, ymax, 40)) #SURFACE PLOT fig, ax = plt.subplots(subplot_kw={\"projection\": \"3d\"})", "return out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L; ymax=L x,y = np.meshgrid(np.linspace(xmin,xmax,20),np.linspace(ymin,ymax,20)) #MESH-2", "matplotlib.pyplot as plt import numpy as np #USER INPUTS FUNC=2 FS=18 #FONT SIZE", "# DEFINE FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1 (SMALLER) L=3*max(sx,sy)", "def N(x, 
y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1 (SMALLER) L=3*max(sx,sy) xmin=-L; xmax=L; ymin=-L;", "numpy as np #USER INPUTS FUNC=2 FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu' #normal distribution", "plt import numpy as np #USER INPUTS FUNC=2 FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu'", "VARIABLE NORMAL DISTIBUTION ##------------------------------------------- import matplotlib.pyplot as plt import numpy as np #USER", "import matplotlib.pyplot as plt import numpy as np #USER INPUTS FUNC=2 FS=18 #FONT", "SIZE CMAP='hsv' #'RdYlBu' #normal distribution param ux=0.5; uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1)", "##------------------------------------------- ## 2 VARIABLE NORMAL DISTIBUTION ##------------------------------------------- import matplotlib.pyplot as plt import numpy", "VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC #GENERATE POINTS SAMPLED FROM DISTRIBUTION xp, yp =", "##------------------------------------------- import matplotlib.pyplot as plt import numpy as np #USER INPUTS FUNC=2 FS=18", "uy=0.0 sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]])", "np #USER INPUTS FUNC=2 FS=18 #FONT SIZE CMAP='hsv' #'RdYlBu' #normal distribution param ux=0.5;", "s, 1000).T # DEFINE FUNCTION def N(x, y): out=1.0/(2*3.1415*sx*sy*(1-rho**2.0)**0.5) out=out*np.exp(-(((x-ux)/sx)**2.0-2*rho*((x-ux)/sx)*((y-uy)/sy)+((y-uy)/sy)**2.0)/(2*(1-rho**2))) return out #MESH-1", "sx=2.0; sy=1.0 #STD-DEV rho=0.5; #[0,1) RHO=PEARSON CORRELATION u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE", "u=np.array([[ux],[uy]]) #MEAN VECTOR u=[ux,uy] s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC #GENERATE POINTS SAMPLED FROM DISTRIBUTION xp,", 
", '.') plt.show(); #SCATTER PLOT plt.plot(xp, yp,'.') #CONTOUR PLOT # plt.axis('equal') plt.contour(X, Y,", "s=np.array([[sx**2.0,rho*sy*sx],[rho*sy*sx,sy**2.0]]) #COVARIANCE METRIC #GENERATE POINTS SAMPLED FROM DISTRIBUTION xp, yp = np.random.multivariate_normal(u.reshape(2), s,", "plt.plot(xp, yp,'.') #CONTOUR PLOT # plt.axis('equal') plt.contour(X, Y, N(X, Y), 20, cmap=CMAP); plt.show();", "DISTIBUTION ##------------------------------------------- import matplotlib.pyplot as plt import numpy as np #USER INPUTS FUNC=2" ]
[ "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,", "of monhorn module. This subclass of monhorn module is intended for providing Monhorn", "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "terminated.\") session.close() return False return True def start_monhorn(self, session: Session) -> None: \"\"\"", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED", "Commands, FS): \"\"\" Subclass of monhorn module. This subclass of monhorn module is", "this software and associated documentation files (the \"Software\"), to deal in the Software", "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "= session def monhorn_console(self, session: Session) -> None: \"\"\" Start Monhorn console. :param", "session: session to check :return bool: True if session is alive \"\"\" if", "Session from hatsploit.lib.commands import Commands from pex.fs import FS class Console(Plugins, Badges, Runtime,", "+ 'commands/' + session.details['Platform'].lower() exists, is_dir = self.exists(commands) if exists and not is_dir:", "EntySec Permission is hereby granted, free of charge, to any person obtaining a", "shell. :param Session session: session to start Monhorn shell for :return bool: True", "OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "Start Monhorn console. 
:param Session session: session to start Monhorn console for :return", "the Software without restriction, including without limitation the rights to use, copy, modify,", "person obtaining a copy of this software and associated documentation files (the \"Software\"),", "the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "session to check :return bool: True if session is alive \"\"\" if session.channel.terminated:", "-> bool: \"\"\" Check is session alive. :param Session session: session to check", "= {} def check_session(self, session: Session) -> bool: \"\"\" Check is session alive.", "session.monhorn + 'commands/' + session.details['Platform'].lower() exists, is_dir = self.exists(commands) if exists and not", "without restriction, including without limitation the rights to use, copy, modify, merge, publish,", "merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit", "and not is_dir: self.commands.update( self.load_commands(commands) ) commands = session.monhorn + 'commands/generic' exists, is_dir", "\"\"\" Start Monhorn console. :param Session session: session to start Monhorn console for", "'Show available commands.'), ('quit', 'Stop interaction.') ] commands = {} def check_session(self, session:", "bool: True if session is alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return", "Session) -> None: \"\"\" Start Monhorn console. :param Session session: session to start", "in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED", "{} def check_session(self, session: Session) -> bool: \"\"\" Check is session alive. 
:param", "is_dir: self.commands.update( self.load_commands(commands) ) commands = session.monhorn + 'commands/generic' exists, is_dir = self.exists(commands)", "sublicense, and/or sell copies of the Software, and to permit persons to whom", "this permission notice shall be included in all copies or substantial portions of", "modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to", "from hatsploit.lib.runtime import Runtime from hatsploit.lib.session import Session from hatsploit.lib.commands import Commands from", "ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "if exists and not is_dir: self.commands.update( self.load_commands(commands) ) commands = session.monhorn + 'commands/generic'", "result: break def monhorn_shell(self, session: Session) -> bool: \"\"\" Start Monhorn shell. :param", "command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated = True return True else: self.check_session(session) self.execute_custom_command(command, self.commands)", "hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime import Runtime from hatsploit.lib.session import Session from hatsploit.lib.commands", "Check is session alive. :param Session session: session to check :return bool: True", "WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A", "None: \"\"\" Start Monhorn console. 
:param Session session: session to start Monhorn console", "notice and this permission notice shall be included in all copies or substantial", "'commands/generic' exists, is_dir = self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) )", "Plugins from hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime import Runtime from hatsploit.lib.session import Session", "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE", "commands.'), ('quit', 'Stop interaction.') ] commands = {} def check_session(self, session: Session) ->", "<reponame>EntySec/Monhorn<gh_stars>0 \"\"\" MIT License Copyright (c) 2020-2022 EntySec Permission is hereby granted, free", "*self.core_commands) self.show_commands(self.commands) elif command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated = True return True else:", "charge, to any person obtaining a copy of this software and associated documentation", "\"\"\" from .plugins import Plugins from hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime import Runtime", "KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "module is intended for providing Monhorn main console. \"\"\" prompt = '%linemonhorn%end >", "session: Session) -> None: \"\"\" Start Monhorn. :param Session session: session to start", "monhorn module. This subclass of monhorn module is intended for providing Monhorn main", "\"\"\" self.start_monhorn(session) if self.check_session(session): while True: result = self.catch(self.monhorn_shell, [session]) if result is", "True def start_monhorn(self, session: Session) -> None: \"\"\" Start Monhorn. 
:param Session session:", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION", "persons to whom the Software is furnished to do so, subject to the", "self.commands.update( self.load_commands(commands) ) commands = session.monhorn + 'commands/generic' exists, is_dir = self.exists(commands) if", "Software is furnished to do so, subject to the following conditions: The above", "Badges from hatsploit.lib.runtime import Runtime from hatsploit.lib.session import Session from hatsploit.lib.commands import Commands", "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "if session is alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return False return", "hatsploit.lib.session import Session from hatsploit.lib.commands import Commands from pex.fs import FS class Console(Plugins,", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND", "self.commands.update( self.load_commands(commands) ) for command in self.commands: self.commands[command].session = session def monhorn_console(self, session:", "command = self.input_empty(self.prompt) if command: if command[0] == 'quit': return True elif command[0]", "Badges, Runtime, Commands, FS): \"\"\" Subclass of monhorn module. This subclass of monhorn", "(c) 2020-2022 EntySec Permission is hereby granted, free of charge, to any person", "to deal in the Software without restriction, including without limitation the rights to", "monhorn_console(self, session: Session) -> None: \"\"\" Start Monhorn console. :param Session session: session", "providing Monhorn main console. \"\"\" prompt = '%linemonhorn%end > ' core_commands = [", "prompt = '%linemonhorn%end > ' core_commands = [ ('exit', 'Terminate Monhorn session.'), ('help',", "to whom the Software is furnished to do so, subject to the following", "Runtime, Commands, FS): \"\"\" Subclass of monhorn module. 
This subclass of monhorn module", "documentation files (the \"Software\"), to deal in the Software without restriction, including without", "not is_dir: self.commands.update( self.load_commands(commands) ) for command in self.commands: self.commands[command].session = session def", "self.show_commands(self.commands) elif command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated = True return True else: self.check_session(session)", "files (the \"Software\"), to deal in the Software without restriction, including without limitation", "Copyright (c) 2020-2022 EntySec Permission is hereby granted, free of charge, to any", "Software without restriction, including without limitation the rights to use, copy, modify, merge,", "\"\"\" command = self.input_empty(self.prompt) if command: if command[0] == 'quit': return True elif", "to do so, subject to the following conditions: The above copyright notice and", "self.commands[command].session = session def monhorn_console(self, session: Session) -> None: \"\"\" Start Monhorn console.", "Start Monhorn. :param Session session: session to start Monhorn for :return None: None", "in the Software without restriction, including without limitation the rights to use, copy,", "import Runtime from hatsploit.lib.session import Session from hatsploit.lib.commands import Commands from pex.fs import", "None \"\"\" commands = session.monhorn + 'commands/' + session.details['Platform'].lower() exists, is_dir = self.exists(commands)", "elif command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated = True return True else: self.check_session(session) self.execute_custom_command(command,", "the Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "to any person obtaining a copy of this software and associated documentation files", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN", "pex.fs import FS class Console(Plugins, Badges, Runtime, Commands, FS): \"\"\" Subclass of monhorn", "Console(Plugins, Badges, Runtime, Commands, FS): \"\"\" Subclass of monhorn module. This subclass of", "command: if command[0] == 'quit': return True elif command[0] == 'help': self.print_table(\"Core Commands\",", "('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated = True return", "and not is_dir: self.commands.update( self.load_commands(commands) ) for command in self.commands: self.commands[command].session = session", "> ' core_commands = [ ('exit', 'Terminate Monhorn session.'), ('help', 'Show available commands.'),", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from", "session alive. :param Session session: session to check :return bool: True if session", "a copy of this software and associated documentation files (the \"Software\"), to deal", "start Monhorn shell for :return bool: True if Monhorn shell completed \"\"\" command", "Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "available commands.'), ('quit', 'Stop interaction.') ] commands = {} def check_session(self, session: Session)", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", ") for command in self.commands: self.commands[command].session = session def monhorn_console(self, session: Session) ->", "Monhorn console for :return None: None \"\"\" self.start_monhorn(session) if self.check_session(session): while True: result", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF", "= self.input_empty(self.prompt) if command: if command[0] == 'quit': return True elif command[0] ==", "elif command[0] == 'help': self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0] ==", "commands = session.monhorn + 'commands/generic' exists, is_dir = self.exists(commands) if exists and not", "free of charge, to any person obtaining a copy of this software and", "and this permission notice shall be included in all copies or substantial portions", "('quit', 'Stop interaction.') ] commands = {} def check_session(self, session: Session) -> bool:", "FS class Console(Plugins, Badges, Runtime, Commands, FS): \"\"\" Subclass of monhorn module. This", "and to permit persons to whom the Software is furnished to do so,", "True if session is alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return False", "session: Session) -> bool: \"\"\" Start Monhorn shell. :param Session session: session to", "start_monhorn(self, session: Session) -> None: \"\"\" Start Monhorn. 
:param Session session: session to", "rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "for command in self.commands: self.commands[command].session = session def monhorn_console(self, session: Session) -> None:", "\"\"\" commands = session.monhorn + 'commands/' + session.details['Platform'].lower() exists, is_dir = self.exists(commands) if", "EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "OTHER DEALINGS IN THE SOFTWARE. \"\"\" from .plugins import Plugins from hatsploit.core.cli.badges import", "Start Monhorn shell. :param Session session: session to start Monhorn shell for :return", "associated documentation files (the \"Software\"), to deal in the Software without restriction, including", "start Monhorn console for :return None: None \"\"\" self.start_monhorn(session) if self.check_session(session): while True:", "= self.catch(self.monhorn_shell, [session]) if result is not Exception and result: break def monhorn_shell(self,", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,", "is session alive. :param Session session: session to check :return bool: True if", "True elif command[0] == 'help': self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0]", "session.details['Platform'].lower() exists, is_dir = self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) )", "session def monhorn_console(self, session: Session) -> None: \"\"\" Start Monhorn console. :param Session", "notice shall be included in all copies or substantial portions of the Software.", "DEALINGS IN THE SOFTWARE. \"\"\" from .plugins import Plugins from hatsploit.core.cli.badges import Badges", "-> None: \"\"\" Start Monhorn console. 
:param Session session: session to start Monhorn", ":param Session session: session to start Monhorn shell for :return bool: True if", "2020-2022 EntySec Permission is hereby granted, free of charge, to any person obtaining", "self.load_commands(commands) ) commands = session.monhorn + 'commands/generic' exists, is_dir = self.exists(commands) if exists", "Commands from pex.fs import FS class Console(Plugins, Badges, Runtime, Commands, FS): \"\"\" Subclass", "session.monhorn + 'commands/generic' exists, is_dir = self.exists(commands) if exists and not is_dir: self.commands.update(", "command[0] == 'quit': return True elif command[0] == 'help': self.print_table(\"Core Commands\", ('Command', 'Description'),", "monhorn module is intended for providing Monhorn main console. \"\"\" prompt = '%linemonhorn%end", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT", "copy of this software and associated documentation files (the \"Software\"), to deal in", "substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "MIT License Copyright (c) 2020-2022 EntySec Permission is hereby granted, free of charge,", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION", "commands = {} def check_session(self, session: Session) -> bool: \"\"\" Check is session", "command in self.commands: self.commands[command].session = session def monhorn_console(self, session: Session) -> None: \"\"\"", "completed \"\"\" command = self.input_empty(self.prompt) if command: if command[0] == 'quit': return True", "obtaining a copy of this software and associated documentation files (the \"Software\"), to", "interaction.') ] commands = {} def check_session(self, session: Session) -> bool: \"\"\" Check", "session.close() return False return True def start_monhorn(self, session: Session) -> None: \"\"\" Start", "TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN", "self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated =", "== 'help': self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0] == 'exit': session.send_command(\"exit\")", "Subclass of monhorn module. This subclass of monhorn module is intended for providing", "OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "class Console(Plugins, Badges, Runtime, Commands, FS): \"\"\" Subclass of monhorn module. This subclass", "self.start_monhorn(session) if self.check_session(session): while True: result = self.catch(self.monhorn_shell, [session]) if result is not", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR", "console. \"\"\" prompt = '%linemonhorn%end > ' core_commands = [ ('exit', 'Terminate Monhorn", "OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "is_dir = self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) ) for command", ":param Session session: session to start Monhorn for :return None: None \"\"\" commands", "publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons", "including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,", "main console. \"\"\" prompt = '%linemonhorn%end > ' core_commands = [ ('exit', 'Terminate", "of monhorn module is intended for providing Monhorn main console. \"\"\" prompt =", "not Exception and result: break def monhorn_shell(self, session: Session) -> bool: \"\"\" Start", "or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", ":return None: None \"\"\" commands = session.monhorn + 'commands/' + session.details['Platform'].lower() exists, is_dir", "self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) ) for command in self.commands:", "console. :param Session session: session to start Monhorn console for :return None: None", "def monhorn_shell(self, session: Session) -> bool: \"\"\" Start Monhorn shell. :param Session session:", "hatsploit.lib.commands import Commands from pex.fs import FS class Console(Plugins, Badges, Runtime, Commands, FS):", "import Plugins from hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime import Runtime from hatsploit.lib.session import", "session to start Monhorn for :return None: None \"\"\" commands = session.monhorn +", "all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS", "SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "True: result = self.catch(self.monhorn_shell, [session]) if result is not Exception and result: break", "Monhorn session.'), ('help', 'Show available commands.'), ('quit', 'Stop interaction.') ] commands = {}", "start Monhorn for :return None: None \"\"\" commands = session.monhorn + 'commands/' +", ":param Session session: session to start Monhorn console for :return None: None \"\"\"", ") commands = session.monhorn + 'commands/generic' exists, is_dir = self.exists(commands) if exists and", "for :return None: None \"\"\" self.start_monhorn(session) if self.check_session(session): while True: result = self.catch(self.monhorn_shell,", "for :return bool: True if Monhorn shell completed \"\"\" command = self.input_empty(self.prompt) if", "OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "monhorn_shell(self, session: Session) -> bool: \"\"\" Start Monhorn shell. :param Session session: session", "is intended for providing Monhorn main console. 
\"\"\" prompt = '%linemonhorn%end > '", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the", "USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from .plugins import Plugins from", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN", "if self.check_session(session): while True: result = self.catch(self.monhorn_shell, [session]) if result is not Exception", "ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "self.check_session(session): while True: result = self.catch(self.monhorn_shell, [session]) if result is not Exception and", "('exit', 'Terminate Monhorn session.'), ('help', 'Show available commands.'), ('quit', 'Stop interaction.') ] commands", "OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "result is not Exception and result: break def monhorn_shell(self, session: Session) -> bool:", ":param Session session: session to check :return bool: True if session is alive", "\"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return False return True def start_monhorn(self, session:", "above copyright notice and this permission notice shall be included in all copies", "Session) -> None: \"\"\" Start Monhorn. :param Session session: session to start Monhorn", "WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "not is_dir: self.commands.update( self.load_commands(commands) ) commands = session.monhorn + 'commands/generic' exists, is_dir =", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS", "permission notice shall be included in all copies or substantial portions of the", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS", "\"\"\" Start Monhorn shell. 
:param Session session: session to start Monhorn shell for", ":return bool: True if Monhorn shell completed \"\"\" command = self.input_empty(self.prompt) if command:", "alive. :param Session session: session to check :return bool: True if session is", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH", "to start Monhorn console for :return None: None \"\"\" self.start_monhorn(session) if self.check_session(session): while", ".plugins import Plugins from hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime import Runtime from hatsploit.lib.session", "None \"\"\" self.start_monhorn(session) if self.check_session(session): while True: result = self.catch(self.monhorn_shell, [session]) if result", "the following conditions: The above copyright notice and this permission notice shall be", "while True: result = self.catch(self.monhorn_shell, [session]) if result is not Exception and result:", "def start_monhorn(self, session: Session) -> None: \"\"\" Start Monhorn. :param Session session: session", "for :return None: None \"\"\" commands = session.monhorn + 'commands/' + session.details['Platform'].lower() exists,", "from pex.fs import FS class Console(Plugins, Badges, Runtime, Commands, FS): \"\"\" Subclass of", "Monhorn. 
:param Session session: session to start Monhorn for :return None: None \"\"\"", "import Commands from pex.fs import FS class Console(Plugins, Badges, Runtime, Commands, FS): \"\"\"", "if command: if command[0] == 'quit': return True elif command[0] == 'help': self.print_table(\"Core", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT", "to start Monhorn shell for :return bool: True if Monhorn shell completed \"\"\"", "import FS class Console(Plugins, Badges, Runtime, Commands, FS): \"\"\" Subclass of monhorn module.", "'commands/' + session.details['Platform'].lower() exists, is_dir = self.exists(commands) if exists and not is_dir: self.commands.update(", "furnished to do so, subject to the following conditions: The above copyright notice", "result = self.catch(self.monhorn_shell, [session]) if result is not Exception and result: break def", "to start Monhorn for :return None: None \"\"\" commands = session.monhorn + 'commands/'", "permit persons to whom the Software is furnished to do so, subject to", "any person obtaining a copy of this software and associated documentation files (the", "if Monhorn shell completed \"\"\" command = self.input_empty(self.prompt) if command: if command[0] ==", "copies of the Software, and to permit persons to whom the Software is", "'exit': session.send_command(\"exit\") session.channel.terminated = True return True else: self.check_session(session) self.execute_custom_command(command, self.commands) return False", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", "is alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return False return True def", "-> None: \"\"\" Start Monhorn. :param Session session: session to start Monhorn for", "\"\"\" Start Monhorn. :param Session session: session to start Monhorn for :return None:", "included in all copies or substantial portions of the Software. 
THE SOFTWARE IS", "= session.monhorn + 'commands/' + session.details['Platform'].lower() exists, is_dir = self.exists(commands) if exists and", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and", "'%linemonhorn%end > ' core_commands = [ ('exit', 'Terminate Monhorn session.'), ('help', 'Show available", "= self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) ) commands = session.monhorn", "session to start Monhorn console for :return None: None \"\"\" self.start_monhorn(session) if self.check_session(session):", "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "the Software, and to permit persons to whom the Software is furnished to", "-> bool: \"\"\" Start Monhorn shell. :param Session session: session to start Monhorn", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,", "def check_session(self, session: Session) -> bool: \"\"\" Check is session alive. :param Session", "following conditions: The above copyright notice and this permission notice shall be included", "from .plugins import Plugins from hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime import Runtime from", "copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\",", "== 'quit': return True elif command[0] == 'help': self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands)", "NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "return True def start_monhorn(self, session: Session) -> None: \"\"\" Start Monhorn. 
:param Session", "The above copyright notice and this permission notice shall be included in all", "Monhorn for :return None: None \"\"\" commands = session.monhorn + 'commands/' + session.details['Platform'].lower()", "= session.monhorn + 'commands/generic' exists, is_dir = self.exists(commands) if exists and not is_dir:", "False return True def start_monhorn(self, session: Session) -> None: \"\"\" Start Monhorn. :param", "+ session.details['Platform'].lower() exists, is_dir = self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands)", "from hatsploit.lib.commands import Commands from pex.fs import FS class Console(Plugins, Badges, Runtime, Commands,", "\"\"\" Subclass of monhorn module. This subclass of monhorn module is intended for", "shell for :return bool: True if Monhorn shell completed \"\"\" command = self.input_empty(self.prompt)", "\"Software\"), to deal in the Software without restriction, including without limitation the rights", "deal in the Software without restriction, including without limitation the rights to use,", "exists, is_dir = self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) ) for", "granted, free of charge, to any person obtaining a copy of this software", ":return bool: True if session is alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close()", "limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from .plugins import Plugins from hatsploit.core.cli.badges", "for providing Monhorn main console. \"\"\" prompt = '%linemonhorn%end > ' core_commands =", "[ ('exit', 'Terminate Monhorn session.'), ('help', 'Show available commands.'), ('quit', 'Stop interaction.') ]", "AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "exists and not is_dir: self.commands.update( self.load_commands(commands) ) commands = session.monhorn + 'commands/generic' exists,", "ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "None: None \"\"\" self.start_monhorn(session) if self.check_session(session): while True: result = self.catch(self.monhorn_shell, [session]) if", "of this software and associated documentation files (the \"Software\"), to deal in the", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO", "session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return False return True def start_monhorn(self, session: Session) ->", "sell copies of the Software, and to permit persons to whom the Software", "session is alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return False return True", "core_commands = [ ('exit', 'Terminate Monhorn session.'), ('help', 'Show available commands.'), ('quit', 'Stop", "shell completed \"\"\" command = self.input_empty(self.prompt) if command: if command[0] == 'quit': return", "session: session to start Monhorn console for :return None: None \"\"\" self.start_monhorn(session) if", "bool: \"\"\" Check is session alive. :param Session session: session to check :return", "do so, subject to the following conditions: The above copyright notice and this", "' core_commands = [ ('exit', 'Terminate Monhorn session.'), ('help', 'Show available commands.'), ('quit',", "] commands = {} def check_session(self, session: Session) -> bool: \"\"\" Check is", "break def monhorn_shell(self, session: Session) -> bool: \"\"\" Start Monhorn shell. 
:param Session", "is furnished to do so, subject to the following conditions: The above copyright", "if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return False return True def start_monhorn(self, session: Session)", "so, subject to the following conditions: The above copyright notice and this permission", "THE SOFTWARE. \"\"\" from .plugins import Plugins from hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime", "console for :return None: None \"\"\" self.start_monhorn(session) if self.check_session(session): while True: result =", "session.'), ('help', 'Show available commands.'), ('quit', 'Stop interaction.') ] commands = {} def", "Session session: session to check :return bool: True if session is alive \"\"\"", "SOFTWARE. \"\"\" from .plugins import Plugins from hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime import", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR", "'quit': return True elif command[0] == 'help': self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands)", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR", "hatsploit.lib.runtime import Runtime from hatsploit.lib.session import Session from hatsploit.lib.commands import Commands from pex.fs", "of the Software, and to permit persons to whom the Software is furnished", "alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\") session.close() return False return True def start_monhorn(self,", "and/or sell copies of the Software, and to permit persons to whom the", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from .plugins", "\"\"\" Check is session alive. :param Session session: session to check :return bool:", "def monhorn_console(self, session: Session) -> None: \"\"\" Start Monhorn console. 
:param Session session:", "session: Session) -> bool: \"\"\" Check is session alive. :param Session session: session", "of charge, to any person obtaining a copy of this software and associated", "(the \"Software\"), to deal in the Software without restriction, including without limitation the", "THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from .plugins import Plugins", "copyright notice and this permission notice shall be included in all copies or", "True if Monhorn shell completed \"\"\" command = self.input_empty(self.prompt) if command: if command[0]", "Monhorn shell completed \"\"\" command = self.input_empty(self.prompt) if command: if command[0] == 'quit':", "to permit persons to whom the Software is furnished to do so, subject", "FS): \"\"\" Subclass of monhorn module. This subclass of monhorn module is intended", "('help', 'Show available commands.'), ('quit', 'Stop interaction.') ] commands = {} def check_session(self,", "exists and not is_dir: self.commands.update( self.load_commands(commands) ) for command in self.commands: self.commands[command].session =", "conditions: The above copyright notice and this permission notice shall be included in", "if command[0] == 'quit': return True elif command[0] == 'help': self.print_table(\"Core Commands\", ('Command',", "'help': self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated", "THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "subclass of monhorn module is intended for providing Monhorn main console. \"\"\" prompt", "is_dir: self.commands.update( self.load_commands(commands) ) for command in self.commands: self.commands[command].session = session def monhorn_console(self,", "Session) -> bool: \"\"\" Start Monhorn shell. 
:param Session session: session to start", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER", "Permission is hereby granted, free of charge, to any person obtaining a copy", "be included in all copies or substantial portions of the Software. THE SOFTWARE", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "Session session: session to start Monhorn for :return None: None \"\"\" commands =", "whom the Software is furnished to do so, subject to the following conditions:", ":return None: None \"\"\" self.start_monhorn(session) if self.check_session(session): while True: result = self.catch(self.monhorn_shell, [session])", "is_dir = self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) ) commands =", "self.load_commands(commands) ) for command in self.commands: self.commands[command].session = session def monhorn_console(self, session: Session)", "self.input_empty(self.prompt) if command: if command[0] == 'quit': return True elif command[0] == 'help':", "= [ ('exit', 'Terminate Monhorn session.'), ('help', 'Show available commands.'), ('quit', 'Stop interaction.')", "from hatsploit.core.cli.badges import Badges from hatsploit.lib.runtime import Runtime from hatsploit.lib.session import Session from", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\"", "IN THE SOFTWARE. \"\"\" from .plugins import Plugins from hatsploit.core.cli.badges import Badges from", "module. This subclass of monhorn module is intended for providing Monhorn main console.", "in self.commands: self.commands[command].session = session def monhorn_console(self, session: Session) -> None: \"\"\" Start", "FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "bool: \"\"\" Start Monhorn shell. :param Session session: session to start Monhorn shell", "session: session to start Monhorn shell for :return bool: True if Monhorn shell", "Monhorn console. 
:param Session session: session to start Monhorn console for :return None:", "== 'exit': session.send_command(\"exit\") session.channel.terminated = True return True else: self.check_session(session) self.execute_custom_command(command, self.commands) return", "= self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) ) for command in", "check_session(self, session: Session) -> bool: \"\"\" Check is session alive. :param Session session:", "self.print_warning(\"Connection terminated.\") session.close() return False return True def start_monhorn(self, session: Session) -> None:", "portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "Runtime from hatsploit.lib.session import Session from hatsploit.lib.commands import Commands from pex.fs import FS", "self.commands: self.commands[command].session = session def monhorn_console(self, session: Session) -> None: \"\"\" Start Monhorn", "DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "session: Session) -> None: \"\"\" Start Monhorn console. :param Session session: session to", "= '%linemonhorn%end > ' core_commands = [ ('exit', 'Terminate Monhorn session.'), ('help', 'Show", "distribute, sublicense, and/or sell copies of the Software, and to permit persons to", "of the Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "software and associated documentation files (the \"Software\"), to deal in the Software without", "session to start Monhorn shell for :return bool: True if Monhorn shell completed", "return False return True def start_monhorn(self, session: Session) -> None: \"\"\" Start Monhorn.", "Session session: session to start Monhorn shell for :return bool: True if Monhorn", "Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated = True", "exists, is_dir = self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) ) commands", "\"\"\" prompt = '%linemonhorn%end > ' core_commands = [ ('exit', 'Terminate Monhorn session.'),", "Monhorn shell for :return bool: True if Monhorn shell completed \"\"\" command =", "import Session from hatsploit.lib.commands import Commands from pex.fs import FS class Console(Plugins, Badges,", "License Copyright (c) 2020-2022 EntySec Permission is hereby granted, free of charge, to", "shall be included in all copies or substantial portions of the Software. 
THE", "self.catch(self.monhorn_shell, [session]) if result is not Exception and result: break def monhorn_shell(self, session:", "check :return bool: True if session is alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection terminated.\")", "NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands) ) commands = session.monhorn +", "session: session to start Monhorn for :return None: None \"\"\" commands = session.monhorn", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0] == 'exit': session.send_command(\"exit\") session.channel.terminated = True return True", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT", "intended for providing Monhorn main console. \"\"\" prompt = '%linemonhorn%end > ' core_commands", "the Software is furnished to do so, subject to the following conditions: The", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING", "[session]) if result is not Exception and result: break def monhorn_shell(self, session: Session)", "bool: True if Monhorn shell completed \"\"\" command = self.input_empty(self.prompt) if command: if", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT", "is not Exception and result: break def monhorn_shell(self, session: Session) -> bool: \"\"\"", "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS", "import Badges from hatsploit.lib.runtime import Runtime from hatsploit.lib.session import Session from hatsploit.lib.commands import", "to check :return bool: True if session is alive \"\"\" if session.channel.terminated: self.print_warning(\"Connection", "subject to the following conditions: The above copyright notice and this permission notice", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE", "Monhorn main console. \"\"\" prompt = '%linemonhorn%end > ' core_commands = [ ('exit',", "'Stop interaction.') ] commands = {} def check_session(self, session: Session) -> bool: \"\"\"", "'Terminate Monhorn session.'), ('help', 'Show available commands.'), ('quit', 'Stop interaction.') ] commands =", "\"\"\" MIT License Copyright (c) 2020-2022 EntySec Permission is hereby granted, free of", "None: None \"\"\" commands = session.monhorn + 'commands/' + session.details['Platform'].lower() exists, is_dir =", "commands = session.monhorn + 'commands/' + session.details['Platform'].lower() exists, is_dir = self.exists(commands) if exists", "+ 'commands/generic' exists, is_dir = self.exists(commands) if exists and not is_dir: self.commands.update( self.load_commands(commands)", "is hereby granted, free of charge, to any person obtaining a copy of", "Exception and result: break def monhorn_shell(self, session: Session) -> bool: \"\"\" Start Monhorn", "and associated documentation files (the \"Software\"), to deal in the Software without restriction,", "FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or", "This subclass of monhorn module is intended for providing Monhorn main console. 
\"\"\"", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "return True elif command[0] == 'help': self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif", "None: \"\"\" Start Monhorn. :param Session session: session to start Monhorn for :return", "hereby granted, free of charge, to any person obtaining a copy of this", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE", "OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from .plugins import", "command[0] == 'help': self.print_table(\"Core Commands\", ('Command', 'Description'), *self.core_commands) self.show_commands(self.commands) elif command[0] == 'exit':", "restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute,", "OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR", "and result: break def monhorn_shell(self, session: Session) -> bool: \"\"\" Start Monhorn shell.", "to the following conditions: The above copyright notice and this permission notice shall", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "Software, and to permit persons to whom the Software is furnished to do", "Session session: session to start Monhorn console for :return None: None \"\"\" self.start_monhorn(session)", "if exists and not is_dir: self.commands.update( self.load_commands(commands) ) for command in self.commands: self.commands[command].session", "if result is not Exception and result: break def monhorn_shell(self, session: Session) ->", "from hatsploit.lib.session import Session from hatsploit.lib.commands import Commands from pex.fs import FS class", "Monhorn shell. :param Session session: session to start Monhorn shell for :return bool:", "Session) -> bool: \"\"\" Check is session alive. :param Session session: session to" ]
[]
[ "get_input(self): usr = input('> ') found = False for Option in opt: if", "opt.append(o) def remove_option(self, _option): for Option in opt: if Option.option == _option: opt.remove(Option)", "False for Option in opt: if usr == str(Option.index): found = True Option.command()", "def __init__(self, option, command): global index self.option = option self.command = command self.index", "_option): for Option in opt: if Option.option == _option: opt.remove(Option) def clear_option(self): opt.clear()", "print_all(self): self.print_menu() self.print_options() def add_option(self, option, command): o = Option(option, command) opt.append(o) def", "Option in opt: print('[{}] {}'.format(option_n, Option.option)) option_n += 1 def print_all(self): self.print_menu() self.print_options()", "== True: pass else: print(\"Invalid command\") def about(self): return print('Made by {}\\nVersion: {}\\nLicense:", "__license__ = ('GNU General Public License v3.0') opt = [] index = 1", "global index self.option = option self.command = command self.index = index opt.append(self) index", "= True Option.command() if found == True: pass else: print(\"Invalid command\") def about(self):", "('<NAME> (<NAME>)') __version__ = ('3.5') __license__ = ('GNU General Public License v3.0') opt", "{}'.format(__author__,__version__,__license__)) class Option(): def __init__(self, option, command): global index self.option = option self.command", "opt.clear() def get_input(self): usr = input('> ') found = False for Option in", "option_n += 1 def print_all(self): self.print_menu() self.print_options() def add_option(self, option, command): o =", "Option.option)) option_n += 1 def print_all(self): self.print_menu() self.print_options() def add_option(self, option, command): o", "print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n = 1 for Option in opt: print('[{}] {}'.format(option_n,", "def get_input(self): usr = input('> ') found = False for Option in 
opt:", "-*- __author__ = ('<NAME> (<NAME>)') __version__ = ('3.5') __license__ = ('GNU General Public", "opt: print('[{}] {}'.format(option_n, Option.option)) option_n += 1 def print_all(self): self.print_menu() self.print_options() def add_option(self,", "command) opt.append(o) def remove_option(self, _option): for Option in opt: if Option.option == _option:", "= 1 class Menu(): def __init__(self, title, character, char_length): self.title = title self.character", "self.character = character self.char_length = int(char_length) def change_title(self, new_title): self.title = new_title def", "python #-*- coding:utf-8 -*- __author__ = ('<NAME> (<NAME>)') __version__ = ('3.5') __license__ =", "_option: opt.remove(Option) def clear_option(self): opt.clear() def get_input(self): usr = input('> ') found =", "def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n = 1 for Option in", "self.title = title self.character = character self.char_length = int(char_length) def change_title(self, new_title): self.title", "print(\"Invalid command\") def about(self): return print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def", "License v3.0') opt = [] index = 1 class Menu(): def __init__(self, title,", "self.print_menu() self.print_options() def add_option(self, option, command): o = Option(option, command) opt.append(o) def remove_option(self,", "return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n = 1 for Option in opt: print('[{}]", "new_title): self.title = new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n =", "+= 1 def print_all(self): self.print_menu() self.print_options() def add_option(self, option, command): o = Option(option,", "self.print_options() def add_option(self, option, command): 
o = Option(option, command) opt.append(o) def remove_option(self, _option):", "__init__(self, option, command): global index self.option = option self.command = command self.index =", "('3.5') __license__ = ('GNU General Public License v3.0') opt = [] index =", "input('> ') found = False for Option in opt: if usr == str(Option.index):", "def print_options(self): option_n = 1 for Option in opt: print('[{}] {}'.format(option_n, Option.option)) option_n", "True Option.command() if found == True: pass else: print(\"Invalid command\") def about(self): return", "return print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def __init__(self, option, command): global", "if Option.option == _option: opt.remove(Option) def clear_option(self): opt.clear() def get_input(self): usr = input('>", "index = 1 class Menu(): def __init__(self, title, character, char_length): self.title = title", "{}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def __init__(self, option, command): global index self.option = option", "(<NAME>)') __version__ = ('3.5') __license__ = ('GNU General Public License v3.0') opt =", "Option in opt: if usr == str(Option.index): found = True Option.command() if found", "character self.char_length = int(char_length) def change_title(self, new_title): self.title = new_title def print_menu(self): return", "__author__ = ('<NAME> (<NAME>)') __version__ = ('3.5') __license__ = ('GNU General Public License", "__version__ = ('3.5') __license__ = ('GNU General Public License v3.0') opt = []", "self.option = option self.command = command self.index = index opt.append(self) index += 1", "for Option in opt: if Option.option == _option: opt.remove(Option) def clear_option(self): opt.clear() def", "= title self.character = character self.char_length = int(char_length) def change_title(self, new_title): self.title =", "= character self.char_length = int(char_length) def change_title(self, 
new_title): self.title = new_title def print_menu(self):", "General Public License v3.0') opt = [] index = 1 class Menu(): def", "coding:utf-8 -*- __author__ = ('<NAME> (<NAME>)') __version__ = ('3.5') __license__ = ('GNU General", "for Option in opt: print('[{}] {}'.format(option_n, Option.option)) option_n += 1 def print_all(self): self.print_menu()", "= [] index = 1 class Menu(): def __init__(self, title, character, char_length): self.title", "= ('3.5') __license__ = ('GNU General Public License v3.0') opt = [] index", "{}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def __init__(self, option, command): global index self.option =", "str(Option.index): found = True Option.command() if found == True: pass else: print(\"Invalid command\")", "') found = False for Option in opt: if usr == str(Option.index): found", "found == True: pass else: print(\"Invalid command\") def about(self): return print('Made by {}\\nVersion:", "Option(): def __init__(self, option, command): global index self.option = option self.command = command", "== _option: opt.remove(Option) def clear_option(self): opt.clear() def get_input(self): usr = input('> ') found", "__init__(self, title, character, char_length): self.title = title self.character = character self.char_length = int(char_length)", "print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n = 1 for Option in opt:", "Menu(): def __init__(self, title, character, char_length): self.title = title self.character = character self.char_length", "= 1 for Option in opt: print('[{}] {}'.format(option_n, Option.option)) option_n += 1 def", "title self.character = character self.char_length = int(char_length) def change_title(self, new_title): self.title = new_title", "= ('GNU General Public License v3.0') opt = [] index = 1 class", "Option.command() if found == True: pass else: print(\"Invalid command\") def about(self): return 
print('Made", "int(char_length) def change_title(self, new_title): self.title = new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def", "usr = input('> ') found = False for Option in opt: if usr", "character, char_length): self.title = title self.character = character self.char_length = int(char_length) def change_title(self,", "#!/usr/bin/env python #-*- coding:utf-8 -*- __author__ = ('<NAME> (<NAME>)') __version__ = ('3.5') __license__", "def clear_option(self): opt.clear() def get_input(self): usr = input('> ') found = False for", "if found == True: pass else: print(\"Invalid command\") def about(self): return print('Made by", "Option.option == _option: opt.remove(Option) def clear_option(self): opt.clear() def get_input(self): usr = input('> ')", "[] index = 1 class Menu(): def __init__(self, title, character, char_length): self.title =", "def change_title(self, new_title): self.title = new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self):", "def __init__(self, title, character, char_length): self.title = title self.character = character self.char_length =", "by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def __init__(self, option, command): global index self.option", "class Menu(): def __init__(self, title, character, char_length): self.title = title self.character = character", "change_title(self, new_title): self.title = new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n", "= int(char_length) def change_title(self, new_title): self.title = new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title))", "1 def print_all(self): self.print_menu() self.print_options() def add_option(self, option, command): o = Option(option, command)", 
"self.title)) def print_options(self): option_n = 1 for Option in opt: print('[{}] {}'.format(option_n, Option.option))", "self.char_length = int(char_length) def change_title(self, new_title): self.title = new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length,", "('GNU General Public License v3.0') opt = [] index = 1 class Menu():", "Option(option, command) opt.append(o) def remove_option(self, _option): for Option in opt: if Option.option ==", "remove_option(self, _option): for Option in opt: if Option.option == _option: opt.remove(Option) def clear_option(self):", "v3.0') opt = [] index = 1 class Menu(): def __init__(self, title, character,", "about(self): return print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def __init__(self, option, command):", "def add_option(self, option, command): o = Option(option, command) opt.append(o) def remove_option(self, _option): for", "opt: if usr == str(Option.index): found = True Option.command() if found == True:", "add_option(self, option, command): o = Option(option, command) opt.append(o) def remove_option(self, _option): for Option", "char_length): self.title = title self.character = character self.char_length = int(char_length) def change_title(self, new_title):", "pass else: print(\"Invalid command\") def about(self): return print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class", "= ('<NAME> (<NAME>)') __version__ = ('3.5') __license__ = ('GNU General Public License v3.0')", "self.title = new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n = 1", "in opt: if Option.option == _option: opt.remove(Option) def clear_option(self): opt.clear() def get_input(self): usr", "option, command): global index self.option = option self.command = command self.index = index", "= False for Option in opt: if 
usr == str(Option.index): found = True", "option_n = 1 for Option in opt: print('[{}] {}'.format(option_n, Option.option)) option_n += 1", "usr == str(Option.index): found = True Option.command() if found == True: pass else:", "clear_option(self): opt.clear() def get_input(self): usr = input('> ') found = False for Option", "for Option in opt: if usr == str(Option.index): found = True Option.command() if", "class Option(): def __init__(self, option, command): global index self.option = option self.command =", "option, command): o = Option(option, command) opt.append(o) def remove_option(self, _option): for Option in", "= Option(option, command) opt.append(o) def remove_option(self, _option): for Option in opt: if Option.option", "found = False for Option in opt: if usr == str(Option.index): found =", "in opt: print('[{}] {}'.format(option_n, Option.option)) option_n += 1 def print_all(self): self.print_menu() self.print_options() def", "command): o = Option(option, command) opt.append(o) def remove_option(self, _option): for Option in opt:", "o = Option(option, command) opt.append(o) def remove_option(self, _option): for Option in opt: if", "= input('> ') found = False for Option in opt: if usr ==", "opt.remove(Option) def clear_option(self): opt.clear() def get_input(self): usr = input('> ') found = False", "def print_all(self): self.print_menu() self.print_options() def add_option(self, option, command): o = Option(option, command) opt.append(o)", "in opt: if usr == str(Option.index): found = True Option.command() if found ==", "def remove_option(self, _option): for Option in opt: if Option.option == _option: opt.remove(Option) def", "else: print(\"Invalid command\") def about(self): return print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option():", "print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def __init__(self, option, command): global index", "found = 
True Option.command() if found == True: pass else: print(\"Invalid command\") def", "Option in opt: if Option.option == _option: opt.remove(Option) def clear_option(self): opt.clear() def get_input(self):", "new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n = 1 for Option", "print('[{}] {}'.format(option_n, Option.option)) option_n += 1 def print_all(self): self.print_menu() self.print_options() def add_option(self, option,", "Public License v3.0') opt = [] index = 1 class Menu(): def __init__(self,", "opt = [] index = 1 class Menu(): def __init__(self, title, character, char_length):", "def about(self): return print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def __init__(self, option,", "True: pass else: print(\"Invalid command\") def about(self): return print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__))", "== str(Option.index): found = True Option.command() if found == True: pass else: print(\"Invalid", "= new_title def print_menu(self): return print('{0}{1}{0}'.format(self.character*self.char_length, self.title)) def print_options(self): option_n = 1 for", "1 for Option in opt: print('[{}] {}'.format(option_n, Option.option)) option_n += 1 def print_all(self):", "if usr == str(Option.index): found = True Option.command() if found == True: pass", "title, character, char_length): self.title = title self.character = character self.char_length = int(char_length) def", "{}'.format(option_n, Option.option)) option_n += 1 def print_all(self): self.print_menu() self.print_options() def add_option(self, option, command):", "print_options(self): option_n = 1 for Option in opt: print('[{}] {}'.format(option_n, Option.option)) option_n +=", "command): global index self.option = option self.command = command self.index = index opt.append(self)", "command\") def about(self): return 
print('Made by {}\\nVersion: {}\\nLicense: {}'.format(__author__,__version__,__license__)) class Option(): def __init__(self,", "1 class Menu(): def __init__(self, title, character, char_length): self.title = title self.character =", "index self.option = option self.command = command self.index = index opt.append(self) index +=", "#-*- coding:utf-8 -*- __author__ = ('<NAME> (<NAME>)') __version__ = ('3.5') __license__ = ('GNU", "opt: if Option.option == _option: opt.remove(Option) def clear_option(self): opt.clear() def get_input(self): usr =" ]
[ "'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw) assert 'base_class' not in unused, 'unused_kwargs leaks used.'", "did not work correctly.' y = engine.permute(x, 'DBC', 'CDB') assert list(y.shape) == [4,", "a, 'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is b, 'get_default_parent result mismatchs", "base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1] == 3, 'forward_kw did not work correctly.'", "engine._auto_name('test', a) == 'test_1', 'forward_pre_hook did not work.' def test_initialize(): a = nn.Parameter(torch.zeros(3,", "m = nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): # to test tuple_out def forward(self, x,", "'BDC', 'BCD') assert list(y.shape) == [1, 2], 'permute 2d tensor with str in_shape", "x) assert engine.is_ready(m), 'prepare_model_ did not work correctly.' assert m.linear_1.bias is None, 'linear_1", "work correctly.' y = engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] ==", "to reset names assert engine._auto_name('test', a) == 'test_1', 'forward_pre_hook did not work.' def", "correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1] == 3, 'forward_kw", "forward(self, x): x = engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False), # in_features, out_features,", "torch.randn(1, 2, 3) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 2,", "== 3, 'forward_kw did not work correctly.' 
y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,))", "4)) b = nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1)", "assert list(y.shape) == [3, 2, 1], 'permute 3d tensor with str in_shape and", "in_shape and str out_shape did not work correctly.' y = engine.permute(x, 'DBC', 'CDB')", "= nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''): return ';'.join([f2(name=name) for i in range(2)]) @engine.namespace", "3) m = nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): # to test tuple_out def forward(self,", "nn import torch.nn.functional as F import copy from pathlib import Path import sys", "'unused2'}, 'unused_kwargs did not filter kw correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self,", "c), 'initialize_ with function spec did not work correctly.' def test_activate(): a =", "test_permute(): x = torch.randn(1, 2, 3) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape)", "correctly.' y = engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation did not", "== 'test_1', 'forward_pre_hook did not work.' def test_initialize(): a = nn.Parameter(torch.zeros(3, 4)) b", "2]) assert list(y.shape) == [2, 1, 3], 'permute tensor with list out_shape did", "sys.path.append(str(Path(__file__).parent.parent)) from warm import engine def test_set_get_default_parent(): a = nn.Identity() b = nn.Identity()", "TestModel(nn.Module): def forward(self, x): x = engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False), #", "after linear projection.' 
def test_forward(): x = torch.randn(1, 2, 3) m = nn.Module()", "False), # in_features, out_features, bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return", "str out_shape did not work correctly.' y = engine.permute(x, 'BCD', None) assert list(y.shape)", "torch.equal(a, c), 'initialize_ with function spec did not work correctly.' def test_activate(): a", "y = engine.permute(x, 'DBC', 'CDB') assert list(y.shape) == [4, 1, 2, 3], 'permute", "not work.' def test_initialize(): a = nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3, 4)) c", "'activate with str spec did not work correctly.' a = engine.activate(a, 'relu') b", "kw = {'unused1':0, 'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw) assert 'base_class' not in unused,", "not work correctly.' y = engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation", "base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] == 4, 'base_kw, infer_kw did not work", "leaks used.' assert set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs did not filter kw correctly.'", "cases for warm.engine. \"\"\" import torch import torch.nn as nn import torch.nn.functional as", "== [3, 2, 1], 'permute 3d tensor with str in_shape and str out_shape", "engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b), 'initialize_ with str spec did not", "with str spec did not work correctly.' a = engine.activate(a, 'relu') b =", "4)) c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b),", "activation=(F.dropout, {'p':1.0}), ) return x x = torch.randn(1, 2, 3) m = TestModel()", "base_shape='BDC') assert y.shape[1] == 4, 'base_kw, infer_kw did not work correctly.' 
def test_namespace():", "torch.randn(1, 2, 3) m = TestModel() assert not engine.is_ready(m), 'is_ready did not work", "initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return x x = torch.randn(1, 2, 3) m =", "a) == 'test_1', 'forward_pre_hook did not work.' def test_initialize(): a = nn.Parameter(torch.zeros(3, 4))", "b, 'get_default_parent result mismatchs set_default_parent.' def test_auto_name(): a = nn.Identity() for i in", "= torch.randn(1, 2, 3, 4) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) ==", "engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation did not work correctly.' y", "1], 'permute 3d tensor with str in_shape and str out_shape did not work", "= F.hardshrink(b) assert torch.equal(a, b), 'activate with str spec did not work correctly.'", "torch.allclose(y, torch.Tensor([0.0])), 'y should be all 0s because we dropout everything.' assert list(y.shape)", "set_default_parent.' def test_auto_name(): a = nn.Identity() for i in range(10): assert engine._auto_name('test', a)", "i in range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2' assert s1 == 'f1_2-f2_1;f1_2-f2_2' assert s2", "def test_forward(): x = torch.randn(1, 2, 3) m = nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module):", "= engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation did not work correctly.'", "'is_ready did not work correctly.' engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_ did not work", "{'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation did not work correctly.' y = engine.forward( x,", "did not work correctly.' def test_permute(): x = torch.randn(1, 2, 3) y =", "not work correctly.' assert m.linear_1.bias is None, 'linear_1 should not have bias.' assert", "3 and y[-1] == '2', 'tuple_out did not work correctly.' 
y = engine.forward(x,", "assert list(y.shape) == [2, 1], 'permute 2d tensor with str in_shape and str", "not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg", "test_namespace(): m = nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''): return ';'.join([f2(name=name) for i in", "should be initialized to all 1s.' y = m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y", "base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1] == 3, 'forward_kw did not work correctly.' y", "torch import torch.nn as nn import torch.nn.functional as F import copy from pathlib", "= engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 4, 2, 1], 'permute 4d", "work correctly.' assert not torch.equal(a, c), 'initialize_ with str spec did not work.'", "def test_namespace(): m = nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''): return ';'.join([f2(name=name) for i", "set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is b, 'get_default_parent result mismatchs set_default_parent.' def test_auto_name(): a", "did not work correctly.' y = engine.permute(x, 'CBD', 'DBC') assert list(y.shape) == [2,", "'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is b, 'get_default_parent result mismatchs set_default_parent.'", "not work correctly.' a = engine.activate(a, 'relu') b = F.relu(b) assert torch.equal(a, b),", "engine.get_default_parent() is b, 'get_default_parent result mismatchs set_default_parent.' def test_auto_name(): a = nn.Identity() for", "2], 'permute 2d tensor with str in_shape and str out_shape did not work", "initialized to all 1s.' 
y = m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y should be", "2, 3, 4], 'permute 5d tensor with str in_shape and str out_shape did", "with None out_shape did not work corretly.' y = engine.permute(x, 'BCD', [1, 0,", "= engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y, tuple) and len(y) == 3 and", "is triggered to reset names assert engine._auto_name('test', a) == 'test_1', 'forward_pre_hook did not", "correctly.' x = torch.randn(1, 2, 3, 4, 5) y = engine.permute(x, 'BDC', 'BCD')", "from warm import engine def test_set_get_default_parent(): a = nn.Identity() b = nn.Identity() engine.set_default_parent(a)", "result mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is b, 'get_default_parent result mismatchs set_default_parent.' def", "list(y.shape) == [1, 2, 3], 'permute tensor with None out_shape did not work", "tensor with list out_shape did not work corretly.' x = torch.randn(1, 2, 3,", "used.' assert set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs did not filter kw correctly.' def", "x = torch.randn(1, 2, 3) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) ==", "range(2)]) @engine.namespace def f2(name=''): return name s0, s1, s2 = [f1() for i", "engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 2, 1], 'permute 3d tensor with", "all 0s because we dropout everything.' assert list(y.shape) == [1, 2, 4], 'y", "is None, 'linear_1 should not have bias.' assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be", "with str in_shape and str out_shape did not work correctly.' def test_unused_kwargs(): kw", "as F import copy from pathlib import Path import sys sys.path.append(str(Path(__file__).parent.parent)) from warm", "and y[-1] == '2', 'tuple_out did not work correctly.' 
y = engine.forward(x, base_class=TripleOut,", "'BCD', [1, 0, 2]) assert list(y.shape) == [2, 1, 3], 'permute tensor with", "and str out_shape did not work correctly.' def test_unused_kwargs(): kw = {'unused1':0, 'unused2':0,", "import torch.nn as nn import torch.nn.functional as F import copy from pathlib import", "0s because we dropout everything.' assert list(y.shape) == [1, 2, 4], 'y should", "'initialize_ with str spec did not work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c),", "'permute 2d tensor with str in_shape and str out_shape did not work correctly.'", "TripleOut(nn.Module): # to test tuple_out def forward(self, x, b=1, c='2'): return x+b, x,", "nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_')", "= nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert", "work correctly.' y = engine.permute(x, 'BCD', None) assert list(y.shape) == [1, 2, 3],", "correctly.' y = engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] == 4,", "correctly.' assert not torch.equal(a, c), 'initialize_ with str spec did not work.' torch.manual_seed(1)", "3], 'permute tensor with list out_shape did not work corretly.' 
x = torch.randn(1,", "torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b), 'initialize_ with str spec did", "return ';'.join([f2(name=name) for i in range(2)]) @engine.namespace def f2(name=''): return name s0, s1,", "s2 = [f1() for i in range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2' assert s1", "= torch.randn(1, 2, 3) m = nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): # to test", "torch.randn(1, 2) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 2], 'permute", "= torch.randn(1, 2) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 2],", "def test_set_get_default_parent(): a = nn.Identity() b = nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is a,", "str in_shape and str out_shape did not work correctly.' x = torch.randn(1, 2,", "assert torch.equal(a, b), 'activate with str spec did not work correctly.' a =", "F import copy from pathlib import Path import sys sys.path.append(str(Path(__file__).parent.parent)) from warm import", "2, 3], 'permute 4d tensor with str in_shape and str out_shape did not", "forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not work correctly.' y = engine.forward(x, base_class=TripleOut,", "y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 5, 2, 3, 4],", "= nn.Identity() b = nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is a, 'get_default_parent result mismatchs", "out_shape did not work correctly.' def test_unused_kwargs(): kw = {'unused1':0, 'unused2':0, 'base_class':0} unused", "= torch.randn(1, 2, 3) m = TestModel() assert not engine.is_ready(m), 'is_ready did not", "= torch.randn(1, 2, 3, 4, 5) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape)", "[4, 1, 2, 3], 'permute 4d tensor with str in_shape and str out_shape", "'y should be all 0s because we dropout everything.' 
assert list(y.shape) == [1,", "def test_auto_name(): a = nn.Identity() for i in range(10): assert engine._auto_name('test', a) ==", "projection.' def test_forward(): x = torch.randn(1, 2, 3) m = nn.Module() engine.set_default_parent(m) class", "'new calls to _auto_name failed to increment name count.' a(None) # test if", "base_name='tri', tuple_out=True) assert isinstance(y, tuple) and len(y) == 3 and y[-1] == '2',", "x = torch.randn(1, 2, 3) m = nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): # to", "'prepare_model_ did not work correctly.' assert m.linear_1.bias is None, 'linear_1 should not have", "[1, 2, 3], 'permute tensor with None out_shape did not work corretly.' y", "= [f1() for i in range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2' assert s1 ==", "work correctly.' x = torch.randn(1, 2) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape)", "list(y.shape) == [1, 2], 'permute 2d tensor with str in_shape and str out_shape", "str in_shape and str out_shape did not work correctly.' y = engine.permute(x, 'BCD',", "correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y, tuple) and len(y) ==", "a = torch.randn(3, 4) b = copy.deepcopy(a) a = engine.activate(a, 'hardshrink') b =", "not engine.is_ready(m), 'is_ready did not work correctly.' engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_ did", "tensor with str in_shape and str out_shape did not work correctly.' x =", "x, c y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out did", "'BCD', None) assert list(y.shape) == [1, 2, 3], 'permute tensor with None out_shape", "[1, 0, 2]) assert list(y.shape) == [2, 1, 3], 'permute tensor with list", "2d tensor with str in_shape and str out_shape did not work correctly.' def", "correctly.' assert m.linear_1.bias is None, 'linear_1 should not have bias.' 
assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])),", "2, 4], 'y should have shape [1, 2, 4] after linear projection.' def", "in range(10): assert engine._auto_name('test', a) == f'test_{i+1}', 'new calls to _auto_name failed to", "'forward_pre_hook did not work.' def test_initialize(): a = nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3,", "and str out_shape did not work correctly.' y = engine.permute(x, 'CBD', 'DBC') assert", "= torch.randn(3, 4) b = copy.deepcopy(a) a = engine.activate(a, 'hardshrink') b = F.hardshrink(b)", "<reponame>adithyavis/pywarm<filename>tests/test_engine.py # 08-31-2019; \"\"\" Test cases for warm.engine. \"\"\" import torch import torch.nn", "i in range(2)]) @engine.namespace def f2(name=''): return name s0, s1, s2 = [f1()", "tensor with str in_shape and str out_shape did not work correctly.' y =", "= torch.randn(1, 2, 3) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3,", "assert not engine.is_ready(m), 'is_ready did not work correctly.' engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_", "torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c), 'initialize_ with function spec did not work", "nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b), 'initialize_ with str", "out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return x x = torch.randn(1, 2, 3)", "not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1] ==", "work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c), 'initialize_ with function spec did not", "'CDB') assert list(y.shape) == [4, 1, 2, 3], 'permute 4d tensor with str", "list(y.shape) == [4, 1, 2, 3], 'permute 4d tensor with str in_shape and", "work correctly.' 
def test_unused_kwargs(): kw = {'unused1':0, 'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw) assert", "a = nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1)", "x = engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False), # in_features, out_features, bias in_shape=None,", "== [2, 1, 3], 'permute tensor with list out_shape did not work corretly.'", "from pathlib import Path import sys sys.path.append(str(Path(__file__).parent.parent)) from warm import engine def test_set_get_default_parent():", "work correctly.' def test_activate(): a = torch.randn(3, 4) b = copy.deepcopy(a) a =", "== [1, 2, 4], 'y should have shape [1, 2, 4] after linear", "'relu') b = F.relu(b) assert torch.equal(a, b), 'activate with str spec did not", "2, 3], 'permute tensor with None out_shape did not work corretly.' y =", "torch.equal(a, c), 'initialize_ with str spec did not work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert", "assert 'base_class' not in unused, 'unused_kwargs leaks used.' assert set(unused.keys()) == {'unused1', 'unused2'},", "str out_shape did not work correctly.' x = torch.randn(1, 2) y = engine.permute(x,", "assert list(y.shape) == [1, 2, 3], 'permute tensor with None out_shape did not", "c y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out did not", "str in_shape and str out_shape did not work correctly.' y = engine.permute(x, 'DBC',", "assert y.shape[1] == 4, 'base_kw, infer_kw did not work correctly.' def test_namespace(): m", "did not work correctly.' 
def test_activate(): a = torch.randn(3, 4) b = copy.deepcopy(a)", "[3, 2, 1], 'permute 3d tensor with str in_shape and str out_shape did", "return x x = torch.randn(1, 2, 3) m = TestModel() assert not engine.is_ready(m),", "import engine def test_set_get_default_parent(): a = nn.Identity() b = nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent()", "did not work correctly.' a = engine.activate(a, 'relu') b = F.relu(b) assert torch.equal(a,", "did not work correctly.' y = engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert", "'base_class' not in unused, 'unused_kwargs leaks used.' assert set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs", "== [3, 4, 2, 1], 'permute 4d tensor with str in_shape and str", "c='2'): return x+b, x, c y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y,", "assert engine.get_default_parent() is b, 'get_default_parent result mismatchs set_default_parent.' def test_auto_name(): a = nn.Identity()", "class TestModel(nn.Module): def forward(self, x): x = engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False),", "correctly.' engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_ did not work correctly.' assert m.linear_1.bias is", "assert list(y.shape) == [2, 1, 3], 'permute tensor with list out_shape did not", "with list out_shape did not work corretly.' x = torch.randn(1, 2, 3, 4)", "engine.permute(x, 'CBD', 'DBC') assert list(y.shape) == [2, 1], 'permute 2d tensor with str", "engine.is_ready(m), 'is_ready did not work correctly.' engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_ did not", "def test_activate(): a = torch.randn(3, 4) b = copy.deepcopy(a) a = engine.activate(a, 'hardshrink')", "assert list(y.shape) == [1, 2], 'permute 2d tensor with str in_shape and str", "and str out_shape did not work correctly.' 
x = torch.randn(1, 2, 3, 4,", "2d tensor with str in_shape and str out_shape did not work correctly.' y", "a(None) # test if forward pre hook is triggered to reset names assert", "a = engine.activate(a, 'hardshrink') b = F.hardshrink(b) assert torch.equal(a, b), 'activate with str", "assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be initialized to all 1s.' y = m(x)", "infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] == 4, 'base_kw, infer_kw did not work correctly.' def", "torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be initialized to all 1s.' y = m(x) assert", "did not work.' def test_initialize(): a = nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3, 4))", "list(y.shape) == [3, 2, 1], 'permute 3d tensor with str in_shape and str", "test_activate(): a = torch.randn(3, 4) b = copy.deepcopy(a) a = engine.activate(a, 'hardshrink') b", "b = F.hardshrink(b) assert torch.equal(a, b), 'activate with str spec did not work", "assert engine.get_default_parent() is a, 'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is b,", "have bias.' assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be initialized to all 1s.' y", "assert torch.allclose(y, torch.Tensor([0.0])), 'activation did not work correctly.' y = engine.forward( x, base_class=nn.Linear,", "be all 0s because we dropout everything.' assert list(y.shape) == [1, 2, 4],", "did not work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c), 'initialize_ with function spec", "'permute tensor with None out_shape did not work corretly.' y = engine.permute(x, 'BCD',", "engine.unused_kwargs(kw) assert 'base_class' not in unused, 'unused_kwargs leaks used.' assert set(unused.keys()) == {'unused1',", "shape [1, 2, 4] after linear projection.' 
def test_forward(): x = torch.randn(1, 2,", "torch.nn.functional as F import copy from pathlib import Path import sys sys.path.append(str(Path(__file__).parent.parent)) from", "y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 2], 'permute 2d tensor", "test_auto_name(): a = nn.Identity() for i in range(10): assert engine._auto_name('test', a) == f'test_{i+1}',", "3d tensor with str in_shape and str out_shape did not work correctly.' y", "assert engine._auto_name('test', a) == 'test_1', 'forward_pre_hook did not work.' def test_initialize(): a =", "0, 2]) assert list(y.shape) == [2, 1, 3], 'permute tensor with list out_shape", "torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not work correctly.' y = engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0}))", "list(y.shape) == [3, 4, 2, 1], 'permute 4d tensor with str in_shape and", "and str out_shape did not work correctly.' y = engine.permute(x, 'BCD', None) assert", "filter kw correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self, x): x = engine.forward(x,", "with str spec did not work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c), 'initialize_", "def forward(self, x): x = engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False), # in_features,", "engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y, tuple) and len(y) == 3 and y[-1]", "test_initialize(): a = nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3, 4))", "work correctly.' a = engine.activate(a, 'relu') b = F.relu(b) assert torch.equal(a, b), 'activate", "with str in_shape and str out_shape did not work correctly.' y = engine.permute(x,", "in_shape and str out_shape did not work correctly.' 
def test_unused_kwargs(): kw = {'unused1':0,", "to test tuple_out def forward(self, x, b=1, c='2'): return x+b, x, c y", "= engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False), # in_features, out_features, bias in_shape=None, out_shape=None,", "test_unused_kwargs(): kw = {'unused1':0, 'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw) assert 'base_class' not in", "= copy.deepcopy(a) a = engine.activate(a, 'hardshrink') b = F.hardshrink(b) assert torch.equal(a, b), 'activate", "_auto_name failed to increment name count.' a(None) # test if forward pre hook", "for i in range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2' assert s1 == 'f1_2-f2_1;f1_2-f2_2' assert", "= engine.permute(x, 'DBC', 'CDB') assert list(y.shape) == [4, 1, 2, 3], 'permute 4d", "engine.set_default_parent(m) @engine.namespace def f1(name=''): return ';'.join([f2(name=name) for i in range(2)]) @engine.namespace def f2(name=''):", "b = nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b)", "== [4, 1, 2, 3], 'permute 4d tensor with str in_shape and str", "not work correctly.' y = engine.permute(x, 'CBD', 'DBC') assert list(y.shape) == [2, 1],", "None) assert list(y.shape) == [1, 2, 3], 'permute tensor with None out_shape did", "spec did not work correctly.' assert not torch.equal(a, c), 'initialize_ with str spec", "correctly.' x = torch.randn(1, 2) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) ==", "4], 'y should have shape [1, 2, 4] after linear projection.' def test_forward():", "str spec did not work correctly.' 
assert not torch.equal(a, c), 'initialize_ with str", ") return x x = torch.randn(1, 2, 3) m = TestModel() assert not", "test if forward pre hook is triggered to reset names assert engine._auto_name('test', a)", "= nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is a, 'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b) assert", "= engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out did not work correctly.'", "engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 2], 'permute 2d tensor with str", "correctly.' def test_namespace(): m = nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''): return ';'.join([f2(name=name) for", "assert torch.allclose(y, torch.Tensor([0.0])), 'y should be all 0s because we dropout everything.' assert", "correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not", "torch.Tensor([1.0])), 'linear_1.weight should be initialized to all 1s.' y = m(x) assert torch.allclose(y,", "1s.' y = m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y should be all 0s because", "'unused_kwargs did not filter kw correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self, x):", "'permute 4d tensor with str in_shape and str out_shape did not work correctly.'", "test tuple_out def forward(self, x, b=1, c='2'): return x+b, x, c y =", "b), 'activate with str spec did not work correctly.' def test_permute(): x =", "'permute 3d tensor with str in_shape and str out_shape did not work correctly.'", "should have shape [1, 2, 4] after linear projection.' 
def test_forward(): x =", "sys sys.path.append(str(Path(__file__).parent.parent)) from warm import engine def test_set_get_default_parent(): a = nn.Identity() b =", "[1, 2, 4], 'y should have shape [1, 2, 4] after linear projection.'", "F.relu(b) assert torch.equal(a, b), 'activate with str spec did not work correctly.' def", "did not work correctly.' assert m.linear_1.bias is None, 'linear_1 should not have bias.'", "name count.' a(None) # test if forward pre hook is triggered to reset", "1], 'permute 4d tensor with str in_shape and str out_shape did not work", "= engine.activate(a, 'hardshrink') b = F.hardshrink(b) assert torch.equal(a, b), 'activate with str spec", "str out_shape did not work correctly.' x = torch.randn(1, 2, 3, 4, 5)", "torch.Tensor([0.0])), 'y should be all 0s because we dropout everything.' assert list(y.shape) ==", "in range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2' assert s1 == 'f1_2-f2_1;f1_2-f2_2' assert s2 ==", "i in range(10): assert engine._auto_name('test', a) == f'test_{i+1}', 'new calls to _auto_name failed", "x = torch.randn(1, 2, 3, 4) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape)", "unused = engine.unused_kwargs(kw) assert 'base_class' not in unused, 'unused_kwargs leaks used.' assert set(unused.keys())", "engine.activate(a, 'relu') b = F.relu(b) assert torch.equal(a, b), 'activate with str spec did", "3], 'permute tensor with None out_shape did not work corretly.' y = engine.permute(x,", "work correctly.' engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_ did not work correctly.' assert m.linear_1.bias", "forward_kw={'c':3}, tuple_out=True) assert y[-1] == 3, 'forward_kw did not work correctly.' y =", "x = torch.randn(1, 2) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1,", "'permute 5d tensor with str in_shape and str out_shape did not work correctly.'", "work.' def test_initialize(): a = nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3, 4)) c =", "not work correctly.' 
x = torch.randn(1, 2) y = engine.permute(x, 'BDC', 'BCD') assert", "work corretly.' x = torch.randn(1, 2, 3, 4) y = engine.permute(x, 'BCD', 'DCB')", "== 4, 'base_kw, infer_kw did not work correctly.' def test_namespace(): m = nn.Module()", "= engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 2], 'permute 2d tensor with", "not work correctly.' assert not torch.equal(a, c), 'initialize_ with str spec did not", "= nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b), 'initialize_ with", "'base_class':0} unused = engine.unused_kwargs(kw) assert 'base_class' not in unused, 'unused_kwargs leaks used.' assert", "to all 1s.' y = m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y should be all", "work correctly.' def test_permute(): x = torch.randn(1, 2, 3) y = engine.permute(x, 'BCD',", "nn.init.normal_(b) assert torch.equal(a, b), 'initialize_ with str spec did not work correctly.' assert", "tensor with None out_shape did not work corretly.' y = engine.permute(x, 'BCD', [1,", "TestModel() assert not engine.is_ready(m), 'is_ready did not work correctly.' engine.prepare_model_(m, x) assert engine.is_ready(m),", "'base_kw, infer_kw did not work correctly.' def test_namespace(): m = nn.Module() engine.set_default_parent(m) @engine.namespace", "forward pre hook is triggered to reset names assert engine._auto_name('test', a) == 'test_1',", "y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not work", "'get_default_parent result mismatchs set_default_parent.' 
def test_auto_name(): a = nn.Identity() for i in range(10):", "and len(y) == 3 and y[-1] == '2', 'tuple_out did not work correctly.'", "nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False), # in_features, out_features, bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'},", "4) b = copy.deepcopy(a) a = engine.activate(a, 'hardshrink') b = F.hardshrink(b) assert torch.equal(a,", "base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not work correctly.' y = engine.forward(x,", "\"\"\" Test cases for warm.engine. \"\"\" import torch import torch.nn as nn import", "5) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 5, 2, 3,", "out_shape did not work correctly.' x = torch.randn(1, 2, 3, 4, 5) y", "engine.permute(x, 'BCD', [1, 0, 2]) assert list(y.shape) == [2, 1, 3], 'permute tensor", "m = nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''): return ';'.join([f2(name=name) for i in range(2)])", "import torch.nn.functional as F import copy from pathlib import Path import sys sys.path.append(str(Path(__file__).parent.parent))", "str spec did not work correctly.' a = engine.activate(a, 'relu') b = F.relu(b)", "== 3 and y[-1] == '2', 'tuple_out did not work correctly.' y =", "[f1() for i in range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2' assert s1 == 'f1_2-f2_1;f1_2-f2_2'", "# in_features, out_features, bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return x", "base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation did not work correctly.' y =", "in_shape and str out_shape did not work correctly.' x = torch.randn(1, 2, 3,", "correctly.' 
def test_unused_kwargs(): kw = {'unused1':0, 'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw) assert 'base_class'", "b = copy.deepcopy(a) a = engine.activate(a, 'hardshrink') b = F.hardshrink(b) assert torch.equal(a, b),", "m = TestModel() assert not engine.is_ready(m), 'is_ready did not work correctly.' engine.prepare_model_(m, x)", "a = engine.activate(a, 'relu') b = F.relu(b) assert torch.equal(a, b), 'activate with str", "engine def test_set_get_default_parent(): a = nn.Identity() b = nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is", "def test_initialize(): a = nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3,", "= F.relu(b) assert torch.equal(a, b), 'activate with str spec did not work correctly.'", "2, 3) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 2, 1],", "in_shape and str out_shape did not work correctly.' y = engine.permute(x, 'BCD', None)", "str out_shape did not work correctly.' def test_unused_kwargs(): kw = {'unused1':0, 'unused2':0, 'base_class':0}", "not work correctly.' def test_unused_kwargs(): kw = {'unused1':0, 'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw)", "y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1] == 3, 'forward_kw did", "[2, 1, 3], 'permute tensor with list out_shape did not work corretly.' x", "engine.is_ready(m), 'prepare_model_ did not work correctly.' assert m.linear_1.bias is None, 'linear_1 should not", "'linear_1 should not have bias.' assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be initialized to", "dropout everything.' assert list(y.shape) == [1, 2, 4], 'y should have shape [1,", "3, 'forward_kw did not work correctly.' 
y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert", "[2, 1], 'permute 2d tensor with str in_shape and str out_shape did not", "not work correctly.' y = engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1]", "3], 'permute 4d tensor with str in_shape and str out_shape did not work", "assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not work correctly.' y = engine.forward(x, base_class=TripleOut, activation=(F.dropout,", "a) == f'test_{i+1}', 'new calls to _auto_name failed to increment name count.' a(None)", "did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1]", "x, b=1, c='2'): return x+b, x, c y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False)", "y = engine.permute(x, 'CBD', 'DBC') assert list(y.shape) == [2, 1], 'permute 2d tensor", "should not have bias.' assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be initialized to all", "class TripleOut(nn.Module): # to test tuple_out def forward(self, x, b=1, c='2'): return x+b,", "y.shape[1] == 4, 'base_kw, infer_kw did not work correctly.' def test_namespace(): m =", "= nn.Identity() for i in range(10): assert engine._auto_name('test', a) == f'test_{i+1}', 'new calls", "# 08-31-2019; \"\"\" Test cases for warm.engine. \"\"\" import torch import torch.nn as", "'initialize_ with str spec did not work correctly.' 
assert not torch.equal(a, c), 'initialize_", "torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b), 'initialize_ with str spec did not work correctly.'", "def f1(name=''): return ';'.join([f2(name=name) for i in range(2)]) @engine.namespace def f2(name=''): return name", "engine.activate(a, 'hardshrink') b = F.hardshrink(b) assert torch.equal(a, b), 'activate with str spec did", "nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a,", "not torch.equal(a, c), 'initialize_ with str spec did not work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_)", "did not work correctly.' y = engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])),", "calls to _auto_name failed to increment name count.' a(None) # test if forward", "for i in range(10): assert engine._auto_name('test', a) == f'test_{i+1}', 'new calls to _auto_name", "= engine.activate(a, 'relu') b = F.relu(b) assert torch.equal(a, b), 'activate with str spec", "= TestModel() assert not engine.is_ready(m), 'is_ready did not work correctly.' engine.prepare_model_(m, x) assert", "did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y, tuple)", "pathlib import Path import sys sys.path.append(str(Path(__file__).parent.parent)) from warm import engine def test_set_get_default_parent(): a", "engine._auto_name('test', a) == f'test_{i+1}', 'new calls to _auto_name failed to increment name count.'", "5d tensor with str in_shape and str out_shape did not work correctly.' x", "not work corretly.' y = engine.permute(x, 'BCD', [1, 0, 2]) assert list(y.shape) ==", "and str out_shape did not work correctly.' 
x = torch.randn(1, 2) y =", "{'p':1.0}), ) return x x = torch.randn(1, 2, 3) m = TestModel() assert", "4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b), 'initialize_ with str spec", "base_arg=(x.shape[-1], 4, False), # in_features, out_features, bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}),", "Path import sys sys.path.append(str(Path(__file__).parent.parent)) from warm import engine def test_set_get_default_parent(): a = nn.Identity()", "y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 2, 1], 'permute 3d", "in_shape and str out_shape did not work correctly.' y = engine.permute(x, 'CBD', 'DBC')", "= {'unused1':0, 'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw) assert 'base_class' not in unused, 'unused_kwargs", "be initialized to all 1s.' y = m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y should", "in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return x x = torch.randn(1, 2,", "did not work correctly.' y = engine.permute(x, 'BCD', None) assert list(y.shape) == [1,", "None, 'linear_1 should not have bias.' assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be initialized", "result mismatchs set_default_parent.' def test_auto_name(): a = nn.Identity() for i in range(10): assert", "work correctly.' assert m.linear_1.bias is None, 'linear_1 should not have bias.' assert torch.allclose(m.linear_1.weight,", "everything.' assert list(y.shape) == [1, 2, 4], 'y should have shape [1, 2,", "4, 2, 1], 'permute 4d tensor with str in_shape and str out_shape did", "work correctly.' 
def test_namespace(): m = nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''): return ';'.join([f2(name=name)", "# test if forward pre hook is triggered to reset names assert engine._auto_name('test',", "'linear', base_arg=(x.shape[-1], 4, False), # in_features, out_features, bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout,", "work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1] == 3,", "4, 'base_kw, infer_kw did not work correctly.' def test_namespace(): m = nn.Module() engine.set_default_parent(m)", "assert set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs did not filter kw correctly.' def test_prepare_model_is_ready():", "forward(self, x, b=1, c='2'): return x+b, x, c y = engine.forward(x, base_class=TripleOut, base_name='tri',", "triggered to reset names assert engine._auto_name('test', a) == 'test_1', 'forward_pre_hook did not work.'", "in_shape and str out_shape did not work correctly.' x = torch.randn(1, 2) y", "torch.equal(a, b), 'activate with str spec did not work correctly.' a = engine.activate(a,", "'BCD') assert list(y.shape) == [1, 2], 'permute 2d tensor with str in_shape and", "correctly.' y = engine.permute(x, 'CBD', 'DBC') assert list(y.shape) == [2, 1], 'permute 2d", "to _auto_name failed to increment name count.' a(None) # test if forward pre", "str in_shape and str out_shape did not work correctly.' x = torch.randn(1, 2)", "'permute tensor with list out_shape did not work corretly.' x = torch.randn(1, 2,", "= m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y should be all 0s because we dropout", "engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_ did not work correctly.' assert m.linear_1.bias is None,", "str spec did not work.' 
torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c), 'initialize_ with", "base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out did not work correctly.' y = engine.forward(x,", "f'test_{i+1}', 'new calls to _auto_name failed to increment name count.' a(None) # test", "range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2' assert s1 == 'f1_2-f2_1;f1_2-f2_2' assert s2 == 'f1_3-f2_1;f1_3-f2_2'", "str out_shape did not work correctly.' y = engine.permute(x, 'DBC', 'CDB') assert list(y.shape)", "1, 3], 'permute tensor with list out_shape did not work corretly.' x =", "hook is triggered to reset names assert engine._auto_name('test', a) == 'test_1', 'forward_pre_hook did", "base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return x x = torch.randn(1, 2, 3) m", "= engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1] == 3, 'forward_kw did not", "s1, s2 = [f1() for i in range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2' assert", "did not work corretly.' x = torch.randn(1, 2, 3, 4) y = engine.permute(x,", "x+b, x, c y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out", "for warm.engine. \"\"\" import torch import torch.nn as nn import torch.nn.functional as F", "assert list(y.shape) == [1, 2, 4], 'y should have shape [1, 2, 4]", "engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert y[-1] == 3, 'forward_kw did not work", "correctly.' y = engine.permute(x, 'DBC', 'CDB') assert list(y.shape) == [4, 1, 2, 3],", "engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c), 'initialize_ with function spec did not work correctly.'", "5, 2, 3, 4], 'permute 5d tensor with str in_shape and str out_shape", "== '2', 'tuple_out did not work correctly.' 
y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3},", "spec did not work correctly.' def test_activate(): a = torch.randn(3, 4) b =", "is b, 'get_default_parent result mismatchs set_default_parent.' def test_auto_name(): a = nn.Identity() for i", "m.linear_1.bias is None, 'linear_1 should not have bias.' assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should", "in range(2)]) @engine.namespace def f2(name=''): return name s0, s1, s2 = [f1() for", "import torch import torch.nn as nn import torch.nn.functional as F import copy from", "2) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 2], 'permute 2d", "str in_shape and str out_shape did not work correctly.' def test_unused_kwargs(): kw =", "y = engine.permute(x, 'BCD', [1, 0, 2]) assert list(y.shape) == [2, 1, 3],", "function spec did not work correctly.' def test_activate(): a = torch.randn(3, 4) b", "mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is b, 'get_default_parent result mismatchs set_default_parent.' def test_auto_name():", "all 1s.' y = m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y should be all 0s", "y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y, tuple) and len(y) == 3", "torch.randn(1, 2, 3, 4) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3,", "tuple_out=True) assert isinstance(y, tuple) and len(y) == 3 and y[-1] == '2', 'tuple_out", "increment name count.' a(None) # test if forward pre hook is triggered to", "mismatchs set_default_parent.' def test_auto_name(): a = nn.Identity() for i in range(10): assert engine._auto_name('test',", "b = F.relu(b) assert torch.equal(a, b), 'activate with str spec did not work", "out_features, bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return x x =", "out_shape did not work correctly.' 
y = engine.permute(x, 'DBC', 'CDB') assert list(y.shape) ==", "to increment name count.' a(None) # test if forward pre hook is triggered", "did not work corretly.' y = engine.permute(x, 'BCD', [1, 0, 2]) assert list(y.shape)", "did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])),", "with str spec did not work correctly.' def test_permute(): x = torch.randn(1, 2,", "work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did", "assert not torch.equal(a, c), 'initialize_ with str spec did not work.' torch.manual_seed(1) engine.initialize_(c,", "4], 'permute 5d tensor with str in_shape and str out_shape did not work", "bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return x x = torch.randn(1,", "y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out did not work", "nn.Identity() b = nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is a, 'get_default_parent result mismatchs set_default_parent.'", "3, 4, 5) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 5,", "str spec did not work correctly.' def test_permute(): x = torch.randn(1, 2, 3)", "base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out did not work correctly.' y =", "set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs did not filter kw correctly.' def test_prepare_model_is_ready(): class", "not work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c), 'initialize_ with function spec did", "4d tensor with str in_shape and str out_shape did not work correctly.' x", "not have bias.' 
assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be initialized to all 1s.'", "3) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 2, 1], 'permute", "';'.join([f2(name=name) for i in range(2)]) @engine.namespace def f2(name=''): return name s0, s1, s2", "len(y) == 3 and y[-1] == '2', 'tuple_out did not work correctly.' y", "work corretly.' y = engine.permute(x, 'BCD', [1, 0, 2]) assert list(y.shape) == [2,", "correctly.' y = engine.permute(x, 'BCD', None) assert list(y.shape) == [1, 2, 3], 'permute", "assert list(y.shape) == [1, 5, 2, 3, 4], 'permute 5d tensor with str", "did not work correctly.' x = torch.randn(1, 2, 3, 4, 5) y =", "'hardshrink') b = F.hardshrink(b) assert torch.equal(a, b), 'activate with str spec did not", "2, 3) m = nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): # to test tuple_out def", "engine.set_default_parent(m) class TripleOut(nn.Module): # to test tuple_out def forward(self, x, b=1, c='2'): return", "as nn import torch.nn.functional as F import copy from pathlib import Path import", "import Path import sys sys.path.append(str(Path(__file__).parent.parent)) from warm import engine def test_set_get_default_parent(): a =", "'BCD') assert list(y.shape) == [1, 5, 2, 3, 4], 'permute 5d tensor with", "correctly.' a = engine.activate(a, 'relu') b = F.relu(b) assert torch.equal(a, b), 'activate with", "== {'unused1', 'unused2'}, 'unused_kwargs did not filter kw correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module):", "torch.randn(3, 4) b = copy.deepcopy(a) a = engine.activate(a, 'hardshrink') b = F.hardshrink(b) assert", "engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out did not work correctly.' 
y", "= nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): # to test tuple_out def forward(self, x, b=1,", "'BCD', 'DCB') assert list(y.shape) == [3, 2, 1], 'permute 3d tensor with str", "y = engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] == 4, 'base_kw,", "engine.permute(x, 'BCD', None) assert list(y.shape) == [1, 2, 3], 'permute tensor with None", "y[-1] == '2', 'tuple_out did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri',", "str in_shape and str out_shape did not work correctly.' y = engine.permute(x, 'CBD',", "= engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not work correctly.'", "warm import engine def test_set_get_default_parent(): a = nn.Identity() b = nn.Identity() engine.set_default_parent(a) assert", "base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y, tuple) and len(y) == 3 and y[-1] ==", "name s0, s1, s2 = [f1() for i in range(3)] assert s0 ==", "torch.Tensor), 'tuple_out did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert", "= engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 5, 2, 3, 4], 'permute", "b=1, c='2'): return x+b, x, c y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert", "None out_shape did not work corretly.' y = engine.permute(x, 'BCD', [1, 0, 2])", "out_shape did not work corretly.' y = engine.permute(x, 'BCD', [1, 0, 2]) assert", "reset names assert engine._auto_name('test', a) == 'test_1', 'forward_pre_hook did not work.' def test_initialize():", "engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not work correctly.' 
y", "nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''): return ';'.join([f2(name=name) for i in range(2)]) @engine.namespace def", "work correctly.' x = torch.randn(1, 2, 3, 4, 5) y = engine.permute(x, 'BDC',", "correctly.' def test_activate(): a = torch.randn(3, 4) b = copy.deepcopy(a) a = engine.activate(a,", "not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y, tuple) and", "pre hook is triggered to reset names assert engine._auto_name('test', a) == 'test_1', 'forward_pre_hook", "4, 5) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 5, 2,", "test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self, x): x = engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4,", "test_forward(): x = torch.randn(1, 2, 3) m = nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): #", "c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a, 'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b), 'initialize_", "b = nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is a, 'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b)", "list(y.shape) == [2, 1], 'permute 2d tensor with str in_shape and str out_shape", "[1, 5, 2, 3, 4], 'permute 5d tensor with str in_shape and str", "with function spec did not work correctly.' def test_activate(): a = torch.randn(3, 4)", "for i in range(2)]) @engine.namespace def f2(name=''): return name s0, s1, s2 =", "engine.get_default_parent() is a, 'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is b, 'get_default_parent", "engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1, 5, 2, 3, 4], 'permute 5d", "'initialize_ with function spec did not work correctly.' 
def test_activate(): a = torch.randn(3,", "def test_permute(): x = torch.randn(1, 2, 3) y = engine.permute(x, 'BCD', 'DCB') assert", "in unused, 'unused_kwargs leaks used.' assert set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs did not", "warm.engine. \"\"\" import torch import torch.nn as nn import torch.nn.functional as F import", "def forward(self, x, b=1, c='2'): return x+b, x, c y = engine.forward(x, base_class=TripleOut,", "list out_shape did not work corretly.' x = torch.randn(1, 2, 3, 4) y", "not filter kw correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self, x): x =", "2, 4] after linear projection.' def test_forward(): x = torch.randn(1, 2, 3) m", "== [2, 1], 'permute 2d tensor with str in_shape and str out_shape did", "'activate with str spec did not work correctly.' def test_permute(): x = torch.randn(1,", "assert y[-1] == 3, 'forward_kw did not work correctly.' y = engine.forward(x, base_class=TripleOut,", "'2', 'tuple_out did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True)", "str out_shape did not work correctly.' y = engine.permute(x, 'CBD', 'DBC') assert list(y.shape)", "list(y.shape) == [1, 2, 4], 'y should have shape [1, 2, 4] after", "correctly.' def test_permute(): x = torch.randn(1, 2, 3) y = engine.permute(x, 'BCD', 'DCB')", "base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] == 4, 'base_kw, infer_kw did not work correctly.'", "engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False), # in_features, out_features, bias in_shape=None, out_shape=None, base_shape=None,", "tuple_out def forward(self, x, b=1, c='2'): return x+b, x, c y = engine.forward(x,", "== [1, 5, 2, 3, 4], 'permute 5d tensor with str in_shape and", "list(y.shape) == [1, 5, 2, 3, 4], 'permute 5d tensor with str in_shape", "not work correctly.' 
engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_ did not work correctly.' assert", "x = torch.randn(1, 2, 3, 4, 5) y = engine.permute(x, 'BDC', 'BCD') assert", "work correctly.' y = engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation did", "Test cases for warm.engine. \"\"\" import torch import torch.nn as nn import torch.nn.functional", "def f2(name=''): return name s0, s1, s2 = [f1() for i in range(3)]", "return x+b, x, c y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=False) assert isinstance(y, torch.Tensor),", "a = nn.Identity() b = nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is a, 'get_default_parent result", "spec did not work correctly.' def test_permute(): x = torch.randn(1, 2, 3) y", "work correctly.' y = engine.permute(x, 'DBC', 'CDB') assert list(y.shape) == [4, 1, 2,", "F.hardshrink(b) assert torch.equal(a, b), 'activate with str spec did not work correctly.' a", "3, 4], 'permute 5d tensor with str in_shape and str out_shape did not", "{'unused1':0, 'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw) assert 'base_class' not in unused, 'unused_kwargs leaks", "# to test tuple_out def forward(self, x, b=1, c='2'): return x+b, x, c", "assert engine._auto_name('test', a) == f'test_{i+1}', 'new calls to _auto_name failed to increment name", "x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] == 4, 'base_kw, infer_kw did not", "b), 'initialize_ with str spec did not work correctly.' assert not torch.equal(a, c),", "m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y should be all 0s because we dropout everything.'", "did not work correctly.' 
engine.prepare_model_(m, x) assert engine.is_ready(m), 'prepare_model_ did not work correctly.'", "== [1, 2], 'permute 2d tensor with str in_shape and str out_shape did", "assert list(y.shape) == [3, 4, 2, 1], 'permute 4d tensor with str in_shape", "kw correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self, x): x = engine.forward(x, nn.Linear,", "did not work correctly.' def test_unused_kwargs(): kw = {'unused1':0, 'unused2':0, 'base_class':0} unused =", "import copy from pathlib import Path import sys sys.path.append(str(Path(__file__).parent.parent)) from warm import engine", "c), 'initialize_ with str spec did not work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a,", "not work correctly.' y = engine.permute(x, 'DBC', 'CDB') assert list(y.shape) == [4, 1,", "b), 'activate with str spec did not work correctly.' a = engine.activate(a, 'relu')", "base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x, torch.Tensor([2.0])), 'forward_arg did not work correctly.' y =", "assert torch.equal(a, b), 'activate with str spec did not work correctly.' def test_permute():", "{'unused1', 'unused2'}, 'unused_kwargs did not filter kw correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module): def", "assert m.linear_1.bias is None, 'linear_1 should not have bias.' assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight", "'tuple_out did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y,", "torch.nn as nn import torch.nn.functional as F import copy from pathlib import Path", "a = nn.Identity() for i in range(10): assert engine._auto_name('test', a) == f'test_{i+1}', 'new", "torch.allclose(y, torch.Tensor([0.0])), 'activation did not work correctly.' 
y = engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4},", "[1, 2], 'permute 2d tensor with str in_shape and str out_shape did not", "activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation did not work correctly.' y = engine.forward(", "4d tensor with str in_shape and str out_shape did not work correctly.' y", "did not filter kw correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self, x): x", "= engine.permute(x, 'BCD', [1, 0, 2]) assert list(y.shape) == [2, 1, 3], 'permute", "= engine.permute(x, 'BCD', None) assert list(y.shape) == [1, 2, 3], 'permute tensor with", "infer_kw did not work correctly.' def test_namespace(): m = nn.Module() engine.set_default_parent(m) @engine.namespace def", "torch.randn(1, 2, 3, 4, 5) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) ==", "4] after linear projection.' def test_forward(): x = torch.randn(1, 2, 3) m =", "= nn.Parameter(torch.zeros(3, 4)) b = nn.Parameter(torch.zeros(3, 4)) c = nn.Parameter(torch.zeros(3, 4)) torch.manual_seed(1) engine.initialize_(a,", "nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): # to test tuple_out def forward(self, x, b=1, c='2'):", "test_set_get_default_parent(): a = nn.Identity() b = nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is a, 'get_default_parent", "'DBC') assert list(y.shape) == [2, 1], 'permute 2d tensor with str in_shape and", "1], 'permute 2d tensor with str in_shape and str out_shape did not work", "3, 4) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 4, 2,", "copy.deepcopy(a) a = engine.activate(a, 'hardshrink') b = F.hardshrink(b) assert torch.equal(a, b), 'activate with", "with str in_shape and str out_shape did not work correctly.' x = torch.randn(1,", "unused, 'unused_kwargs leaks used.' assert set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs did not filter", "not work corretly.' 
x = torch.randn(1, 2, 3, 4) y = engine.permute(x, 'BCD',", "assert torch.equal(a, c), 'initialize_ with function spec did not work correctly.' def test_activate():", "torch.Tensor([0.0])), 'activation did not work correctly.' y = engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'},", "isinstance(y, tuple) and len(y) == 3 and y[-1] == '2', 'tuple_out did not", "have shape [1, 2, 4] after linear projection.' def test_forward(): x = torch.randn(1,", "torch.randn(1, 2, 3) m = nn.Module() engine.set_default_parent(m) class TripleOut(nn.Module): # to test tuple_out", "not work correctly.' x = torch.randn(1, 2, 3, 4, 5) y = engine.permute(x,", "copy from pathlib import Path import sys sys.path.append(str(Path(__file__).parent.parent)) from warm import engine def", "= engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 2, 1], 'permute 3d tensor", "= engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] == 4, 'base_kw, infer_kw", "engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC') assert y.shape[1] == 4, 'base_kw, infer_kw did", "did not work correctly.' def test_namespace(): m = nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''):", "tuple_out=False) assert isinstance(y, torch.Tensor), 'tuple_out did not work correctly.' y = engine.forward(x, base_class=TripleOut,", "out_shape did not work correctly.' y = engine.permute(x, 'BCD', None) assert list(y.shape) ==", "names assert engine._auto_name('test', a) == 'test_1', 'forward_pre_hook did not work.' def test_initialize(): a", "engine.set_default_parent(a) assert engine.get_default_parent() is a, 'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is", "y[-1] == 3, 'forward_kw did not work correctly.' 
y = engine.forward(x, base_class=TripleOut, base_name='tri',", "2, 3, 4) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 4,", "f1(name=''): return ';'.join([f2(name=name) for i in range(2)]) @engine.namespace def f2(name=''): return name s0,", "2, 3, 4, 5) y = engine.permute(x, 'BDC', 'BCD') assert list(y.shape) == [1,", "= engine.permute(x, 'CBD', 'DBC') assert list(y.shape) == [2, 1], 'permute 2d tensor with", "range(10): assert engine._auto_name('test', a) == f'test_{i+1}', 'new calls to _auto_name failed to increment", "nn.init.normal_) assert torch.equal(a, c), 'initialize_ with function spec did not work correctly.' def", "bias.' assert torch.allclose(m.linear_1.weight, torch.Tensor([1.0])), 'linear_1.weight should be initialized to all 1s.' y =", "2, 1], 'permute 3d tensor with str in_shape and str out_shape did not", "not work correctly.' y = engine.permute(x, 'BCD', None) assert list(y.shape) == [1, 2,", "3) m = TestModel() assert not engine.is_ready(m), 'is_ready did not work correctly.' engine.prepare_model_(m,", "list(y.shape) == [2, 1, 3], 'permute tensor with list out_shape did not work", "failed to increment name count.' a(None) # test if forward pre hook is", "4, False), # in_features, out_features, bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), )", "tuple) and len(y) == 3 and y[-1] == '2', 'tuple_out did not work", "'forward_arg did not work correctly.' y = engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y,", "nn.Identity() for i in range(10): assert engine._auto_name('test', a) == f'test_{i+1}', 'new calls to", "[1, 2, 4] after linear projection.' def test_forward(): x = torch.randn(1, 2, 3)", "x = torch.randn(1, 2, 3) m = TestModel() assert not engine.is_ready(m), 'is_ready did", "torch.Tensor([2.0])), 'forward_arg did not work correctly.' 
y = engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert", "tensor with str in_shape and str out_shape did not work correctly.' def test_unused_kwargs():", "'BDC', 'BCD') assert list(y.shape) == [1, 5, 2, 3, 4], 'permute 5d tensor", "import sys sys.path.append(str(Path(__file__).parent.parent)) from warm import engine def test_set_get_default_parent(): a = nn.Identity() b", "in_features, out_features, bias in_shape=None, out_shape=None, base_shape=None, initialization={'weight':'ones_'}, activation=(F.dropout, {'p':1.0}), ) return x x", "and str out_shape did not work correctly.' y = engine.permute(x, 'DBC', 'CDB') assert", "assert list(y.shape) == [4, 1, 2, 3], 'permute 4d tensor with str in_shape", "spec did not work correctly.' a = engine.activate(a, 'relu') b = F.relu(b) assert", "return name s0, s1, s2 = [f1() for i in range(3)] assert s0", "we dropout everything.' assert list(y.shape) == [1, 2, 4], 'y should have shape", "is a, 'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent() is b, 'get_default_parent result", "count.' a(None) # test if forward pre hook is triggered to reset names", "2, 1], 'permute 4d tensor with str in_shape and str out_shape did not", "def test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self, x): x = engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1],", "s0, s1, s2 = [f1() for i in range(3)] assert s0 == 'f1_1-f2_1;f1_1-f2_2'", "'DCB') assert list(y.shape) == [3, 4, 2, 1], 'permute 4d tensor with str", "def test_unused_kwargs(): kw = {'unused1':0, 'unused2':0, 'base_class':0} unused = engine.unused_kwargs(kw) assert 'base_class' not", "'CBD', 'DBC') assert list(y.shape) == [2, 1], 'permute 2d tensor with str in_shape", "'y should have shape [1, 2, 4] after linear projection.' 
def test_forward(): x", "4) y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 4, 2, 1],", "@engine.namespace def f1(name=''): return ';'.join([f2(name=name) for i in range(2)]) @engine.namespace def f2(name=''): return", "engine.set_default_parent(b) assert engine.get_default_parent() is b, 'get_default_parent result mismatchs set_default_parent.' def test_auto_name(): a =", "out_shape did not work correctly.' y = engine.permute(x, 'CBD', 'DBC') assert list(y.shape) ==", "assert torch.equal(a, b), 'initialize_ with str spec did not work correctly.' assert not", "'tuple_out did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_kw={'c':3}, tuple_out=True) assert", "y = m(x) assert torch.allclose(y, torch.Tensor([0.0])), 'y should be all 0s because we", "because we dropout everything.' assert list(y.shape) == [1, 2, 4], 'y should have", "== f'test_{i+1}', 'new calls to _auto_name failed to increment name count.' a(None) #", "torch.equal(a, b), 'activate with str spec did not work correctly.' def test_permute(): x", "not work correctly.' def test_namespace(): m = nn.Module() engine.set_default_parent(m) @engine.namespace def f1(name=''): return", "x x = torch.randn(1, 2, 3) m = TestModel() assert not engine.is_ready(m), 'is_ready", "tuple_out=True) assert y[-1] == 3, 'forward_kw did not work correctly.' y = engine.forward(x,", "1, 2, 3], 'permute 4d tensor with str in_shape and str out_shape did", "y = engine.forward(x, base_class=TripleOut, activation=(F.dropout, {'p':1.0})) assert torch.allclose(y, torch.Tensor([0.0])), 'activation did not work", "08-31-2019; \"\"\" Test cases for warm.engine. \"\"\" import torch import torch.nn as nn", "engine.permute(x, 'DBC', 'CDB') assert list(y.shape) == [4, 1, 2, 3], 'permute 4d tensor", "'test_1', 'forward_pre_hook did not work.' def test_initialize(): a = nn.Parameter(torch.zeros(3, 4)) b =", "assert isinstance(y, torch.Tensor), 'tuple_out did not work correctly.' 
y = engine.forward(x, base_class=TripleOut, base_name='tri',", "'DBC', 'CDB') assert list(y.shape) == [4, 1, 2, 3], 'permute 4d tensor with", "2, 3) m = TestModel() assert not engine.is_ready(m), 'is_ready did not work correctly.'", "'linear_1.weight should be initialized to all 1s.' y = m(x) assert torch.allclose(y, torch.Tensor([0.0])),", "'unused_kwargs leaks used.' assert set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs did not filter kw", "if forward pre hook is triggered to reset names assert engine._auto_name('test', a) ==", "= engine.unused_kwargs(kw) assert 'base_class' not in unused, 'unused_kwargs leaks used.' assert set(unused.keys()) ==", "y = engine.permute(x, 'BCD', None) assert list(y.shape) == [1, 2, 3], 'permute tensor", "nn.Identity() engine.set_default_parent(a) assert engine.get_default_parent() is a, 'get_default_parent result mismatchs set_default_parent.' engine.set_default_parent(b) assert engine.get_default_parent()", "with str spec did not work correctly.' assert not torch.equal(a, c), 'initialize_ with", "@engine.namespace def f2(name=''): return name s0, s1, s2 = [f1() for i in", "work correctly.' y = engine.permute(x, 'CBD', 'DBC') assert list(y.shape) == [2, 1], 'permute", "corretly.' x = torch.randn(1, 2, 3, 4) y = engine.permute(x, 'BCD', 'DCB') assert", "y = engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 4, 2, 1], 'permute", "linear projection.' def test_forward(): x = torch.randn(1, 2, 3) m = nn.Module() engine.set_default_parent(m)", "x): x = engine.forward(x, nn.Linear, 'linear', base_arg=(x.shape[-1], 4, False), # in_features, out_features, bias", "'forward_kw did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', forward_arg=(2.0,)) assert torch.allclose(y-x,", "not work correctly.' 
def test_activate(): a = torch.randn(3, 4) b = copy.deepcopy(a) a", "'normal_') torch.manual_seed(1) nn.init.normal_(b) assert torch.equal(a, b), 'initialize_ with str spec did not work", "'activation did not work correctly.' y = engine.forward( x, base_class=nn.Linear, base_kw={'out_features':4}, infer_kw={'in_features':'C'}, base_shape='BDC')", "isinstance(y, torch.Tensor), 'tuple_out did not work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True)", "work correctly.' y = engine.forward(x, base_class=TripleOut, base_name='tri', tuple_out=True) assert isinstance(y, tuple) and len(y)", "\"\"\" import torch import torch.nn as nn import torch.nn.functional as F import copy", "'BCD', 'DCB') assert list(y.shape) == [3, 4, 2, 1], 'permute 4d tensor with", "assert isinstance(y, tuple) and len(y) == 3 and y[-1] == '2', 'tuple_out did", "did not work correctly.' x = torch.randn(1, 2) y = engine.permute(x, 'BDC', 'BCD')", "out_shape did not work corretly.' x = torch.randn(1, 2, 3, 4) y =", "did not work correctly.' assert not torch.equal(a, c), 'initialize_ with str spec did", "not in unused, 'unused_kwargs leaks used.' assert set(unused.keys()) == {'unused1', 'unused2'}, 'unused_kwargs did", "engine.permute(x, 'BCD', 'DCB') assert list(y.shape) == [3, 4, 2, 1], 'permute 4d tensor", "correctly.' def test_prepare_model_is_ready(): class TestModel(nn.Module): def forward(self, x): x = engine.forward(x, nn.Linear, 'linear',", "out_shape did not work correctly.' x = torch.randn(1, 2) y = engine.permute(x, 'BDC',", "assert engine.is_ready(m), 'prepare_model_ did not work correctly.' assert m.linear_1.bias is None, 'linear_1 should", "== [1, 2, 3], 'permute tensor with None out_shape did not work corretly.'", "spec did not work.' torch.manual_seed(1) engine.initialize_(c, nn.init.normal_) assert torch.equal(a, c), 'initialize_ with function", "corretly.' 
y = engine.permute(x, 'BCD', [1, 0, 2]) assert list(y.shape) == [2, 1,", "f2(name=''): return name s0, s1, s2 = [f1() for i in range(3)] assert", "[3, 4, 2, 1], 'permute 4d tensor with str in_shape and str out_shape", "'DCB') assert list(y.shape) == [3, 2, 1], 'permute 3d tensor with str in_shape", "not work correctly.' def test_permute(): x = torch.randn(1, 2, 3) y = engine.permute(x,", "should be all 0s because we dropout everything.' assert list(y.shape) == [1, 2,", "torch.equal(a, b), 'initialize_ with str spec did not work correctly.' assert not torch.equal(a," ]
[ "to return from FileDiffACLHook implementations. result (bool): A resulting approval value to check.", "FileDiffACLHook multiple with False and None\"\"\" self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self, accessible_values, result):", "BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users'] def setUp(self): super(FileDiffACLHookTests, self).setUp()", "the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users'] def setUp(self): super(FileDiffACLHookTests, self).setUp() self.user = self.create_user()", "check. \"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in accessible_values: hook = FileDiffACLHook(extension=self.extension) self.spy_on(hook.is_accessible,", "implementations. result (bool): A resulting approval value to check. \"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True):", "False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with True and None\"\"\" self._test_hook_approval_sequence([True, None],", "def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple with False and None\"\"\" self._test_hook_approval_sequence([False, None], False) def", "self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval with False result\"\"\" self._test_hook_approval_sequence([False], False)", "True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval with False result\"\"\" self._test_hook_approval_sequence([False], False) def", "self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval with True result\"\"\" self._test_hook_approval_sequence([True],", "= ['test_scmtools', 'test_users'] def 
setUp(self): super(FileDiffACLHookTests, self).setUp() self.user = self.create_user() self.review_request = self.create_review_request(", "from reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature class", "super(FileDiffACLHookTests, self).setUp() self.user = self.create_user() self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter)", "self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval with True result\"\"\" self._test_hook_approval_sequence([True], True) def", "for reviewboard.extensions.hooks.FileDiffACLHook.\"\"\" import kgb from djblets.features.testing import override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook from", "from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for", "True and False\"\"\" self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with True", "sequence of FileDiffACLHook approval results. Args: accessible_values (list of bool): A list of", "None], False) def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a sequence of FileDiffACLHook approval results.", "values to return from FileDiffACLHook implementations. 
result (bool): A resulting approval value to", "False and None\"\"\" self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a sequence", "FileDiffACLHook implementations. result (bool): A resulting approval value to check. \"\"\" with override_feature_check(DiffACLsFeature.feature_id,", "test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple with False and None\"\"\" self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self,", "False) def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a sequence of FileDiffACLHook approval results. Args:", "test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval with None result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing", "kgb from djblets.features.testing import override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase", "= self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval", "FileDiffACLHook multiple with True and None\"\"\" self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook", "override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature", "and None\"\"\" self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a sequence 
of", "A resulting approval value to check. \"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in", "import override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features import", "self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a sequence of FileDiffACLHook approval", "accessible_values, result): \"\"\"Test a sequence of FileDiffACLHook approval results. Args: accessible_values (list of", "multiple with True and False\"\"\" self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple", "import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users']", "reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency,", "(list of bool): A list of the values to return from FileDiffACLHook implementations.", "self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with True and None\"\"\" self._test_hook_approval_sequence([True,", "\"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in accessible_values: hook = FileDiffACLHook(extension=self.extension) self.spy_on(hook.is_accessible, op=kgb.SpyOpReturn(value))", "and None\"\"\" self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple with False and", "_test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a sequence of 
FileDiffACLHook approval results. Args: accessible_values (list", "self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic", "\"\"\"Testing FileDiffACLHook basic approval with False result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook", "None\"\"\" self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple with False and None\"\"\"", "FileDiffACLHook basic approval with None result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic", "tests for reviewboard.extensions.hooks.FileDiffACLHook.\"\"\" import kgb from djblets.features.testing import override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook", "def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval with None result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self):", "of bool): A list of the values to return from FileDiffACLHook implementations. result", "A list of the values to return from FileDiffACLHook implementations. result (bool): A", "draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval with True result\"\"\" self._test_hook_approval_sequence([True], True)", "\"\"\"Test a sequence of FileDiffACLHook approval results. Args: accessible_values (list of bool): A", "resulting approval value to check. 
\"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in accessible_values:", "FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests", "basic approval with False result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with", "= self.create_user() self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing", "def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval with True result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self):", "FileDiffACLHook basic approval with True result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic", "False result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with True and False\"\"\"", "def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with True and False\"\"\" self._test_hook_approval_sequence([True, False], False) def", "BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures", "def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a sequence of FileDiffACLHook approval results. 
Args: accessible_values", "self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval with", "approval results. Args: accessible_values (list of bool): A list of the values to", "return from FileDiffACLHook implementations. result (bool): A resulting approval value to check. \"\"\"", "True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval with None result\"\"\" self._test_hook_approval_sequence([None], True) def", "djblets.features.testing import override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features", "FileDiffACLHook multiple with True and False\"\"\" self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook", "Args: accessible_values (list of bool): A list of the values to return from", "class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users'] def setUp(self):", "'test_users'] def setUp(self): super(FileDiffACLHookTests, self).setUp() self.user = self.create_user() self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter)", "\"\"\"Testing FileDiffACLHook basic approval with True result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook", "basic approval with True result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval", 
"create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval with True", "fixtures = ['test_scmtools', 'test_users'] def setUp(self): super(FileDiffACLHookTests, self).setUp() self.user = self.create_user() self.review_request =", "test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with True and None\"\"\" self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self):", "def setUp(self): super(FileDiffACLHookTests, self).setUp() self.user = self.create_user() self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request,", "from FileDiffACLHook implementations. result (bool): A resulting approval value to check. \"\"\" with", "import BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\"", "with True and False\"\"\" self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with", "to check. 
\"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in accessible_values: hook = FileDiffACLHook(extension=self.extension)", "accessible_values (list of bool): A list of the values to return from FileDiffACLHook", "test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with True and False\"\"\" self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self):", "multiple with False and None\"\"\" self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test", "with True result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval with None", "test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval with False result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing", "self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval with True result\"\"\"", "with False result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with True and", "self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval with None result\"\"\" self._test_hook_approval_sequence([None], True)", "result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval with False result\"\"\" self._test_hook_approval_sequence([False],", "True and None\"\"\" self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing 
FileDiffACLHook multiple with False", "approval with True result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval with", "approval value to check. \"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in accessible_values: hook", "with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in accessible_values: hook = FileDiffACLHook(extension=self.extension) self.spy_on(hook.is_accessible, op=kgb.SpyOpReturn(value)) self.assertEqual(self.review_request.is_accessible_by(self.user),", "True result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval with None result\"\"\"", "True) def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple with False and None\"\"\" self._test_hook_approval_sequence([False, None], False)", "with False and None\"\"\" self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a", "FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users'] def setUp(self): super(FileDiffACLHookTests,", "test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook basic approval with True result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing", "for the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users'] def setUp(self): super(FileDiffACLHookTests, self).setUp() self.user =", "multiple with True and None\"\"\" self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple", "FileDiffACLHook approval results. 
Args: accessible_values (list of bool): A list of the values", "list of the values to return from FileDiffACLHook implementations. result (bool): A resulting", "reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools',", "self.user = self.create_user() self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self):", "reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the", "of FileDiffACLHook approval results. Args: accessible_values (list of bool): A list of the", "(bool): A resulting approval value to check. \"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value", "with True and None\"\"\" self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple with", "the values to return from FileDiffACLHook implementations. 
result (bool): A resulting approval value", "basic approval with None result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval", "override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in accessible_values: hook = FileDiffACLHook(extension=self.extension) self.spy_on(hook.is_accessible, op=kgb.SpyOpReturn(value)) self.assertEqual(self.review_request.is_accessible_by(self.user), result)", "setUp(self): super(FileDiffACLHookTests, self).setUp() self.user = self.create_user() self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True)", "FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users'] def setUp(self): super(FileDiffACLHookTests, self).setUp() self.user = self.create_user() self.review_request", "of the values to return from FileDiffACLHook implementations. result (bool): A resulting approval", "from reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures =", "result (bool): A resulting approval value to check. \"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for", "approval with False result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with True", "bool): A list of the values to return from FileDiffACLHook implementations. 
result (bool):", "\"\"\"Testing FileDiffACLHook basic approval with None result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook", "\"\"\"Testing FileDiffACLHook multiple with True and False\"\"\" self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing", "self).setUp() self.user = self.create_user() self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def", "None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple with False and None\"\"\" self._test_hook_approval_sequence([False, None],", "self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing FileDiffACLHook multiple with False and None\"\"\" self._test_hook_approval_sequence([False,", "def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval with False result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self):", "approval with None result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval with", "result): \"\"\"Test a sequence of FileDiffACLHook approval results. 
Args: accessible_values (list of bool):", "\"\"\"Unit tests for reviewboard.extensions.hooks.FileDiffACLHook.\"\"\" import kgb from djblets.features.testing import override_feature_check from reviewboard.extensions.hooks import", "import FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from reviewboard.reviews.features import DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase):", "result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with True and False\"\"\" self._test_hook_approval_sequence([True,", "\"\"\"Testing FileDiffACLHook multiple with False and None\"\"\" self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self, accessible_values,", "results. Args: accessible_values (list of bool): A list of the values to return", "\"\"\"Testing FileDiffACLHook multiple with True and None\"\"\" self._test_hook_approval_sequence([True, None], True) def test_multiple_aclhooks_3(self): \"\"\"Testing", "def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with True and None\"\"\" self._test_hook_approval_sequence([True, None], True) def", "reviewboard.extensions.hooks.FileDiffACLHook.\"\"\" import kgb from djblets.features.testing import override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases", "with None result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval with False", "FileDiffACLHook basic approval with False result\"\"\" self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple", "False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with True and None\"\"\" self._test_hook_approval_sequence([True, None], 
True)", "False\"\"\" self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with True and None\"\"\"", "False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with True and False\"\"\" self._test_hook_approval_sequence([True, False], False)", "self.create_user() self.review_request = self.create_review_request( create_repository=True) self.review_request.target_people.add(self.review_request.submitter) self.create_diffset(review_request=self.review_request, draft=True) self.review_request.publish(user=self.review_request.submitter) def test_single_aclhook_true(self): \"\"\"Testing FileDiffACLHook", "and False\"\"\" self._test_hook_approval_sequence([True, False], False) def test_multiple_aclhooks_2(self): \"\"\"Testing FileDiffACLHook multiple with True and", "['test_scmtools', 'test_users'] def setUp(self): super(FileDiffACLHookTests, self).setUp() self.user = self.create_user() self.review_request = self.create_review_request( create_repository=True)", "DiffACLsFeature class FileDiffACLHookTests(kgb.SpyAgency, BaseExtensionHookTestCase): \"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users'] def", "None result\"\"\" self._test_hook_approval_sequence([None], True) def test_single_aclhook_false(self): \"\"\"Testing FileDiffACLHook basic approval with False result\"\"\"", "self._test_hook_approval_sequence([False], False) def test_multiple_aclhooks_1(self): \"\"\"Testing FileDiffACLHook multiple with True and False\"\"\" self._test_hook_approval_sequence([True, False],", "result\"\"\" self._test_hook_approval_sequence([True], True) def test_single_aclhook_none(self): \"\"\"Testing FileDiffACLHook basic approval with None result\"\"\" self._test_hook_approval_sequence([None],", "None\"\"\" self._test_hook_approval_sequence([False, None], False) def _test_hook_approval_sequence(self, accessible_values, result): \"\"\"Test a sequence of 
FileDiffACLHook", "import kgb from djblets.features.testing import override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases import", "from djblets.features.testing import override_feature_check from reviewboard.extensions.hooks import FileDiffACLHook from reviewboard.extensions.tests.testcases import BaseExtensionHookTestCase from", "a sequence of FileDiffACLHook approval results. Args: accessible_values (list of bool): A list", "value to check. \"\"\" with override_feature_check(DiffACLsFeature.feature_id, enabled=True): for value in accessible_values: hook =", "\"\"\"Tests for the FileDiffACLHook.\"\"\" fixtures = ['test_scmtools', 'test_users'] def setUp(self): super(FileDiffACLHookTests, self).setUp() self.user" ]
[ "logs=None): \"\"\"Set trainer object for current callback.\"\"\" if not self.trainer.hccl: return if vega.is_torch_backend():", "if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import torch import torch.distributed as dist logger.info(\"init HCCL\")", "for more details. \"\"\"Data parallel callback.\"\"\" import logging import vega from .callback import", "distributed in the hope that it will be useful, # but WITHOUT ANY", "gradients_mean=True) init() def before_epoch(self, epoch, logs=None): \"\"\"Be called before each epoach.\"\"\" if not", "init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self, epoch, logs=None): \"\"\"Be called before", "logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that saves the evaluated Performance.\"\"\" def", "HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self, epoch, logs=None): \"\"\"Be called before each epoach.\"\"\"", "dist logger.info(\"init HCCL\") model = self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model =", "terms of the MIT License. # This program is distributed in the hope", "PURPOSE. See the # MIT License for more details. 
\"\"\"Data parallel callback.\"\"\" import", "can redistribute it and/or modify # it under the terms of the MIT", "import General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that saves the evaluated", "\"\"\"Be called before each epoach.\"\"\" if not vega.is_torch_backend() or not self.trainer.hccl: return if", "<filename>vega/trainer/callbacks/hccl.py<gh_stars>100-1000 # -*- coding:utf-8 -*- # Copyright (C) 2020. Huawei Technologies Co., Ltd.", "the MIT License. # This program is distributed in the hope that it", "PARTICULAR PURPOSE. See the # MIT License for more details. \"\"\"Data parallel callback.\"\"\"", "from .callback import Callback from vega.common import ClassFactory, ClassType from vega.common.general import General", "not None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None): \"\"\"Stop session.\"\"\" if self.trainer.hccl and vega.is_tf_backend(): self.trainer.sess.run(self.trainer.npu_shutdown)", "MIT License. # This program is distributed in the hope that it will", "of the MIT License. # This program is distributed in the hope that", "you can redistribute it and/or modify # it under the terms of the", "260 def init_trainer(self, logs=None): \"\"\"Set trainer object for current callback.\"\"\" if not self.trainer.hccl:", "import vega from .callback import Callback from vega.common import ClassFactory, ClassType from vega.common.general", "= torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def _init_ms_trainer(self): from mindspore import", "and/or modify # it under the terms of the MIT License. 
# This", "def _init_pytorch_trainer(self): import torch import torch.distributed as dist logger.info(\"init HCCL\") model = self.trainer.model", "context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self, epoch, logs=None): \"\"\"Be called before each epoach.\"\"\" if", "def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority = 260 def init_trainer(self, logs=None):", "broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def _init_ms_trainer(self): from mindspore import context from mindspore.context import", "= model def _init_ms_trainer(self): from mindspore import context from mindspore.context import ParallelMode from", "Technologies Co., Ltd. All rights reserved. # This program is free software; you", "@ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that saves the evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint", "is free software; you can redistribute it and/or modify # it under the", "epoch, logs=None): \"\"\"Be called before each epoach.\"\"\" if not vega.is_torch_backend() or not self.trainer.hccl:", "or not self.trainer.hccl: return if self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None):", "not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import torch", "self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import torch import", "callback.\"\"\" if not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self):", "import context from mindspore.context 
import ParallelMode from mindspore.communication.management import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL,", "import torch.distributed as dist logger.info(\"init HCCL\") model = self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers,", "object for current callback.\"\"\" if not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend():", "without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "import logging import vega from .callback import Callback from vega.common import ClassFactory, ClassType", "model def _init_ms_trainer(self): from mindspore import context from mindspore.context import ParallelMode from mindspore.communication.management", "Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. # This program", "device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def _init_ms_trainer(self): from mindspore import context from mindspore.context", "mindspore import context from mindspore.context import ParallelMode from mindspore.communication.management import init logger.info(\"init HCCL\")", "return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import torch import torch.distributed", "from vega.common.general import General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that saves", "hope that it will be useful, # but WITHOUT ANY WARRANTY; without even", "ClassFactory, ClassType from vega.common.general import General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback", "(C) 
2020. Huawei Technologies Co., Ltd. All rights reserved. # This program is", "\"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority = 260 def init_trainer(self, logs=None): \"\"\"Set trainer", "evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority = 260 def", "dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model =", "ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority = 260 def init_trainer(self, logs=None): \"\"\"Set trainer object", "All rights reserved. # This program is free software; you can redistribute it", "\"\"\"Set trainer object for current callback.\"\"\" if not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer()", "for current callback.\"\"\" if not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer()", "MIT License for more details. \"\"\"Data parallel callback.\"\"\" import logging import vega from", ".callback import Callback from vega.common import ClassFactory, ClassType from vega.common.general import General logger", "Callback from vega.common import ClassFactory, ClassType from vega.common.general import General logger = logging.getLogger(__name__)", "-*- # Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. #", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the #", "free software; you can redistribute it and/or modify # it under the terms", "each epoach.\"\"\" if not vega.is_torch_backend() or not self.trainer.hccl: return if self.trainer.sampler is not", "self._init_ms_trainer() def _init_pytorch_trainer(self): import torch import torch.distributed as dist logger.info(\"init HCCL\") model =", "self.trainer.hccl: return if self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None): \"\"\"Stop session.\"\"\"", "torch import torch.distributed as dist logger.info(\"init HCCL\") model = self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\",", "world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def _init_ms_trainer(self):", "self).__init__() self.priority = 260 def init_trainer(self, logs=None): \"\"\"Set trainer object for current callback.\"\"\"", "mindspore.communication.management import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self, epoch, logs=None): \"\"\"Be", "_init_ms_trainer(self): from mindspore import context from mindspore.context import ParallelMode from mindspore.communication.management import init", "# it under the terms of the MIT License. # This program is", "This program is distributed in the hope that it will be useful, #", "software; you can redistribute it and/or modify # it under the terms of", "details. \"\"\"Data parallel callback.\"\"\" import logging import vega from .callback import Callback from", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "Huawei Technologies Co., Ltd. All rights reserved. 
# This program is free software;", "General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that saves the evaluated Performance.\"\"\"", "useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of #", "that it will be useful, # but WITHOUT ANY WARRANTY; without even the", "import torch import torch.distributed as dist logger.info(\"init HCCL\") model = self.trainer.model dist.init_process_group( backend='hccl',", "that saves the evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority", "the terms of the MIT License. # This program is distributed in the", "backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model", "or FITNESS FOR A PARTICULAR PURPOSE. See the # MIT License for more", "it and/or modify # it under the terms of the MIT License. 
#", "logging import vega from .callback import Callback from vega.common import ClassFactory, ClassType from", "saves the evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority =", "None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None): \"\"\"Stop session.\"\"\" if self.trainer.hccl and vega.is_tf_backend(): self.trainer.sess.run(self.trainer.npu_shutdown) self.trainer.sess.close()", "callback.\"\"\" super(Hccl, self).__init__() self.priority = 260 def init_trainer(self, logs=None): \"\"\"Set trainer object for", "if self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None): \"\"\"Stop session.\"\"\" if self.trainer.hccl", "Ltd. All rights reserved. # This program is free software; you can redistribute", "before_epoch(self, epoch, logs=None): \"\"\"Be called before each epoach.\"\"\" if not vega.is_torch_backend() or not", "vega from .callback import Callback from vega.common import ClassFactory, ClassType from vega.common.general import", "import ParallelMode from mindspore.communication.management import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self,", "self.priority = 260 def init_trainer(self, logs=None): \"\"\"Set trainer object for current callback.\"\"\" if", "trainer object for current callback.\"\"\" if not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if", "# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. 
# This", "= self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers)", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "_init_pytorch_trainer(self): import torch import torch.distributed as dist logger.info(\"init HCCL\") model = self.trainer.model dist.init_process_group(", "is not None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None): \"\"\"Stop session.\"\"\" if self.trainer.hccl and vega.is_tf_backend():", "redistribute it and/or modify # it under the terms of the MIT License.", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "A PARTICULAR PURPOSE. See the # MIT License for more details. \"\"\"Data parallel", "from mindspore import context from mindspore.context import ParallelMode from mindspore.communication.management import init logger.info(\"init", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import torch import torch.distributed as dist logger.info(\"init", "it under the terms of the MIT License. # This program is distributed", "Co., Ltd. All rights reserved. 
# This program is free software; you can", "model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def _init_ms_trainer(self): from mindspore", "init() def before_epoch(self, epoch, logs=None): \"\"\"Be called before each epoach.\"\"\" if not vega.is_torch_backend()", "it will be useful, # but WITHOUT ANY WARRANTY; without even the implied", "vega.is_torch_backend() or not self.trainer.hccl: return if self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch) def after_train(self,", "the evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority = 260", "FOR A PARTICULAR PURPOSE. See the # MIT License for more details. \"\"\"Data", "# This program is free software; you can redistribute it and/or modify #", "ClassType from vega.common.general import General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that", "WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS", "HCCL\") model = self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model,", "import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self, epoch, logs=None): \"\"\"Be called", "the hope that it will be useful, # but WITHOUT ANY WARRANTY; without", "logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that saves the evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize", "init_trainer(self, logs=None): \"\"\"Set 
trainer object for current callback.\"\"\" if not self.trainer.hccl: return if", "mindspore.context import ParallelMode from mindspore.communication.management import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def", "\"\"\"Callback that saves the evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__()", "__init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority = 260 def init_trainer(self, logs=None): \"\"\"Set", "before each epoach.\"\"\" if not vega.is_torch_backend() or not self.trainer.hccl: return if self.trainer.sampler is", "License. # This program is distributed in the hope that it will be", "See the # MIT License for more details. \"\"\"Data parallel callback.\"\"\" import logging", "self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None): \"\"\"Stop session.\"\"\" if self.trainer.hccl and", "under the terms of the MIT License. # This program is distributed in", "be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of", "if not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import", "ParallelMode from mindspore.communication.management import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self, epoch,", "not self.trainer.hccl: return if self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None): \"\"\"Stop", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See", "Hccl(Callback): \"\"\"Callback that saves the evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl,", "vega.common import ClassFactory, ClassType from vega.common.general import General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class", "def before_epoch(self, epoch, logs=None): \"\"\"Be called before each epoach.\"\"\" if not vega.is_torch_backend() or", "self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model", "called before each epoach.\"\"\" if not vega.is_torch_backend() or not self.trainer.hccl: return if self.trainer.sampler", "torch.distributed as dist logger.info(\"init HCCL\") model = self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id)", "if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import torch import torch.distributed as", "License for more details. \"\"\"Data parallel callback.\"\"\" import logging import vega from .callback", "def _init_ms_trainer(self): from mindspore import context from mindspore.context import ParallelMode from mindspore.communication.management import", "# -*- coding:utf-8 -*- # Copyright (C) 2020. Huawei Technologies Co., Ltd. 
All", "logs=None): \"\"\"Be called before each epoach.\"\"\" if not vega.is_torch_backend() or not self.trainer.hccl: return", "is distributed in the hope that it will be useful, # but WITHOUT", "self.trainer.model = model def _init_ms_trainer(self): from mindspore import context from mindspore.context import ParallelMode", "parallel callback.\"\"\" import logging import vega from .callback import Callback from vega.common import", "torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def _init_ms_trainer(self): from mindspore import context", "# MIT License for more details. \"\"\"Data parallel callback.\"\"\" import logging import vega", "super(Hccl, self).__init__() self.priority = 260 def init_trainer(self, logs=None): \"\"\"Set trainer object for current", "if not vega.is_torch_backend() or not self.trainer.hccl: return if self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch)", "import Callback from vega.common import ClassFactory, ClassType from vega.common.general import General logger =", "in the hope that it will be useful, # but WITHOUT ANY WARRANTY;", "return if self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch) def after_train(self, logs=None): \"\"\"Stop session.\"\"\" if", "= 260 def init_trainer(self, logs=None): \"\"\"Set trainer object for current callback.\"\"\" if not", "logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self, epoch, logs=None): \"\"\"Be called before each", "epoach.\"\"\" if not vega.is_torch_backend() or not self.trainer.hccl: return if self.trainer.sampler is not None:", "coding:utf-8 -*- # Copyright (C) 2020. Huawei Technologies Co., Ltd. 
All rights reserved.", "vega.common.general import General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that saves the", "current callback.\"\"\" if not self.trainer.hccl: return if vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def", "not vega.is_torch_backend() or not self.trainer.hccl: return if self.trainer.sampler is not None: self.trainer.sampler.set_epoch(epoch) def", "from vega.common import ClassFactory, ClassType from vega.common.general import General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK)", "rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def _init_ms_trainer(self): from", "def init_trainer(self, logs=None): \"\"\"Set trainer object for current callback.\"\"\" if not self.trainer.hccl: return", "logger.info(\"init HCCL\") model = self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel(", "Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\" super(Hccl, self).__init__() self.priority = 260 def init_trainer(self,", "program is free software; you can redistribute it and/or modify # it under", "This program is free software; you can redistribute it and/or modify # it", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # MIT", "more details. \"\"\"Data parallel callback.\"\"\" import logging import vega from .callback import Callback", "will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty", "rights reserved. 
# This program is free software; you can redistribute it and/or", "modify # it under the terms of the MIT License. # This program", "vega.is_torch_backend(): self._init_pytorch_trainer() if vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import torch import torch.distributed as dist", "2020. Huawei Technologies Co., Ltd. All rights reserved. # This program is free", "model = self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id],", "-*- coding:utf-8 -*- # Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights", "as dist logger.info(\"init HCCL\") model = self.trainer.model dist.init_process_group( backend='hccl', init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # MIT License", "reserved. # This program is free software; you can redistribute it and/or modify", "the # MIT License for more details. \"\"\"Data parallel callback.\"\"\" import logging import", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # MIT License for", "init_method=f\"tcp://{General.cluster.hccl_server_ip}:{General.cluster.hccl_port}\", world_size=self.trainer.num_workers, rank=self.trainer.rank_id) model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def", "from mindspore.communication.management import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init() def before_epoch(self, epoch, logs=None):", "model, device_ids=[self.trainer.device_id], broadcast_buffers=General.cluster.enable_broadcast_buffers) self.trainer.model = model def _init_ms_trainer(self): from mindspore import context from", "vega.is_ms_backend(): self._init_ms_trainer() def _init_pytorch_trainer(self): import torch import torch.distributed as dist logger.info(\"init HCCL\") model", "# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "callback.\"\"\" import logging import vega from .callback import Callback from vega.common import ClassFactory,", "from mindspore.context import ParallelMode from mindspore.communication.management import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) init()", "import ClassFactory, ClassType from vega.common.general import General logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback):", "= logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class Hccl(Callback): \"\"\"Callback that saves the evaluated Performance.\"\"\" def __init__(self):", "context from mindspore.context import ParallelMode from mindspore.communication.management import init logger.info(\"init HCCL\") context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True)", "class Hccl(Callback): 
\"\"\"Callback that saves the evaluated Performance.\"\"\" def __init__(self): \"\"\"Initialize ModelCheckpoint callback.\"\"\"", "\"\"\"Data parallel callback.\"\"\" import logging import vega from .callback import Callback from vega.common", "program is distributed in the hope that it will be useful, # but", "# This program is distributed in the hope that it will be useful,", "FITNESS FOR A PARTICULAR PURPOSE. See the # MIT License for more details.", "but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or" ]
[ "10 indice += 1 print (numeros) x = [1,2,3,4,5,6,7,8,9,10] for indice, numero in", "= \"Hola amigos\" for caracter in cadena: print(caracter) cadena2 = \"\" for caracter", "in cadena: print(caracter) cadena2 = \"\" for caracter in cadena: cadena2 += caracter", "print(caracter) cadena2 = \"\" for caracter in cadena: cadena2 += caracter * 2", "len(numeros)): print(numeros[indice]) indice += 1 for numero in numeros: print(numero) for numero in", "cadena: print(caracter) cadena2 = \"\" for caracter in cadena: cadena2 += caracter *", "+= 1 for numero in numeros: print(numero) for numero in numeros: numero *=", "for caracter in cadena: print(caracter) cadena2 = \"\" for caracter in cadena: cadena2", "= 0 for numero in numeros: numeros[indice] *= 10 indice += 1 print", "in cadena: cadena2 += caracter * 2 print (cadena2) for i in range(10):", "*= 10 numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0 for numero in numeros: numeros[indice]", "numeros: print(numero) for numero in numeros: numero *= 10 numeros = [1,2,3,4,5,6,7,8,9,10] indice", "indice = 0 for numero in numeros: numeros[indice] *= 10 indice += 1", "= [1,2,3,4,5,6,7,8,9,10] for indice, numero in enumerate(x): x[indice] *= 10 print(x) cadena =", "print (numeros) x = [1,2,3,4,5,6,7,8,9,10] for indice, numero in enumerate(x): x[indice] *= 10", "numero in enumerate(x): x[indice] *= 10 print(x) cadena = \"Hola amigos\" for caracter", "indice += 1 for numero in numeros: print(numero) for numero in numeros: numero", "*= 10 indice += 1 print (numeros) x = [1,2,3,4,5,6,7,8,9,10] for indice, numero", "x[indice] *= 10 print(x) cadena = \"Hola amigos\" for caracter in cadena: print(caracter)", "0 while(indice < len(numeros)): print(numeros[indice]) indice += 1 for numero in numeros: print(numero)", "in numeros: numero *= 10 numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0 for numero", "[1,2,3,4,5,6,7,8,9,10] for indice, numero in enumerate(x): x[indice] *= 10 print(x) cadena = \"Hola", "numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0 
while(indice < len(numeros)): print(numeros[indice]) indice += 1", "*= 10 print(x) cadena = \"Hola amigos\" for caracter in cadena: print(caracter) cadena2", "numero in numeros: print(numero) for numero in numeros: numero *= 10 numeros =", "[1,2,3,4,5,6,7,8,9,10] indice = 0 while(indice < len(numeros)): print(numeros[indice]) indice += 1 for numero", "numero in numeros: numeros[indice] *= 10 indice += 1 print (numeros) x =", "numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0 for numero in numeros: numeros[indice] *= 10", "print(x) cadena = \"Hola amigos\" for caracter in cadena: print(caracter) cadena2 = \"\"", "numero *= 10 numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0 for numero in numeros:", "enumerate(x): x[indice] *= 10 print(x) cadena = \"Hola amigos\" for caracter in cadena:", "numeros: numero *= 10 numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0 for numero in", "for caracter in cadena: cadena2 += caracter * 2 print (cadena2) for i", "caracter in cadena: print(caracter) cadena2 = \"\" for caracter in cadena: cadena2 +=", "10 numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0 for numero in numeros: numeros[indice] *=", "(numeros) x = [1,2,3,4,5,6,7,8,9,10] for indice, numero in enumerate(x): x[indice] *= 10 print(x)", "= [1,2,3,4,5,6,7,8,9,10] indice = 0 for numero in numeros: numeros[indice] *= 10 indice", "print(numero) for numero in numeros: numero *= 10 numeros = [1,2,3,4,5,6,7,8,9,10] indice =", "while(indice < len(numeros)): print(numeros[indice]) indice += 1 for numero in numeros: print(numero) for", "= 0 while(indice < len(numeros)): print(numeros[indice]) indice += 1 for numero in numeros:", "caracter in cadena: cadena2 += caracter * 2 print (cadena2) for i in", "1 print (numeros) x = [1,2,3,4,5,6,7,8,9,10] for indice, numero in enumerate(x): x[indice] *=", "amigos\" for caracter in cadena: print(caracter) cadena2 = \"\" for caracter in cadena:", "= \"\" for caracter in cadena: cadena2 += caracter * 2 print (cadena2)", "cadena2 = \"\" for caracter in cadena: cadena2 += 
caracter * 2 print", "< len(numeros)): print(numeros[indice]) indice += 1 for numero in numeros: print(numero) for numero", "for numero in numeros: numero *= 10 numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0", "10 print(x) cadena = \"Hola amigos\" for caracter in cadena: print(caracter) cadena2 =", "indice += 1 print (numeros) x = [1,2,3,4,5,6,7,8,9,10] for indice, numero in enumerate(x):", "in numeros: numeros[indice] *= 10 indice += 1 print (numeros) x = [1,2,3,4,5,6,7,8,9,10]", "x = [1,2,3,4,5,6,7,8,9,10] for indice, numero in enumerate(x): x[indice] *= 10 print(x) cadena", "for indice, numero in enumerate(x): x[indice] *= 10 print(x) cadena = \"Hola amigos\"", "indice, numero in enumerate(x): x[indice] *= 10 print(x) cadena = \"Hola amigos\" for", "numeros: numeros[indice] *= 10 indice += 1 print (numeros) x = [1,2,3,4,5,6,7,8,9,10] for", "cadena = \"Hola amigos\" for caracter in cadena: print(caracter) cadena2 = \"\" for", "cadena: cadena2 += caracter * 2 print (cadena2) for i in range(10): print(i)", "in numeros: print(numero) for numero in numeros: numero *= 10 numeros = [1,2,3,4,5,6,7,8,9,10]", "for numero in numeros: numeros[indice] *= 10 indice += 1 print (numeros) x", "print(numeros[indice]) indice += 1 for numero in numeros: print(numero) for numero in numeros:", "numero in numeros: numero *= 10 numeros = [1,2,3,4,5,6,7,8,9,10] indice = 0 for", "1 for numero in numeros: print(numero) for numero in numeros: numero *= 10", "[1,2,3,4,5,6,7,8,9,10] indice = 0 for numero in numeros: numeros[indice] *= 10 indice +=", "in enumerate(x): x[indice] *= 10 print(x) cadena = \"Hola amigos\" for caracter in", "\"Hola amigos\" for caracter in cadena: print(caracter) cadena2 = \"\" for caracter in", "numeros[indice] *= 10 indice += 1 print (numeros) x = [1,2,3,4,5,6,7,8,9,10] for indice,", "indice = 0 while(indice < len(numeros)): print(numeros[indice]) indice += 1 for numero in", "= [1,2,3,4,5,6,7,8,9,10] indice = 0 while(indice < len(numeros)): print(numeros[indice]) 
indice += 1 for", "\"\" for caracter in cadena: cadena2 += caracter * 2 print (cadena2) for", "for numero in numeros: print(numero) for numero in numeros: numero *= 10 numeros", "0 for numero in numeros: numeros[indice] *= 10 indice += 1 print (numeros)", "+= 1 print (numeros) x = [1,2,3,4,5,6,7,8,9,10] for indice, numero in enumerate(x): x[indice]" ]
[ "logging import py_cui from modules.tui import TUI from modules.util import configManager def runSchedule(station):", "= threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception", "from modules.util import configManager def runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow):", "station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\") def main(): parser = argparse.ArgumentParser(", "configManager def runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start()", "Broadcast Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread = threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True)", "frame.station.signOff() sys.exit(0) # TODO turn this into a package at first major released", "started.\") while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\") def main():", "if frame: frame.station.signOff() sys.exit(0) # TODO turn this into a package at first", "= parser.parse_args() if args.headless: headless.routine() else: try: frame = None mainWindow = py_cui.PyCUI(4,", "def main(): parser = argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run without", "else: try: frame = None mainWindow = py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{}", "major released # packaging: 
https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ # with CLI: https://medium.com/nerd-for-tech/how-to-build-and-distribute-a-cli-tool-with-python-537ae41d9d78 if __name__ == \"__main__\":", "TUI(mainWindow) daemonThread = threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread = threading.Thread( name='TUI',", "= None mainWindow = py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name))", "threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception as", "except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception as e: logging.critical(\"TUI: \"+str(e)) finally: if frame:", "def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\") def main(): parser = argparse.ArgumentParser( description='{station}", "a package at first major released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ # with CLI: https://medium.com/nerd-for-tech/how-to-build-and-distribute-a-cli-tool-with-python-537ae41d9d78", "import configManager def runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\")", "except Exception as e: logging.critical(\"TUI: \"+str(e)) finally: if frame: frame.station.signOff() sys.exit(0) # TODO", "name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception as e:", "description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', 
action='store_true', help='run without TUI') args = parser.parse_args() if", "main(): parser = argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run without TUI')", "args = parser.parse_args() if args.headless: headless.routine() else: try: frame = None mainWindow =", "name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread = threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join()", "TUIThread = threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except", "parser.add_argument('--headless', action='store_true', help='run without TUI') args = parser.parse_args() if args.headless: headless.routine() else: try:", "from modules.tui import TUI from modules.util import configManager def runSchedule(station): logging.info(\"Scheduler started.\") while", "at first major released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ # with CLI: https://medium.com/nerd-for-tech/how-to-build-and-distribute-a-cli-tool-with-python-537ae41d9d78 if __name__", "parser.parse_args() if args.headless: headless.routine() else: try: frame = None mainWindow = py_cui.PyCUI(4, 3,", "Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run without TUI') args = parser.parse_args() if args.headless:", "args=(frame.station,), daemon=True) daemonThread.start() TUIThread = threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt:", "argparse import threading import sys import logging import py_cui from modules.tui import TUI", "exit_key=1) mainWindow.set_refresh_timeout(1) 
mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread = threading.Thread(", "import logging import py_cui from modules.tui import TUI from modules.util import configManager def", "mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread = threading.Thread( name='Daemon', target=runSchedule,", "turn this into a package at first major released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ #", "daemonThread.start() TUIThread = threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\")", "mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread = threading.Thread( name='Daemon',", "first major released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ # with CLI: https://medium.com/nerd-for-tech/how-to-build-and-distribute-a-cli-tool-with-python-537ae41d9d78 if __name__ ==", "TODO turn this into a package at first major released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/", "TUI from modules.util import configManager def runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun() def", "logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\") def", "mainWindow.start() logging.info(\"TUI exited.\") def main(): parser = argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless',", "import py_cui from 
modules.tui import TUI from modules.util import configManager def runSchedule(station): logging.info(\"Scheduler", "System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread = threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread", "frame: frame.station.signOff() sys.exit(0) # TODO turn this into a package at first major", "try: frame = None mainWindow = py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast", "Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run without TUI') args = parser.parse_args() if args.headless: headless.routine()", "help='run without TUI') args = parser.parse_args() if args.headless: headless.routine() else: try: frame =", "action='store_true', help='run without TUI') args = parser.parse_args() if args.headless: headless.routine() else: try: frame", "Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread = threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start()", "None mainWindow = py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame", "args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception as e: logging.critical(\"TUI: \"+str(e))", "py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread", "runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\") def main(): parser = argparse.ArgumentParser( description='{station} Broadcast", "\"+str(e)) finally: 
if frame: frame.station.signOff() sys.exit(0) # TODO turn this into a package", "logging.critical(\"TUI: \"+str(e)) finally: if frame: frame.station.signOff() sys.exit(0) # TODO turn this into a", "headless.routine() else: try: frame = None mainWindow = py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title(", "= argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run without TUI') args =", "frame = None mainWindow = py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast Automation", "modules.tui import TUI from modules.util import configManager def runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init():", "import sys import logging import py_cui from modules.tui import TUI from modules.util import", "def runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI", "target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception as e: logging.critical(\"TUI:", "released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ # with CLI: https://medium.com/nerd-for-tech/how-to-build-and-distribute-a-cli-tool-with-python-537ae41d9d78 if __name__ == \"__main__\": main()", "while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\") def main(): parser", "'{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread = threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,),", "args.headless: 
headless.routine() else: try: frame = None mainWindow = py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1)", "runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\")", "if args.headless: headless.routine() else: try: frame = None mainWindow = py_cui.PyCUI(4, 3, exit_key=1)", "argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run without TUI') args = parser.parse_args()", "3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow) daemonThread =", "modules.util import configManager def runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI", "package at first major released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ # with CLI: https://medium.com/nerd-for-tech/how-to-build-and-distribute-a-cli-tool-with-python-537ae41d9d78 if", "as e: logging.critical(\"TUI: \"+str(e)) finally: if frame: frame.station.signOff() sys.exit(0) # TODO turn this", "detected.\") except Exception as e: logging.critical(\"TUI: \"+str(e)) finally: if frame: frame.station.signOff() sys.exit(0) #", "TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception as e: logging.critical(\"TUI: \"+str(e)) finally:", "target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread = threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except", "logging.info(\"TUI exited.\") def main(): parser = 
argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true',", "TUI') args = parser.parse_args() if args.headless: headless.routine() else: try: frame = None mainWindow", "daemonThread = threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread = threading.Thread( name='TUI', target=runTUI,", "= py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame = TUI(mainWindow)", "py_cui from modules.tui import TUI from modules.util import configManager def runSchedule(station): logging.info(\"Scheduler started.\")", "headless import argparse import threading import sys import logging import py_cui from modules.tui", "sys import logging import py_cui from modules.tui import TUI from modules.util import configManager", "daemon=True) daemonThread.start() TUIThread = threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start() TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt", "logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\") def main(): parser = argparse.ArgumentParser( description='{station} Broadcast Automation", "import argparse import threading import sys import logging import py_cui from modules.tui import", "KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception as e: logging.critical(\"TUI: \"+str(e)) finally: if frame: frame.station.signOff()", "e: logging.critical(\"TUI: \"+str(e)) finally: if frame: frame.station.signOff() sys.exit(0) # TODO turn this into", "mainWindow = py_cui.PyCUI(4, 3, exit_key=1) mainWindow.set_refresh_timeout(1) mainWindow.set_title( '{} Broadcast Automation System'.format(configManager.cfg.station.name)) frame =", "parser = 
argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run without TUI') args", "= threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread = threading.Thread( name='TUI', target=runTUI, args=(mainWindow,))", "threading import sys import logging import py_cui from modules.tui import TUI from modules.util", "TUIThread.join() except KeyboardInterrupt: logging.warning(\"KeyboardInterrupt detected.\") except Exception as e: logging.critical(\"TUI: \"+str(e)) finally: if", "<gh_stars>1-10 import headless import argparse import threading import sys import logging import py_cui", "exited.\") def main(): parser = argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run", "this into a package at first major released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ # with", "without TUI') args = parser.parse_args() if args.headless: headless.routine() else: try: frame = None", "station.mixer.get_init(): station.scheduleRun() def runTUI(mainWindow): logging.info(\"TUI starting...\") mainWindow.start() logging.info(\"TUI exited.\") def main(): parser =", "Exception as e: logging.critical(\"TUI: \"+str(e)) finally: if frame: frame.station.signOff() sys.exit(0) # TODO turn", "sys.exit(0) # TODO turn this into a package at first major released #", "frame = TUI(mainWindow) daemonThread = threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread =", "import threading import sys import logging import py_cui from modules.tui import TUI from", "threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread = 
threading.Thread( name='TUI', target=runTUI, args=(mainWindow,)) TUIThread.start()", "finally: if frame: frame.station.signOff() sys.exit(0) # TODO turn this into a package at", "# TODO turn this into a package at first major released # packaging:", "import headless import argparse import threading import sys import logging import py_cui from", "System'.format(station=configManager.cfg.station.name)) parser.add_argument('--headless', action='store_true', help='run without TUI') args = parser.parse_args() if args.headless: headless.routine() else:", "into a package at first major released # packaging: https://uoftcoders.github.io/studyGroup/lessons/python/packages/lesson/ # with CLI:", "logging.warning(\"KeyboardInterrupt detected.\") except Exception as e: logging.critical(\"TUI: \"+str(e)) finally: if frame: frame.station.signOff() sys.exit(0)", "import TUI from modules.util import configManager def runSchedule(station): logging.info(\"Scheduler started.\") while station.mixer.get_init(): station.scheduleRun()", "starting...\") mainWindow.start() logging.info(\"TUI exited.\") def main(): parser = argparse.ArgumentParser( description='{station} Broadcast Automation System'.format(station=configManager.cfg.station.name))", "= TUI(mainWindow) daemonThread = threading.Thread( name='Daemon', target=runSchedule, args=(frame.station,), daemon=True) daemonThread.start() TUIThread = threading.Thread(" ]
[ "elif '-' in die[1]: split = die[1].index('-') plus = -int(die[1][split + 1:]) die[1]", "matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust', 'nick'): untrustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan,", "return id # Warning: this does no locking, should only be used internally", ">= min and len(line)-1 <= max: return True elif not max and len(line)-1", "arguments: {baz} out.append(ARG_UNL) else: # Normal argument: foo out.append(ARG_STD) return out def getargnums(argtypes):", "trusted by oonbotti2 and identified with NickServ', '#devoice': 'remove your or nick\\'s voice", "kickreason)) else: irc.msg(reply, zwsp + 'Usage #kick nick reason') elif matchcmd(cmdline, '#src'): irc.msg(reply,", "and line[-1] == '\\n': line = line[:-1] if len(line) > 0: chan, account", "'#invite'): irc.msg(chan, zwsp + '%s: #invite has been removed. Use manual invite' %", "= {'#echo': 'text', '#op': '[nick]', '#deop': '[nick]', '#voice': '[nick]', '#devoice': '[nick]', '#quiet': 'nick',", "only be used internally # The index returned cannot be guaranteed valid if", "msgs[receiver].append((sender, origin, msg)) f.close() def savemessages(): global msgs, msgslock with msgslock: f=open('msgs.txt', 'w')", "max = None if number of arguments is unlimited for argtype in argtypes:", "nick, '-o', args.split(' ')) elif matchcmd(cmdline, '#voice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan,", "if cmd in usage: if message: return 'Usage: %s %s' % (cmd, usage[cmd])", "this in private messages for more transparent bot usage if matchcmd(cmdline, '#chan') and", "zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg', 'nick {message}'): msgnick, message", "'Usage: #trusted? 
[nick]') elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust', 'nick'): trustnick = parsecmd(cmdline,", "godslock = threading.Lock() # receiver: [(sender1, origin1, message1), (sender2, origin2, message2), ..., (senderN,", "in rolls])) else: text = str(result) if plus > 0: text = '%i", "0: chan, account = line.split() if chan not in gods: gods[chan] = []", "else: return account def isauthorized(irc, chan, nick): account = getaccount(irc, nick) if account:", "kicknick, kickreason = parsecmd(cmdline, 'nick {reason}') if kicknick.lower() == irc.nick: irc.send('KICK %s %s", "message to nick', '#trusted?': 'tell you if nick or yourself is trusted by", "if plus > 0: text = '%i (%s + %i)' % (result +", "string') if len(out) == 1: return out[0] else: return out def parse((line, irc)):", "elif plus < 0: text = '%i (%s - %i)' % (result +", "cmd) matched if the command cmd is used, matchcmd(line, cmd, args) checks whether", "of /WHOIS list. whoisnick = line[3] for id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) ==", "== '': # '' Signifies failure return None else: return account def isauthorized(irc,", "chan, nick): account = getaccount(irc, nick) if account: return istrusted(chan, account) else: irc.msg(nick,", "in case you are trusted by oonbotti2 and identified with NickServ', '#quiet': 'give", "zwsp + 'Usage: #trusted? [nick]') elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust', 'nick'): trustnick", "in msgs: for sender, origin, msg in msgs.pop(nick): irc.msg(nick, zwsp + '%s <%s>", "def loadmessages(): global msgs, msgslock with msgslock: msgs = {} f = open('msgs.txt',", "cmdline = newcmdline else: irc.msg(chan, zwsp + 'Usage #chan channel command') if matchcmd(cmdline,", "Remove the command if len(line) == 0: raise ArgsfmtError('No command given') line =", "'I shall.' 
elif cmd in helptext: if helptext[cmd]: return '%s %s %s' %", "= '%i (%s - %i)' % (result + plus, text, -plus) irc.msg(reply, zwsp", "be assigned '' argtypes = parseargsfmt(args) if len(argtypes) >= 1 and ARG_UNL in", "transparent bot usage if matchcmd(cmdline, '#chan') and chan != nick: if matchcmd(cmdline, '#chan',", "min += 1 if max != None: # Don't try to increment if", "matchcmd(line, cmd) matched if the command cmd is used, matchcmd(line, cmd, args) checks", "text back', '#op': 'give nick or yourself op rights in case you are", "'#ls-trusted': 'list nicks that are trusted. use only in a query', '#chan': 'Runs", "!= cmd: return False if not args: return True min, max = getargnums(parseargsfmt(args))", "parsecmd(cmdline, '[command]') helptext = help(command) if helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and", "(time-1, fn)), self.timedjobs) torun = map((lambda (time, fn): fn), filter((lambda (time, fn): time<=0),", "= None while account == None: account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account", "'': line = account elif len(line + ', ' + account) <= 255:", "' '.join(nicks))) def istrusted(chan, account): trustedlock.acquire() if chan in trusted and account in", "accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: del accountcheck[index] accountchecklock.release()", "[ARG_STD, ARG_OPT, ARG_UNL] args = args.split(' ') out = [] for arg in", "'): if isauthorized(irc, line[3][1:], nick): irc.send('JOIN ' + line[3]) elif line[1] == '482':", "helptext[cmd]: return '%s %s %s' % (cmd, usage(cmd, False), helptext[cmd]) else: return '%s", "ids def getaccount(irc, nick): id = initaccountcheck(nick) irc.send('WHOIS ' + nick) cron.queuejob(5, (lambda", "+ '%s: #invite has been removed. 
Use manual invite' % nick) elif matchcmd(cmdline,", "bot usage if matchcmd(cmdline, '#chan') and chan != nick: if matchcmd(cmdline, '#chan', 'channel", "'': # '' Signifies failure return None else: return account def isauthorized(irc, chan,", "trustedlock trustedlock.acquire() if chan not in trusted: trusted[chan] = [] if account not", "if isauthorized(irc, chan, nick): account = getaccount(irc, trustnick) if account: addtrusted(chan, account) savetrusted()", "= None if number of arguments is unlimited for argtype in argtypes: if", "!= '': lines.append(line) for line in lines: irc.msg(nick, zwsp + '%s: %s' %", "helptext: if helptext[cmd]: return '%s %s %s' % (cmd, usage(cmd, False), helptext[cmd]) else:", "'nick message', '#trusted?': '[nick]', '#trust': 'nick', '#untrust': 'nick', '#ls-trusted': '', '#chan': 'channel command',", "or nick\\'s voice in case you are trusted by oonbotti2 and identified with", "plus > 0: text = '%i (%s + %i)' % (result + plus,", "if istrusted(chan, untrustnick): account = untrustnick if account: godslock.acquire() if chan not in", "'#voice': '[nick]', '#devoice': '[nick]', '#quiet': 'nick', '#dequiet': 'nick', '#kick': 'nick [reason]', '#src': '',", "Optional (0-1) argument: [bar] out.append(ARG_OPT) elif len(arg) >= 2 and arg[0] == '{'", "matchcmd(cmdline, '#trust', 'nick'): trustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account =", "newcmdline = newcmdline.split(' ') if isauthorized(irc, newchan, nick): chan = newchan cmdline =", "istrusted(chan, account): trustedlock.acquire() if chan in trusted and account in trusted[chan]: trustedlock.release() return", "lambda (id, nick, account): id filterbynick = lambda (id, cknick, account): cknick ==", "trusted', '#help': 'give short info of command or list commands'} if cmd=='': return", "account == None: account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account == '': #", "= [] for type in argtypes: if type == ARG_STD or type ==", "of 
arguments: {baz} out.append(ARG_UNL) else: # Normal argument: foo out.append(ARG_STD) return out def", "%s :%s'%(chan, kicknick, kickreason)) else: irc.msg(reply, zwsp + 'Usage #kick nick reason') elif", "else: irc.msg(nick, zwsp + 'Identify with NickServ') class ArgsfmtError(Exception): def __init__(self, msg): self.msg", "(time, fn): (time-1, fn)), self.timedjobs) torun = map((lambda (time, fn): fn), filter((lambda (time,", "text = parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'): args = parsecmd(cmdline, '{args}')", "case you are trusted by oonbotti2 and identified with NickServ', '#devoice': 'remove your", "be used internally # The index returned cannot be guaranteed valid if lock", "+ %i)' % (result + plus, text, plus) elif plus < 0: text", "cmdline.remove('') # #chan: channel override prefix # Don't allow this in private messages", "%s %s :%s'%(chan, kicknick, kickreason)) else: irc.msg(reply, zwsp + 'Usage #kick nick reason')", "parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+o', args.split(' ')) elif matchcmd(cmdline, '#deop'): args =", "+= 1 elif argtype == ARG_OPT: if max != None: # Don't try", "args) checks whether the args match too if len(line) == 0: return False", "than one optional or unlimited argument per argument string raise ArgsfmtError('Ambiguous argument format')", "'/lg': loadgods() elif cmdline[0] == '/lm': loadmessages() elif cmdline[0] == '/sm': savemessages() def", "voice in case you are trusted by oonbotti2 and identified with NickServ', '#quiet':", "= line[:-1] if len(line) > 0: chan, account = line.split() if chan not", "ARG_UNL in argtypes[:-1]: # Disallow non-final unlimited arguments raise ArgsfmtError('Non-final unlimited argument') if", "%s' % (chan, set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan, account): trustedlock.acquire() if chan in", "self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire() 
self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run =", "command cmd is used, matchcmd(line, cmd, args) checks whether the args match too", "'#trusted?': '[nick]', '#trust': 'nick', '#untrust': 'nick', '#ls-trusted': '', '#chan': 'channel command', '#help': '[command]'}", "accountcheckid += 1 accountchecklock.release() return id # Warning: this does no locking, should", "rights', '#voice': 'give nick or yourself voice in case you are trusted by", "yourself is trusted by oonbotti2', '#trust': 'add nick to trusted list', '#untrust': 'remove", "and line[3][:len(zwsp)+1] == ':'+zwsp: # If line begins with ZWSP return if line[1]=='PRIVMSG'", "'' is used because None is already reserved elif line[1] == 'INVITE' and", "= threading.Lock() self.cronctrl = [] self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def queuejob(self, time, fn):", "irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg', 'nick {message}'): msgnick,", "raise ArgsfmtError('Non-final unlimited argument') if len(filter((lambda type: type == ARG_OPT or type ==", "'[command]') helptext = help(command) if helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and chan", "1: irc.msg(reply, zwsp + 'This die is not available in your space-time region.')", "logged in as whoisnick = line[3] account = line[4] for id in getaccountcheckidbynick(whoisnick):", "def addtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan not in trusted: trusted[chan]", "chan with msgslock: if msgnick not in msgs: msgs[msgnick] = [] msgs[msgnick].append((nick, origin,", "'#op': '[nick]', '#deop': '[nick]', '#voice': '[nick]', '#devoice': '[nick]', '#quiet': 'nick', '#dequiet': 'nick', '#kick':", "chmode(irc, chan, nick, '-v', args.split(' ')) elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick', 'nick", "use! 
def getindexbyaccountcheckid(id): global accountcheck for index in range(len(accountcheck)): ckid, cknick, ckaccount =", "not None: ckid, cknick, value = accountcheck[index] accountchecklock.release() return value def removeaccountcheck(id): global", "{} f=open('gods.txt', 'r') for line in f: while len(line) > 0 and line[-1]", "%s' % (cmd, usage(cmd, False), helptext[cmd]) else: return '%s %s' % (cmd, usage(cmd,", "zwsp + '%s: %s' % (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times", "len(line) > min: # Unlimited argument given out = [] for type in", "if line[2][0] == '#' else nick zwsp = '\\xe2\\x80\\x8b' if nick in blacklist:", "and chan != nick: if matchcmd(cmdline, '#chan', 'channel {command}'): newchan, newcmdline = parsecmd(cmdline,", "fn in torun: fn() def loadmessages(): global msgs, msgslock with msgslock: msgs =", "accountchecklock accountchecklock.acquire() id = accountcheckid accountcheck.append((id, nick, None)) accountcheckid += 1 accountchecklock.release() return", "not to get truncated line += ', ' + account else: lines.append(line) line", "'give +q to nick!*@*', '#dequiet': 'remove +q from nick!*@*', '#kick': 'kicks nick with", "for %s' % untrustnick) else: irc.msg(reply, zwsp + 'Usage #untrust nick') elif matchcmd(cmdline,", "me to do?') elif times > 128: irc.msg(reply, zwsp + 'Sorry, I don\\'t", "(cmd, usage[cmd]) else: return usage[cmd] else: return None def help(cmd): helptext = {'#echo':", "in trusted: trusted[chan] = [] if account not in trusted[chan]: trusted[chan].append(account) trustedlock.release() def", "msg)) f.close() loadmessages() def addtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan not", "'QUIT': run = False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda (time, fn): (time-1,", "parseargsfmt(\"foo [bar] {baz} ) -> [ARG_STD, ARG_OPT, ARG_UNL] args = args.split(' ') out", "info of command 
or list commands'} if cmd=='': return '#echo #op #deop #voice", "import time concmd=['/q', '/lt', '/st', '/lg', '/lm', '/sm'] blacklist = [] doctor =", "') out = [] for arg in args: if len(arg) >= 2 and", "the args match too if len(line) == 0: return False if line[0] !=", "not in msgs: msgs[msgnick] = [] msgs[msgnick].append((nick, origin, message)) savemessages() else: irc.msg(reply, zwsp", "type: type == ARG_OPT or type == ARG_UNL), argtypes)) > 1: # Disallow", "found (e.g. it has been deleted, use the parameter as-is if not account:", "savemessages() def usage(cmd, message = True): usage = {'#echo': 'text', '#op': '[nick]', '#deop':", "account for %s' % untrustnick) else: irc.msg(reply, zwsp + 'Usage #untrust nick') elif", "manual invite' % nick) elif matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help', '[command]'): command =", "> 1: text = '%s (%s)' % (str(result), ', '.join([str(i) for i in", "origin, msg)) f.close() loadmessages() def addtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan", "if lock is released between call to getindexbyaccountcheckid and use! 
def getindexbyaccountcheckid(id): global", "(result + plus, text, -plus) irc.msg(reply, zwsp + text) elif line[1] == '330':", "elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust', 'nick'): untrustnick = parsecmd(cmdline, 'nick') if isauthorized(irc,", "a query', '#chan': 'Runs the command as if it was sent on the", "for index in range(len(accountcheck)): ckid, cknick, ckaccount = accountcheck[index] if ckid == id:", "{command}'): newchan, newcmdline = parsecmd(cmdline, 'channel {command}') newcmdline = newcmdline.split(' ') if isauthorized(irc,", "max = 0 # max = None if number of arguments is unlimited", "accountcheckid = 0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def __init__(self): self.timedjobs =", "def initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id = accountcheckid accountcheck.append((id, nick, None))", "accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: ckid, cknick,", "gods = {} f=open('gods.txt', 'r') for line in f: while len(line) > 0", "isauthorized(irc, chan, nick): account = getaccount(irc, nick) if account: return istrusted(chan, account) else:", "return out def getargnums(argtypes): min = 0 max = 0 # max =", "msgs: for sender, origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin, msg))", "if max != None: # Don't try to increment if max is unlimited", "used because None is already reserved elif line[1] == 'INVITE' and line[2] ==", "account = line.split() if chan not in gods: gods[chan] = [] gods[chan].append(account) addtrusted(chan,", "parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-o', args.split(' ')) elif matchcmd(cmdline, '#voice'): args =", "== ARG_STD or type == ARG_OPT: out.append(line[0]) line = line[1:] else: out.append('') elif", "e.g. 
parseargsfmt(\"foo [bar] {baz} ) -> [ARG_STD, ARG_OPT, ARG_UNL] args = args.split(' ')", "torun: fn() def loadmessages(): global msgs, msgslock with msgslock: msgs = {} f", "# Mark as failed, '' is used because None is already reserved elif", "usage[cmd]) else: return usage[cmd] else: return None def help(cmd): helptext = {'#echo': '#echo", "question[:2] != ':D': # Mandated by #osdev-offtopic law irc.msg(reply, zwsp + '%s: %s'", "% (result + plus, text, -plus) irc.msg(reply, zwsp + text) elif line[1] ==", "be high self.cronctrllock.acquire() for cmd in self.cronctrl: if cmd == 'QUIT': run =", "rolls) if times > 1: text = '%s (%s)' % (str(result), ', '.join([str(i)", "ARG_OPT: out.append(line[0]) line = line[1:] elif type == ARG_UNL: out.append(' '.join(line)) line =", "with NickServ', '#quiet': 'give +q to nick!*@*', '#dequiet': 'remove +q from nick!*@*', '#kick':", "elif matchcmd(cmdline, '#deop'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-o', args.split(' '))", "oonbotti2\\'s git repo', '#msg': 'send a message to nick', '#trusted?': 'tell you if", "not trusted' % trustnick) else: irc.msg(reply, zwsp + 'Failed to get account for", "self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run = True while", "%s' % (chan, set_unset+mode*4, ' '.join(nicks))) nicks = [] if nicks: irc.send('MODE %s", "if nick in blacklist: return elif len(line) >= 4 and len(line[3]) >= len(zwsp)+1", "nick to trusted list', '#untrust': 'remove nick from trusted list', '#ls-trusted': 'list nicks", "(chan, account)) f.close trustedlock.release() def init(): global cron cron = Cron() cron.start() loadtrusted()", "account not in trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account): global trusted, trustedlock trustedlock.acquire()", "trusted list', '#untrust': 'remove nick from trusted list', '#ls-trusted': 'list nicks that are", "elif 
matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick = parsecmd(cmdline, '[nick]') if trustnick", "self.timedjobslock.acquire() self.timedjobs = map((lambda (time, fn): (time-1, fn)), self.timedjobs) torun = map((lambda (time,", "# Unlimited (0-) number of arguments: {baz} out.append(ARG_UNL) else: # Normal argument: foo", "[] msgs[msgnick].append((nick, origin, message)) savemessages() else: irc.msg(reply, zwsp + 'Usage: #msg nick message')", "account) savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account for %s' %", "guaranteed valid if lock is released between call to getindexbyaccountcheckid and use! def", "unlimited argument per argument string raise ArgsfmtError('Ambiguous argument format') # Remove the command", "len(line) >= 4 and len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp: # If", "'{args}') chmode(irc, chan, nick, '-v', args.split(' ')) elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick',", "link to oonbotti2\\'s git repo', '#msg': 'send a message to nick', '#trusted?': 'tell", "irc.msg(reply, zwsp + 'Failed to get account for %s' % trustnick) else: irc.msg(reply,", "(id, nick, value) accountchecklock.release() def getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id)", "newchan, nick): chan = newchan cmdline = newcmdline else: irc.msg(chan, zwsp + 'Usage", "return False if not args: return True min, max = getargnums(parseargsfmt(args)) if max", "while '' in cmdline: cmdline.remove('') # #chan: channel override prefix # Don't allow", "if helptext[cmd]: return '%s %s %s' % (cmd, usage(cmd, False), helptext[cmd]) else: return", "time<=0), self.timedjobs)) self.timedjobs = filter((lambda (time, fn): time>0), self.timedjobs) self.timedjobslock.release() for fn in", "elif line[1] == 'INVITE' and line[2] == irc.nick and line[3][1:] in irc.chan.split(' '):", "line = line[:-1] if len(line.split('\\t')) == 4: 
receiver, sender, origin, msg = line.split('\\t')", "eliza.eliza() # channel: [user1, user2, ..., userN] trusted = {} trustedlock = threading.Lock()", "..., userN] trusted = {} trustedlock = threading.Lock() gods = {} godslock =", "= '\\xe2\\x80\\x8b' if nick in blacklist: return elif len(line) >= 4 and len(line[3])", "line += ', ' + account else: lines.append(line) line = account if line", "'channel {command}') newcmdline = newcmdline.split(' ') if isauthorized(irc, newchan, nick): chan = newchan", "doctor = eliza.eliza() # channel: [user1, user2, ..., userN] trusted = {} trustedlock", "command = parsecmd(cmdline, '[command]') helptext = help(command) if helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline,", "= getargnums(argtypes) if len(line) == min: # Only standard arguments given out =", "die == '%': if times != 1: irc.msg(reply, zwsp + 'Not supported') else:", "parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-v', args.split(' ')) elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline,", "in trusted: lines = [] line = '' for account in trusted[chan]: if", "type in argtypes: if type == ARG_STD: out.append(line[0]) line = line[1:] else: out.append('')", "+q to nick!*@*', '#dequiet': 'remove +q from nick!*@*', '#kick': 'kicks nick with specified", "a value will be assigned '' argtypes = parseargsfmt(args) if len(argtypes) >= 1", "case you are trusted by oonbotti2 and identified with NickServ', '#quiet': 'give +q", "id = accountcheckid accountcheck.append((id, nick, None)) accountcheckid += 1 accountchecklock.release() return id #", "command as if it was sent on the specified channel. Requires user to", "account): id filterbynick = lambda (id, cknick, account): cknick == nick ids =", "getindexbyaccountcheckid(id) if index is not None: ckid, nick, ckvalue = accountcheck[index] accountcheck[index] =", "'%s is not trusted' % trustnick) else: irc.msg(reply, zwsp + 'Failed to get", "matchcmd and parsecmd # e.g. 
parseargsfmt(\"foo [bar] {baz} ) -> [ARG_STD, ARG_OPT, ARG_UNL]", "cmd is used, matchcmd(line, cmd, args) checks whether the args match too if", "= Cron() cron.start() loadtrusted() loadgods() def chmode(irc, chan, nick, mode, args): set_unset =", "global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: del", "nick in msgs: for sender, origin, msg in msgs.pop(nick): irc.msg(nick, zwsp + '%s", "not in gods or account not in gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted() else:", "nick) if account: return istrusted(chan, account) else: irc.msg(nick, zwsp + 'Identify with NickServ')", "= 2 def parseargsfmt(args): # parses the argument format used by matchcmd and", "for nick in args: nicks.append(nick) if len(nicks) == 4: irc.send('MODE %s %s %s'", "else: return False def parsecmd(line, args): # Returns a tuple containing the arguments.", "chmode(irc, chan, nick, mode, args): set_unset = mode[0] mode = mode[1:] if isauthorized(irc,", "False with msgslock: if (line[1] == 'PRIVMSG' or line[1] == 'JOIN') and nick", "= parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '+q', [arg + '!*@*']) elif matchcmd(cmdline, '#dequiet'):", "didn't get a value will be assigned '' argtypes = parseargsfmt(args) if len(argtypes)", "helptext = {'#echo': '#echo text back', '#op': 'give nick or yourself op rights", "== '\\n': line = line[:-1] if len(line.split('\\t')) == 4: receiver, sender, origin, msg", "In a channel origin = chan with msgslock: if msgnick not in msgs:", "in helptext: if helptext[cmd]: return '%s %s %s' % (cmd, usage(cmd, False), helptext[cmd])", "type == ARG_OPT: out.append(line[0]) line = line[1:] else: out.append('') elif not max and", "available in your space-time region.') elif times < 1: irc.msg(reply, zwsp + 'What", "op rights', '#voice': 'give nick or yourself voice in case you are trusted", "argument: foo out.append(ARG_STD) return out def getargnums(argtypes): min = 0 max 
= 0", "run = True while run: time.sleep(1) # Accuracy doesn't need to be high", "== '': trustnick = nick account = getaccount(irc, trustnick) if account: if istrusted(chan,", "[arg + '!*@*']) elif matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick,", "query: origin = \"[query]\" else: # In a channel origin = chan with", "elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick, kickreason = parsecmd(cmdline, 'nick", "= [] for nick in args: nicks.append(nick) if len(nicks) == 4: irc.send('MODE %s", "receiver not in msgs: msgs[receiver] = [] msgs[receiver].append((sender, origin, msg)) f.close() def savemessages():", "']': # Optional (0-1) argument: [bar] out.append(ARG_OPT) elif len(arg) >= 2 and arg[0]", "== nick ids = map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return ids def getaccount(irc, nick):", "'#dequiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '-q', [arg + '!*@*']) elif", "for account in trusted[chan]: f.write('%s %s\\n' % (chan, account)) f.close trustedlock.release() def init():", "unlimited argument') if len(filter((lambda type: type == ARG_OPT or type == ARG_UNL), argtypes))", "+ y), rolls) if times > 1: text = '%s (%s)' % (str(result),", "return 'I shall.' 
elif cmd in helptext: if helptext[cmd]: return '%s %s %s'", "fn): (time-1, fn)), self.timedjobs) torun = map((lambda (time, fn): fn), filter((lambda (time, fn):", "= line[1:] else: out.append('') elif max and len(line) == max: # Optional argument", "die[1]: split = die[1].index('-') plus = -int(die[1][split + 1:]) die[1] = die[1][:split] else:", "msgs_changed = True if msgs_changed: savemessages() def execcmd(cmdline): if cmdline[0] == '/q': cron.ctrl('QUIT')", "= die[1][:split] else: plus = 0 die = '%' if die[1] == '%'", "line = line[1:] else: out.append('') elif not max and len(line) > min: #", "def loadgods(): global gods, godslock godslock.acquire() gods = {} f=open('gods.txt', 'r') for line", "standard arguments given out = [] for type in argtypes: if type ==", "'{args}') chmode(irc, chan, nick, '+v', args.split(' ')) elif matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline,", "non-final unlimited arguments raise ArgsfmtError('Non-final unlimited argument') if len(filter((lambda type: type == ARG_OPT", "irc.msg(nick, zwsp + '%s: %s' % (chan, line)) trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan,", "reason', '#src': 'paste a link to oonbotti2\\'s git repo', '#msg': 'send a message", "in gods or account not in gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted() else: irc.msg(reply,", "msgs: msgs[msgnick] = [] msgs[msgnick].append((nick, origin, message)) savemessages() else: irc.msg(reply, zwsp + 'Usage:", "True if msgs_changed: savemessages() def execcmd(cmdline): if cmdline[0] == '/q': cron.ctrl('QUIT') elif cmdline[0]", "it has been deleted, use the parameter as-is if not account: if istrusted(chan,", "question = parsecmd(cmdline, '{question}') if len(question) < 2 or question[:2] != ':D': #", "newcmdline = parsecmd(cmdline, 'channel {command}') newcmdline = newcmdline.split(' ') if isauthorized(irc, newchan, nick):", "global blacklist global msgs, msgslock global trusted, trustedlock, gods, godslock global doctor, 
die_expr", "used internally # The index returned cannot be guaranteed valid if lock is", "global accountcheck, accountchecklock accountchecklock.acquire() getid = lambda (id, nick, account): id filterbynick =", "prefix # Don't allow this in private messages for more transparent bot usage", "elif matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '+q', [arg +", "'#quiet': 'give +q to nick!*@*', '#dequiet': 'remove +q from nick!*@*', '#kick': 'kicks nick", "self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run = True while run: time.sleep(1) # Accuracy", "line[1] == 'JOIN') and nick in msgs: for sender, origin, msg in msgs.pop(nick):", "cmdline[0] == '/lg': loadgods() elif cmdline[0] == '/lm': loadmessages() elif cmdline[0] == '/sm':", "(chan, nick)) else: if isauthorized(irc, chan, nick): irc.send('KICK %s %s :%s'%(chan, kicknick, kickreason))", "' + msg ARG_STD = 0 ARG_OPT = 1 ARG_UNL = 2 def", "and arg[-1] == '}': # Unlimited (0-) number of arguments: {baz} out.append(ARG_UNL) else:", "# If account can't be found (e.g. it has been deleted, use the", "not None: ckid, nick, ckvalue = accountcheck[index] accountcheck[index] = (id, nick, value) accountchecklock.release()", "if line != '': lines.append(line) for line in lines: irc.msg(nick, zwsp + '%s:", "argtypes: if type == ARG_STD: out.append(line[0]) line = line[1:] else: out.append('') elif max", "with NickServ') class ArgsfmtError(Exception): def __init__(self, msg): self.msg = msg def __str__(self): return", "= 0 # max = None if number of arguments is unlimited for", "'#untrust', 'nick'): untrustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc,", "nick, mode, args): set_unset = mode[0] mode = mode[1:] if isauthorized(irc, chan, nick):", "# parses the argument format used by matchcmd and parsecmd # e.g. 
parseargsfmt(\"foo", "blacklist = [] doctor = eliza.eliza() # channel: [user1, user2, ..., userN] trusted", "%s' % (cmd, usage[cmd]) else: return usage[cmd] else: return None def help(cmd): helptext", "2 or question[:2] != ':D': # Mandated by #osdev-offtopic law irc.msg(reply, zwsp +", "0: text = '%i (%s - %i)' % (result + plus, text, -plus)", "', ' + account else: lines.append(line) line = account if line != '':", "'#kick', 'nick {reason}'): kicknick, kickreason = parsecmd(cmdline, 'nick {reason}') if kicknick.lower() == irc.nick:", "'#chan': 'channel command', '#help': '[command]'} if cmd in usage: if message: return 'Usage:", "trustedlock.acquire() if chan in trusted and account in trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted():", "max is unlimited max += 1 elif argtype == ARG_OPT: if max !=", "= [line[3][1:]] + line[4:] while '' in cmdline: cmdline.remove('') # #chan: channel override", "with specified reason', '#src': 'paste a link to oonbotti2\\'s git repo', '#msg': 'send", "from nick!*@*', '#kick': 'kicks nick with specified reason', '#src': 'paste a link to", "if account == '': # '' Signifies failure return None else: return account", "type == ARG_OPT: out.append(line[0]) line = line[1:] elif type == ARG_UNL: out.append(' '.join(line))", "cmd in helptext: if helptext[cmd]: return '%s %s %s' % (cmd, usage(cmd, False),", "f.close() loadmessages() def addtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan not in", "istrusted(chan, untrustnick): account = untrustnick if account: godslock.acquire() if chan not in gods", "2 and arg[0] == '[' and arg[-1] == ']': # Optional (0-1) argument:", "nick): id = initaccountcheck(nick) irc.send('WHOIS ' + nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id, '')))", "yourself voice in case you are trusted by oonbotti2 and identified with NickServ',", "ARG_STD: out.append(line[0]) line = line[1:] else: out.append('') elif max and len(line) == max:", 
"'!*@*']) elif matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '-q', [arg", "#msg nick message') elif matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick = parsecmd(cmdline,", "cmd=='me': return 'I shall.' elif cmd in helptext: if helptext[cmd]: return '%s %s", "elif times < 1: irc.msg(reply, zwsp + 'What exactly do you want me", "line[1] == '330': # WHOIS: is logged in as whoisnick = line[3] account", "msg def __str__(self): return 'Error with argument format: ' + msg ARG_STD =", "accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: ckid, nick, ckvalue =", "'channel {command}'): newchan, newcmdline = parsecmd(cmdline, 'channel {command}') newcmdline = newcmdline.split(' ') if", "%s %s' % (cmd, usage(cmd, False), helptext[cmd]) else: return '%s %s' % (cmd,", "out.append(ARG_OPT) elif len(arg) >= 2 and arg[0] == '{' and arg[-1] == '}':", "and line[-1] == '\\n': line = line[:-1] if len(line.split('\\t')) == 4: receiver, sender,", "containing the arguments. An optional argument that didn't get a value will be", "{} godslock = threading.Lock() # receiver: [(sender1, origin1, message1), (sender2, origin2, message2), ...,", "supported') else: irc.msg(reply, zwsp + '%s%s' % (random.randint(0,9), random.randint(0,9))) elif die < 1:", "')) elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick, kickreason = parsecmd(cmdline,", "line[1] == '318': # WHOIS: End of /WHOIS list. 
whoisnick = line[3] for", "for sender, origin, msg in msgs.pop(nick): irc.msg(nick, zwsp + '%s <%s> %s' %", "+ 'Not supported') else: irc.msg(reply, zwsp + '%s%s' % (random.randint(0,9), random.randint(0,9))) elif die", "ARG_UNL: max = None return min, max def matchcmd(line, cmd, args=None): # matchcmd(line,", "return False def initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id = accountcheckid accountcheck.append((id,", "argument per argument string raise ArgsfmtError('Ambiguous argument format') # Remove the command if", "nick or yourself is trusted by oonbotti2', '#trust': 'add nick to trusted list',", "return '#echo #op #deop #voice #devoice #quiet #dequiet #kick #src #msg #trusted? #trust", "return index return None def setaccountcheckvalue(id, value): global accountcheck, accountchecklock accountchecklock.acquire() index =", "trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global trusted, trustedlock trustedlock.acquire() trusted = {} trustedlock.release()", "') if isauthorized(irc, newchan, nick): chan = newchan cmdline = newcmdline else: irc.msg(chan,", "'#op'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+o', args.split(' ')) elif matchcmd(cmdline,", "'-' in die[1]: split = die[1].index('-') plus = -int(die[1][split + 1:]) die[1] =", "In a query: origin = \"[query]\" else: # In a channel origin =", "ARG_STD = 0 ARG_OPT = 1 ARG_UNL = 2 def parseargsfmt(args): # parses", "global trusted, trustedlock trustedlock.acquire() if chan not in trusted: trusted[chan] = [] if", "text, -plus) irc.msg(reply, zwsp + text) elif line[1] == '330': # WHOIS: is", "matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-v', args.split(' ')) elif", "account: if istrusted(chan, account): irc.msg(reply, zwsp + '%s is trusted' % trustnick) else:", "elif cmdline[0] == '/st': savetrusted() elif cmdline[0] == '/lg': loadgods() elif 
cmdline[0] ==", "'channel command', '#help': '[command]'} if cmd in usage: if message: return 'Usage: %s", "trustedlock.release() f=open('trusted.txt', 'r') for line in f: while len(line) > 0 and line[-1]", "= line[:-1] if len(line) > 0: chan, account = line.split() addtrusted(chan, account) f.close()", "if trustnick == '': trustnick = nick account = getaccount(irc, trustnick) if account:", "loadtrusted() loadgods() def chmode(irc, chan, nick, mode, args): set_unset = mode[0] mode =", "zwsp + 'Failed to get account for %s' % untrustnick) else: irc.msg(reply, zwsp", "line[4:] while '' in cmdline: cmdline.remove('') # #chan: channel override prefix # Don't", "elif matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '-q', [arg +", "{'#echo': 'text', '#op': '[nick]', '#deop': '[nick]', '#voice': '[nick]', '#devoice': '[nick]', '#quiet': 'nick', '#dequiet':", "if max and len(line)-1 >= min and len(line)-1 <= max: return True elif", "nick reason') elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and", "you are trusted by oonbotti2 and identified with NickServ', '#quiet': 'give +q to", "False if not args: return True min, max = getargnums(parseargsfmt(args)) if max and", "message') elif matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick = parsecmd(cmdline, '[nick]') if", "args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-o', args.split(' ')) elif matchcmd(cmdline, '#voice'):", "0 die = '%' if die[1] == '%' else int(die[1]) if die ==", "to be high self.cronctrllock.acquire() for cmd in self.cronctrl: if cmd == 'QUIT': run", "account = line[4] for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1] == '318':", "#trusted? #trust #untrust #ls-trusted #chan #help' elif cmd=='me': return 'I shall.' elif cmd", "trusted. 
use only in a query', '#chan': 'Runs the command as if it", "checks whether the args match too if len(line) == 0: return False if", "trusted[chan] = [] if account not in trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account):", "= line[1:] elif type == ARG_UNL: out.append(' '.join(line)) line = [] else: raise", "else: rolls = [random.randint(1, die) for i in xrange(times)] result = reduce((lambda x,", "def matchcmd(line, cmd, args=None): # matchcmd(line, cmd) matched if the command cmd is", "msgs[msgnick].append((nick, origin, message)) savemessages() else: irc.msg(reply, zwsp + 'Usage: #msg nick message') elif", "del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire() getid = lambda (id,", "account: addtrusted(chan, account) savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account for", "parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '-q', [arg + '!*@*']) elif matchcmd(cmdline, '#devoice'): args", "parses the argument format used by matchcmd and parsecmd # e.g. parseargsfmt(\"foo [bar]", "arguments. An optional argument that didn't get a value will be assigned ''", "# matchcmd(line, cmd) matched if the command cmd is used, matchcmd(line, cmd, args)", "trusted: for account in trusted[chan]: f.write('%s %s\\n' % (chan, account)) f.close trustedlock.release() def", "call to getindexbyaccountcheckid and use! def getindexbyaccountcheckid(id): global accountcheck for index in range(len(accountcheck)):", "elif type == ARG_UNL: out.append(' '.join(line)) line = [] else: raise ArgsfmtError('Number of", "Mark as failed, '' is used because None is already reserved elif line[1]", "is not available in your space-time region.') elif times < 1: irc.msg(reply, zwsp", "#src #msg #trusted? 
#trust #untrust #ls-trusted #chan #help' elif cmd=='me': return 'I shall.'", "message = True): usage = {'#echo': 'text', '#op': '[nick]', '#deop': '[nick]', '#voice': '[nick]',", "will be assigned '' argtypes = parseargsfmt(args) if len(argtypes) >= 1 and ARG_UNL", "# The index returned cannot be guaranteed valid if lock is released between", "zwsp + 'This die is not available in your space-time region.') elif times", "fn)) self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run = True", "set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan, account): trustedlock.acquire() if chan in trusted and account", "zwsp + '%s%s' % (random.randint(0,9), random.randint(0,9))) elif die < 1: irc.msg(reply, zwsp +", "accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire() getid = lambda (id, nick,", "a link to oonbotti2\\'s git repo', '#msg': 'send a message to nick', '#trusted?':", "True min, max = getargnums(parseargsfmt(args)) if max and len(line)-1 >= min and len(line)-1", "zwsp + 'Usage #untrust nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan in trusted:", "do?') elif times > 128: irc.msg(reply, zwsp + 'Sorry, I don\\'t have that", "max: # Optional argument given out = [] for type in argtypes: if", "elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan in trusted: lines = [] line =", "and parsecmd # e.g. parseargsfmt(\"foo [bar] {baz} ) -> [ARG_STD, ARG_OPT, ARG_UNL] args", "'Not op') msgs_changed = False with msgslock: if (line[1] == 'PRIVMSG' or line[1]", "die[0] else 1 if '+' in die[1]: split = die[1].index('+') plus = int(die[1][split", "WHOIS: End of /WHOIS list. 
whoisnick = line[3] for id in getaccountcheckidbynick(whoisnick): if", "'give nick or yourself voice in case you are trusted by oonbotti2 and", "#dequiet #kick #src #msg #trusted? #trust #untrust #ls-trusted #chan #help' elif cmd=='me': return", "(cmd, usage(cmd, False), helptext[cmd]) else: return '%s %s' % (cmd, usage(cmd, False)) else:", "no locking, should only be used internally # The index returned cannot be", "0: text = '%i (%s + %i)' % (result + plus, text, plus)", "messageN)] msgs = {} msgslock = threading.Lock() # (ID, nick, account) accountcheck =", "chan, nick, '-v', args.split(' ')) elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick', 'nick {reason}'):", "(receiver, sender, origin, msg)) f.close() loadmessages() def addtrusted(chan, account): global trusted, trustedlock trustedlock.acquire()", "text = str(result) if plus > 0: text = '%i (%s + %i)'", "blacklist: return elif len(line) >= 4 and len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1] ==", "account) accountcheck = [] accountcheckid = 0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread):", "f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin, msg)) f.close() loadmessages() def addtrusted(chan, account): global trusted,", "> 1: # Disallow more than one optional or unlimited argument per argument", "'/lt': loadtrusted() elif cmdline[0] == '/st': savetrusted() elif cmdline[0] == '/lg': loadgods() elif", "ARG_UNL), argtypes)) > 1: # Disallow more than one optional or unlimited argument", "ckvalue = accountcheck[index] accountcheck[index] = (id, nick, value) accountchecklock.release() def getaccountcheckvalue(id): global accountcheck,", "args.split(' ') out = [] for arg in args: if len(arg) >= 2", "args: nicks.append(nick) if len(nicks) == 4: irc.send('MODE %s %s %s' % (chan, set_unset+mode*4,", "True while run: time.sleep(1) # Accuracy doesn't need to be high self.cronctrllock.acquire() for", "if receiver not 
in msgs: msgs[receiver] = [] msgs[receiver].append((sender, origin, msg)) f.close() def", "= msg def __str__(self): return 'Error with argument format: ' + msg ARG_STD", "1 elif argtype == ARG_UNL: max = None return min, max def matchcmd(line,", "with ZWSP return if line[1]=='PRIVMSG' and line[3][:2] != ': ': reply = chan", "i in xrange(times)] result = reduce((lambda x, y: x + y), rolls) if", "def run(self): run = True while run: time.sleep(1) # Accuracy doesn't need to", "zwsp + 'Usage #trust nick') elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust', 'nick'): untrustnick", "irc.msg(reply, zwsp + text) elif line[1] == '330': # WHOIS: is logged in", "= getindexbyaccountcheckid(id) if index is not None: del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global", "'#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg', 'nick {message}'):", "self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda (time, fn): (time-1, fn)), self.timedjobs) torun = map((lambda", "= line[:-1] if len(line.split('\\t')) == 4: receiver, sender, origin, msg = line.split('\\t') if", "gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release() def savetrusted(): global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w')", "in gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted() else: irc.msg(reply, zwsp + 'Failed to get", "%s %s' % (cmd, usage[cmd]) else: return usage[cmd] else: return None def help(cmd):", "arguments given out = [] for type in argtypes: if type == ARG_STD:", "elif matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help', '[command]'): command = parsecmd(cmdline, '[command]') helptext =", "trustedlock.release() def loadtrusted(): global trusted, trustedlock trustedlock.acquire() trusted = {} trustedlock.release() f=open('trusted.txt', 'r')", 
"'!*@*']) elif matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-v', args.split('", "do you want me to do?') elif times > 128: irc.msg(reply, zwsp +", "== 'JOIN') and nick in msgs: for sender, origin, msg in msgs.pop(nick): irc.msg(nick,", "accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id = accountcheckid accountcheck.append((id, nick, None)) accountcheckid += 1", "whoisnick = line[3] for id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '')", "allow this in private messages for more transparent bot usage if matchcmd(cmdline, '#chan')", "raise ArgsfmtError('Number of given arguments not possible for given format string') if len(out)", "for given format string') if len(out) == 1: return out[0] else: return out", "#quiet #dequiet #kick #src #msg #trusted? #trust #untrust #ls-trusted #chan #help' elif cmd=='me':", "{baz} ) -> [ARG_STD, ARG_OPT, ARG_UNL] args = args.split(' ') out = []", "= line[1:] min, max = getargnums(argtypes) if len(line) == min: # Only standard", "args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-v', args.split(' ')) elif matchcmd(cmdline, '#kick'):", "as if it was sent on the specified channel. Requires user to be", "% (cmd, usage(cmd, False), helptext[cmd]) else: return '%s %s' % (cmd, usage(cmd, False))", "max and len(line) > min: # Unlimited argument given out = [] for", "= accountcheck[index] accountchecklock.release() return value def removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire() index =", "else: irc.msg(reply, zwsp + 'Usage #untrust nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan", "untrustnick) # If account can't be found (e.g. 
it has been deleted, use", "line[1:] else: out.append('') elif not max and len(line) > min: # Unlimited argument", "in private messages for more transparent bot usage if matchcmd(cmdline, '#chan') and chan", "map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return ids def getaccount(irc, nick): id = initaccountcheck(nick) irc.send('WHOIS", "'#chan': 'Runs the command as if it was sent on the specified channel.", "chan, nick, '-o', args.split(' ')) elif matchcmd(cmdline, '#voice'): args = parsecmd(cmdline, '{args}') chmode(irc,", "in usage: if message: return 'Usage: %s %s' % (cmd, usage[cmd]) else: return", "elif len(arg) >= 2 and arg[0] == '{' and arg[-1] == '}': #", "global trusted, trustedlock trustedlock.acquire() if chan in trusted and account in trusted[chan]: trusted[chan].remove(account)", "max def matchcmd(line, cmd, args=None): # matchcmd(line, cmd) matched if the command cmd", "nick, '-v', args.split(' ')) elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick,", "ckid, cknick, value = accountcheck[index] accountchecklock.release() return value def removeaccountcheck(id): global accountcheck, accountchecklock", "= line[1:] else: out.append('') elif not max and len(line) > min: # Unlimited", "'Usage #trust nick') elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust', 'nick'): untrustnick = parsecmd(cmdline,", "'Nothing here') elif cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']: question = parsecmd(cmdline, '{question}') if", "origin = chan with msgslock: if msgnick not in msgs: msgs[msgnick] = []", "% (chan, nick)) else: if isauthorized(irc, chan, nick): irc.send('KICK %s %s :%s'%(chan, kicknick,", "while len(line) > 0 and line[-1] == '\\n': line = line[:-1] if len(line.split('\\t'))", "%s' % (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times = int(die[0]) if", "out.append(' '.join(line)) line = [] else: raise 
ArgsfmtError('Number of given arguments not possible", "== 1: return out[0] else: return out def parse((line, irc)): global blacklist global", "line.split('\\t') if receiver not in msgs: msgs[receiver] = [] msgs[receiver].append((sender, origin, msg)) f.close()", "= \"[query]\" else: # In a channel origin = chan with msgslock: if", "is not None: ckid, nick, ckvalue = accountcheck[index] accountcheck[index] = (id, nick, value)", "matchcmd(cmdline, '#trusted?', '[nick]'): trustnick = parsecmd(cmdline, '[nick]') if trustnick == '': trustnick =", "matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick = parsecmd(cmdline, '[nick]') if trustnick ==", "== ARG_OPT: out.append(line[0]) line = line[1:] else: out.append('') elif not max and len(line)", "= getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account == '': # '' Signifies failure return", "id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '') # Mark as failed,", "= account if line != '': lines.append(line) for line in lines: irc.msg(nick, zwsp", "ArgsfmtError('Ambiguous argument format') # Remove the command if len(line) == 0: raise ArgsfmtError('No", "cmd, args) checks whether the args match too if len(line) == 0: return", "cron = Cron() cron.start() loadtrusted() loadgods() def chmode(irc, chan, nick, mode, args): set_unset", "parsecmd(cmdline, '{question}') if len(question) < 2 or question[:2] != ':D': # Mandated by", "False if line[0] != cmd: return False if not args: return True min,", "if die[0] else 1 if '+' in die[1]: split = die[1].index('+') plus =", "128: irc.msg(reply, zwsp + 'Sorry, I don\\'t have that many. 
Can I borrow", "initaccountcheck(nick) irc.send('WHOIS ' + nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id, ''))) account = None", "you' % (chan, nick)) else: if isauthorized(irc, chan, nick): irc.send('KICK %s %s :%s'%(chan,", "nick): if args == ['']: irc.send('MODE %s %s %s' % (chan, set_unset+mode, nick))", "is not None: del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire() getid", "nick, account): id filterbynick = lambda (id, cknick, account): cknick == nick ids", "def getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not", "f=open('msgs.txt', 'w') for receiver in msgs: for sender, origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n'", "'#untrust': 'nick', '#ls-trusted': '', '#chan': 'channel command', '#help': '[command]'} if cmd in usage:", "line != '': lines.append(line) for line in lines: irc.msg(nick, zwsp + '%s: %s'", "msgs, msgslock with msgslock: f=open('msgs.txt', 'w') for receiver in msgs: for sender, origin,", "and len(line)-1 >= min and len(line)-1 <= max: return True elif not max", "(0-1) argument: [bar] out.append(ARG_OPT) elif len(arg) >= 2 and arg[0] == '{' and", "= [] if account not in trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account): global", "given out = [] for type in argtypes: if type == ARG_STD: out.append(line[0])", "> 0: chan, account = line.split() if chan not in gods: gods[chan] =", "irc.msg(reply, zwsp + 'Usage #trust nick') elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust', 'nick'):", "[reason]', '#src': '', '#msg': 'nick message', '#trusted?': '[nick]', '#trust': 'nick', '#untrust': 'nick', '#ls-trusted':", "command') if matchcmd(cmdline, '#echo'): text = parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'):", "invite' % nick) elif 
matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help', '[command]'): command = parsecmd(cmdline,", "'330': # WHOIS: is logged in as whoisnick = line[3] account = line[4]", "zwsp + '%s <%s> %s' % (origin, sender, msg)) msgs_changed = True if", "concmd=['/q', '/lt', '/st', '/lg', '/lm', '/sm'] blacklist = [] doctor = eliza.eliza() #", "ckid, cknick, ckaccount = accountcheck[index] if ckid == id: return index return None", "# Unlimited argument given out = [] for type in argtypes: if type", "ARG_OPT: out.append(line[0]) line = line[1:] else: out.append('') elif not max and len(line) >", "nick or yourself voice in case you are trusted by oonbotti2 and identified", "cmdline: cmdline.remove('') # #chan: channel override prefix # Don't allow this in private", "user to be trusted', '#help': 'give short info of command or list commands'}", "trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan in", "% (chan, set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan, account): trustedlock.acquire() if chan in trusted", "msg in msgs.pop(nick): irc.msg(nick, zwsp + '%s <%s> %s' % (origin, sender, msg))", "def execcmd(cmdline): if cmdline[0] == '/q': cron.ctrl('QUIT') elif cmdline[0] == '/lt': loadtrusted() elif", "reduce((lambda x, y: x + y), rolls) if times > 1: text =", "msgs[receiver] = [] msgs[receiver].append((sender, origin, msg)) f.close() def savemessages(): global msgs, msgslock with", "!= ': ': reply = chan cmdline = [line[3][1:]] + line[4:] while ''", "die < 1: irc.msg(reply, zwsp + 'This die is not available in your", "else: nicks = [] for nick in args: nicks.append(nick) if len(nicks) == 4:", "'#dequiet': 'remove +q from nick!*@*', '#kick': 'kicks nick with specified reason', '#src': 'paste", "out def getargnums(argtypes): min = 0 max = 0 # max = None", "times > 128: irc.msg(reply, zwsp + 'Sorry, I don\\'t have that many. 
Can", "and len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp: # If line begins with", "'nick', '#ls-trusted': '', '#chan': 'channel command', '#help': '[command]'} if cmd in usage: if", "def removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not", "line = line[1:] elif type == ARG_UNL: out.append(' '.join(line)) line = [] else:", "get account for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage #trust nick')", "def parsecmd(line, args): # Returns a tuple containing the arguments. An optional argument", "args = args.split(' ') out = [] for arg in args: if len(arg)", "'#devoice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-v', args.split(' ')) elif matchcmd(cmdline,", "in argtypes[:-1]: # Disallow non-final unlimited arguments raise ArgsfmtError('Non-final unlimited argument') if len(filter((lambda", "line[1:] else: out.append('') elif max and len(line) == max: # Optional argument given", "parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+v', args.split(' ')) elif matchcmd(cmdline, '#quiet'): arg =", "if the command cmd is used, matchcmd(line, cmd, args) checks whether the args", "ckid, nick, ckvalue = accountcheck[index] accountcheck[index] = (id, nick, value) accountchecklock.release() def getaccountcheckvalue(id):", "== '330': # WHOIS: is logged in as whoisnick = line[3] account =", "your/nick\\'s op rights', '#voice': 'give nick or yourself voice in case you are", "if cmdline[0] == '/q': cron.ctrl('QUIT') elif cmdline[0] == '/lt': loadtrusted() elif cmdline[0] ==", "# In a query: origin = \"[query]\" else: # In a channel origin", "'+v', args.split(' ')) elif matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick,", "global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for chan in trusted: for account in", "def parse((line, irc)): global blacklist global msgs, msgslock global trusted, 
trustedlock, gods, godslock", "origin2, messageN)] msgs = {} msgslock = threading.Lock() # (ID, nick, account) accountcheck", "format: ' + msg ARG_STD = 0 ARG_OPT = 1 ARG_UNL = 2", "trustedlock.acquire() if chan in trusted: lines = [] line = '' for account", "self.timedjobslock = threading.Lock() self.cronctrl = [] self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def queuejob(self, time,", "= '%i (%s + %i)' % (result + plus, text, plus) elif plus", "zwsp+text) elif matchcmd(cmdline, '#op'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+o', args.split('", "rolls = [random.randint(1, die) for i in xrange(times)] result = reduce((lambda x, y:", "as whoisnick = line[3] account = line[4] for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account)", "'.join([str(i) for i in rolls])) else: text = str(result) if plus > 0:", "'[nick]') if trustnick == '': trustnick = nick account = getaccount(irc, trustnick) if", "> 0: chan, account = line.split() addtrusted(chan, account) f.close() def loadgods(): global gods,", "'#help'): if matchcmd(cmdline, '#help', '[command]'): command = parsecmd(cmdline, '[command]') helptext = help(command) if", "= 0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def __init__(self): self.timedjobs = []", "== ']': # Optional (0-1) argument: [bar] out.append(ARG_OPT) elif len(arg) >= 2 and", "trusted, trustedlock trustedlock.acquire() trusted = {} trustedlock.release() f=open('trusted.txt', 'r') for line in f:", "2 and arg[0] == '{' and arg[-1] == '}': # Unlimited (0-) number", "'[nick]', '#quiet': 'nick', '#dequiet': 'nick', '#kick': 'nick [reason]', '#src': '', '#msg': 'nick message',", "oonbotti2', '#trust': 'add nick to trusted list', '#untrust': 'remove nick from trusted list',", "nick)) else: if isauthorized(irc, chan, nick): irc.send('KICK %s %s :%s'%(chan, kicknick, kickreason)) else:", "% (chan, account)) f.close 
trustedlock.release() def init(): global cron cron = Cron() cron.start()", "'#ls-trusted'): trustedlock.acquire() if chan in trusted: lines = [] line = '' for", "import random import re import time concmd=['/q', '/lt', '/st', '/lg', '/lm', '/sm'] blacklist", "in trusted and account in trusted[chan]: trustedlock.release() return True else: trustedlock.release() return False", "cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run = True while run: time.sleep(1) #", "zwsp + '%s is trusted' % trustnick) else: irc.msg(reply, zwsp + '%s is", "nick in blacklist: return elif len(line) >= 4 and len(line[3]) >= len(zwsp)+1 and", "not in gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted() else: irc.msg(reply, zwsp + 'Failed to", "query', '#chan': 'Runs the command as if it was sent on the specified", "chan, nick): account = getaccount(irc, untrustnick) # If account can't be found (e.g.", "if msgnick not in msgs: msgs[msgnick] = [] msgs[msgnick].append((nick, origin, message)) savemessages() else:", "die = '%' if die[1] == '%' else int(die[1]) if die == '%':", "[] for arg in args: if len(arg) >= 2 and arg[0] == '['", "cmdline[0] == '/sm': savemessages() def usage(cmd, message = True): usage = {'#echo': 'text',", "in trusted[chan]: f.write('%s %s\\n' % (chan, account)) f.close trustedlock.release() def init(): global cron", "len(line) > 0 and line[-1] == '\\n': line = line[:-1] if len(line.split('\\t')) ==", "accountcheck[index] accountchecklock.release() return value def removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id)", "argument string raise ArgsfmtError('Ambiguous argument format') # Remove the command if len(line) ==", "is not trusted' % trustnick) else: irc.msg(reply, zwsp + 'Failed to get account", "None if number of arguments is unlimited for argtype in argtypes: if argtype", "origin, message)) savemessages() else: 
irc.msg(reply, zwsp + 'Usage: #msg nick message') elif matchcmd(cmdline,", "if index is not None: ckid, nick, ckvalue = accountcheck[index] accountcheck[index] = (id,", "'remove your/nick\\'s op rights', '#voice': 'give nick or yourself voice in case you", "= parsecmd(cmdline, '{question}') if len(question) < 2 or question[:2] != ':D': # Mandated", "The index returned cannot be guaranteed valid if lock is released between call", "for line in f: while len(line) > 0 and line[-1] == '\\n': line", "accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: ckid, cknick, value", "to increment if max is unlimited max += 1 elif argtype == ARG_OPT:", "0 and line[-1] == '\\n': line = line[:-1] if len(line) > 0: chan,", "= {'#echo': '#echo text back', '#op': 'give nick or yourself op rights in", "max = getargnums(parseargsfmt(args)) if max and len(line)-1 >= min and len(line)-1 <= max:", "= [] gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release() def savetrusted(): global trusted, trustedlock trustedlock.acquire()", "should only be used internally # The index returned cannot be guaranteed valid", "sent on the specified channel. Requires user to be trusted', '#help': 'give short", "your or nick\\'s voice in case you are trusted by oonbotti2 and identified", "for arg in args: if len(arg) >= 2 and arg[0] == '[' and", "== '}': # Unlimited (0-) number of arguments: {baz} out.append(ARG_UNL) else: # Normal", "= lambda (id, nick, account): id filterbynick = lambda (id, cknick, account): cknick", "for i in rolls])) else: text = str(result) if plus > 0: text", "% trustnick) else: irc.msg(reply, zwsp + 'Usage: #trusted? 
[nick]') elif matchcmd(cmdline, '#trust'): if", "msgs = {} msgslock = threading.Lock() # (ID, nick, account) accountcheck = []", "[] for type in argtypes: if type == ARG_STD or type == ARG_OPT:", "account): global trusted, trustedlock trustedlock.acquire() if chan not in trusted: trusted[chan] = []", "safe not to get truncated line += ', ' + account else: lines.append(line)", "getaccount(irc, trustnick) if account: addtrusted(chan, account) savetrusted() else: irc.msg(reply, zwsp + 'Failed to", "accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def __init__(self): self.timedjobs = [] self.timedjobslock =", "if isauthorized(irc, newchan, nick): chan = newchan cmdline = newcmdline else: irc.msg(chan, zwsp", "'r') for line in f: while len(line) > 0 and line[-1] == '\\n':", "trustedlock.acquire() f=open('trusted.txt', 'w') for chan in trusted: for account in trusted[chan]: f.write('%s %s\\n'", "getindexbyaccountcheckid(id): global accountcheck for index in range(len(accountcheck)): ckid, cknick, ckaccount = accountcheck[index] if", "'#untrust': 'remove nick from trusted list', '#ls-trusted': 'list nicks that are trusted. 
use", "'nick [reason]', '#src': '', '#msg': 'nick message', '#trusted?': '[nick]', '#trust': 'nick', '#untrust': 'nick',", "irc.send('MODE %s %s %s' % (chan, set_unset+mode*4, ' '.join(nicks))) nicks = [] if", "= getindexbyaccountcheckid(id) if index is not None: ckid, cknick, value = accountcheck[index] accountchecklock.release()", "account = getaccount(irc, trustnick) if account: if istrusted(chan, account): irc.msg(reply, zwsp + '%s", "filterbynick = lambda (id, cknick, account): cknick == nick ids = map(getid, filter(filterbynick,", "WHOIS: is logged in as whoisnick = line[3] account = line[4] for id", "gods or account not in gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted() else: irc.msg(reply, zwsp", "= parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'): args = parsecmd(cmdline, '{args}') chmode(irc,", "def rmtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan in trusted and account", "0 # max = None if number of arguments is unlimited for argtype", "'' argtypes = parseargsfmt(args) if len(argtypes) >= 1 and ARG_UNL in argtypes[:-1]: #", "+ 'Not op') msgs_changed = False with msgslock: if (line[1] == 'PRIVMSG' or", "An optional argument that didn't get a value will be assigned '' argtypes", "= cmdline[0][1:].split('d') times = int(die[0]) if die[0] else 1 if '+' in die[1]:", "too if len(line) == 0: return False if line[0] != cmd: return False", "chan, nick, '+q', [arg + '!*@*']) elif matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline, 'nick')", "= map((lambda (time, fn): (time-1, fn)), self.timedjobs) torun = map((lambda (time, fn): fn),", "= 0 max = 0 # max = None if number of arguments", "private messages for more transparent bot usage if matchcmd(cmdline, '#chan') and chan !=", "'%s: %s' % (chan, line)) trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp + '%s:", "borrow yours?') else: rolls = [random.randint(1, die) for i in xrange(times)] 
result =", "[bar] out.append(ARG_OPT) elif len(arg) >= 2 and arg[0] == '{' and arg[-1] ==", "'#op': 'give nick or yourself op rights in case you are trusted by", "nick): irc.send('KICK %s %s :%s'%(chan, kicknick, kickreason)) else: irc.msg(reply, zwsp + 'Usage #kick", "set_unset+mode, nick)) else: nicks = [] for nick in args: nicks.append(nick) if len(nicks)", "value): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None:", "msgslock: msgs = {} f = open('msgs.txt', 'r') for line in f: while", "<= max: return True elif not max and len(line)-1 >= min: return True", "not in trusted: trusted[chan] = [] if account not in trusted[chan]: trusted[chan].append(account) trustedlock.release()", "parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, trustnick) if account: addtrusted(chan,", "if matchcmd(cmdline, '#chan', 'channel {command}'): newchan, newcmdline = parsecmd(cmdline, 'channel {command}') newcmdline =", "need to be high self.cronctrllock.acquire() for cmd in self.cronctrl: if cmd == 'QUIT':", "Don't try to increment if max is unlimited max += 1 elif argtype", "'/st': savetrusted() elif cmdline[0] == '/lg': loadgods() elif cmdline[0] == '/lm': loadmessages() elif", "= {} godslock = threading.Lock() # receiver: [(sender1, origin1, message1), (sender2, origin2, message2),", "%s %s %s' % (chan, set_unset+mode*4, ' '.join(nicks))) nicks = [] if nicks:", "trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for chan in trusted: for account in trusted[chan]:", "value = accountcheck[index] accountchecklock.release() return value def removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire() index", "try to increment if max is unlimited max += 1 elif argtype ==", "def getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire() getid = lambda (id, nick, account): id", "zwsp + 
'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and chan == '#osdev-offtopic': irc.msg(reply, zwsp +", "nicks.append(nick) if len(nicks) == 4: irc.send('MODE %s %s %s' % (chan, set_unset+mode*4, '", "def __init__(self, msg): self.msg = msg def __str__(self): return 'Error with argument format:", "(id, cknick, account): cknick == nick ids = map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return", "%s %s %s' % (chan, set_unset+mode, nick)) else: nicks = [] for nick", "oonbotti2 and identified with NickServ', '#devoice': 'remove your or nick\\'s voice in case", "the specified channel. Requires user to be trusted', '#help': 'give short info of", "self.timedjobs)) self.timedjobs = filter((lambda (time, fn): time>0), self.timedjobs) self.timedjobslock.release() for fn in torun:", "'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, untrustnick) # If account can't", "account: godslock.acquire() if chan not in gods or account not in gods[chan]: rmtrusted(chan,", "{command}') newcmdline = newcmdline.split(' ') if isauthorized(irc, newchan, nick): chan = newchan cmdline", "= threading.Lock() # (ID, nick, account) accountcheck = [] accountcheckid = 0 accountchecklock", "= accountcheck[index] accountcheck[index] = (id, nick, value) accountchecklock.release() def getaccountcheckvalue(id): global accountcheck, accountchecklock", "None: # Don't try to increment if max is unlimited max += 1", "id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1] == '318': # WHOIS: End of", "ArgsfmtError('No command given') line = line[1:] min, max = getargnums(argtypes) if len(line) ==", "')) elif matchcmd(cmdline, '#voice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+v', args.split('", "1:]) die[1] = die[1][:split] else: plus = 0 die = '%' if die[1]", "'text', '#op': '[nick]', '#deop': '[nick]', '#voice': '[nick]', '#devoice': '[nick]', '#quiet': 'nick', '#dequiet': 'nick',", 
"irc.msg(reply, zwsp + 'Usage #kick nick reason') elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp +", "elif cmdline[0] == '/lg': loadgods() elif cmdline[0] == '/lm': loadmessages() elif cmdline[0] ==", "return elif len(line) >= 4 and len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp:", "is released between call to getindexbyaccountcheckid and use! def getindexbyaccountcheckid(id): global accountcheck for", "%s %s' % (chan, set_unset+mode*4, ' '.join(nicks))) nicks = [] if nicks: irc.send('MODE", "[bar] {baz} ) -> [ARG_STD, ARG_OPT, ARG_UNL] args = args.split(' ') out =", "getindexbyaccountcheckid and use! def getindexbyaccountcheckid(id): global accountcheck for index in range(len(accountcheck)): ckid, cknick,", "max += 1 elif argtype == ARG_UNL: max = None return min, max", "[] self.timedjobslock = threading.Lock() self.cronctrl = [] self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def queuejob(self,", "- %i)' % (result + plus, text, -plus) irc.msg(reply, zwsp + text) elif", "msg)) msgs_changed = True if msgs_changed: savemessages() def execcmd(cmdline): if cmdline[0] == '/q':", "args match too if len(line) == 0: return False if line[0] != cmd:", "trustedlock.release() def rmtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan in trusted and", "the command cmd is used, matchcmd(line, cmd, args) checks whether the args match", "\"[query]\" else: # In a channel origin = chan with msgslock: if msgnick", "with msgslock: if (line[1] == 'PRIVMSG' or line[1] == 'JOIN') and nick in", ") -> [ARG_STD, ARG_OPT, ARG_UNL] args = args.split(' ') out = [] for", "== ARG_UNL: max = None return min, max def matchcmd(line, cmd, args=None): #", "zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and chan == '#esoteric': irc.msg(reply, zwsp + 'Nothing here')", "(0-) number of arguments: {baz} out.append(ARG_UNL) else: # Normal argument: foo out.append(ARG_STD) return", "if nick or yourself is trusted by oonbotti2', 
'#trust': 'add nick to trusted", "accountchecklock.release() return id # Warning: this does no locking, should only be used", "format used by matchcmd and parsecmd # e.g. parseargsfmt(\"foo [bar] {baz} ) ->", "self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run", "account: if istrusted(chan, untrustnick): account = untrustnick if account: godslock.acquire() if chan not", "{} trustedlock = threading.Lock() gods = {} godslock = threading.Lock() # receiver: [(sender1,", "is used because None is already reserved elif line[1] == 'INVITE' and line[2]", "matchcmd(cmdline, '#esoteric') and chan == '#esoteric': irc.msg(reply, zwsp + 'Nothing here') elif cmdline[0]", "doctor, die_expr line = line.split(' ') nick = line[0].split('!')[0][1:] chan = line[2] if", "nick ids = map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return ids def getaccount(irc, nick): id", "ARG_OPT: if max != None: # Don't try to increment if max is", "= parsecmd(cmdline, 'nick {reason}') if kicknick.lower() == irc.nick: irc.send('KICK %s %s :Fuck you'", "class Cron(threading.Thread): def __init__(self): self.timedjobs = [] self.timedjobslock = threading.Lock() self.cronctrl = []", "irc.msg(reply, zwsp + '%s is not trusted' % trustnick) else: irc.msg(reply, zwsp +", "nick or yourself op rights in case you are trusted by oonbotti2 and", "in trusted and account in trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global trusted, trustedlock", "+ 'Failed to get account for %s' % trustnick) else: irc.msg(reply, zwsp +", "accountchecklock.acquire() getid = lambda (id, nick, account): id filterbynick = lambda (id, cknick,", "getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None:", "cknick, 
ckaccount = accountcheck[index] if ckid == id: return index return None def", "+ account) <= 255: # Playing it safe not to get truncated line", "fn)), self.timedjobs) torun = map((lambda (time, fn): fn), filter((lambda (time, fn): time<=0), self.timedjobs))", "ARG_OPT or type == ARG_UNL), argtypes)) > 1: # Disallow more than one", "and len(line) == max: # Optional argument given out = [] for type", "to be trusted', '#help': 'give short info of command or list commands'} if", "command given') line = line[1:] min, max = getargnums(argtypes) if len(line) == min:", "(%s - %i)' % (result + plus, text, -plus) irc.msg(reply, zwsp + text)", "> 128: irc.msg(reply, zwsp + 'Sorry, I don\\'t have that many. Can I", "was sent on the specified channel. Requires user to be trusted', '#help': 'give", "len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp: # If line begins with ZWSP", "argtype == ARG_OPT: if max != None: # Don't try to increment if", "accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire() getid = lambda (id, nick, account):", "== '/lt': loadtrusted() elif cmdline[0] == '/st': savetrusted() elif cmdline[0] == '/lg': loadgods()", "# '' Signifies failure return None else: return account def isauthorized(irc, chan, nick):", "and identified with NickServ', '#devoice': 'remove your or nick\\'s voice in case you", "f=open('gods.txt', 'r') for line in f: while len(line) > 0 and line[-1] ==", "self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def", "+ line[3]) elif line[1] == '482': irc.msg(line[3], zwsp + 'Not op') msgs_changed =", "usage = {'#echo': 'text', '#op': '[nick]', '#deop': '[nick]', '#voice': '[nick]', '#devoice': '[nick]', '#quiet':", "== 0: return False if line[0] != cmd: return False if not args:", "get account for %s' % untrustnick) 
else: irc.msg(reply, zwsp + 'Usage #untrust nick')", "if len(argtypes) >= 1 and ARG_UNL in argtypes[:-1]: # Disallow non-final unlimited arguments", "in args: if len(arg) >= 2 and arg[0] == '[' and arg[-1] ==", "not in msgs: msgs[receiver] = [] msgs[receiver].append((sender, origin, msg)) f.close() def savemessages(): global", "removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None:", "nicks = [] for nick in args: nicks.append(nick) if len(nicks) == 4: irc.send('MODE", "line[1:] elif type == ARG_UNL: out.append(' '.join(line)) line = [] else: raise ArgsfmtError('Number", "[irc.nick, irc.nick+',', irc.nick+':']: question = parsecmd(cmdline, '{question}') if len(question) < 2 or question[:2]", "plus = int(die[1][split + 1:]) die[1] = die[1][:split] elif '-' in die[1]: split", "elif matchcmd(cmdline, '#esoteric') and chan == '#esoteric': irc.msg(reply, zwsp + 'Nothing here') elif", "matchcmd(line, cmd, args) checks whether the args match too if len(line) == 0:", "'#chan', 'channel {command}'): newchan, newcmdline = parsecmd(cmdline, 'channel {command}') newcmdline = newcmdline.split(' ')", "matchcmd(cmdline, '#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and chan == '#osdev-offtopic':", "trusted' % trustnick) else: irc.msg(reply, zwsp + 'Failed to get account for %s'", "'give short info of command or list commands'} if cmd=='': return '#echo #op", "nick): account = getaccount(irc, nick) if account: return istrusted(chan, account) else: irc.msg(nick, zwsp", "'#trusted?', '[nick]'): trustnick = parsecmd(cmdline, '[nick]') if trustnick == '': trustnick = nick", "parseargsfmt(args) if len(argtypes) >= 1 and ARG_UNL in argtypes[:-1]: # Disallow non-final unlimited", "savemessages() def execcmd(cmdline): if cmdline[0] == '/q': cron.ctrl('QUIT') elif cmdline[0] == '/lt': loadtrusted()", "+ plus, text, -plus) irc.msg(reply, zwsp 
+ text) elif line[1] == '330': #", "nick') elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust', 'nick'): untrustnick = parsecmd(cmdline, 'nick') if", "account = untrustnick if account: godslock.acquire() if chan not in gods or account", "cmd in usage: if message: return 'Usage: %s %s' % (cmd, usage[cmd]) else:", "= eliza.eliza() # channel: [user1, user2, ..., userN] trusted = {} trustedlock =", "as-is if not account: if istrusted(chan, untrustnick): account = untrustnick if account: godslock.acquire()", "min and len(line)-1 <= max: return True elif not max and len(line)-1 >=", "== '{' and arg[-1] == '}': # Unlimited (0-) number of arguments: {baz}", "index is not None: ckid, cknick, value = accountcheck[index] accountchecklock.release() return value def", "whether the args match too if len(line) == 0: return False if line[0]", "trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for chan in trusted: for account in trusted[chan]: f.write('%s", "# Don't try to increment if max is unlimited max += 1 elif", "begins with ZWSP return if line[1]=='PRIVMSG' and line[3][:2] != ': ': reply =", "line[:-1] if len(line) > 0: chan, account = line.split() addtrusted(chan, account) f.close() def", "+ '%s: %s' % (chan, line)) trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp +", "elif matchcmd(cmdline, '#voice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+v', args.split(' '))", "in torun: fn() def loadmessages(): global msgs, msgslock with msgslock: msgs = {}", "return 'Usage: %s %s' % (cmd, usage[cmd]) else: return usage[cmd] else: return None", "the command if len(line) == 0: raise ArgsfmtError('No command given') line = line[1:]", "out[0] else: return out def parse((line, irc)): global blacklist global msgs, msgslock global", "== '%': if times != 1: irc.msg(reply, zwsp + 'Not supported') else: irc.msg(reply,", "with NickServ', '#deop': 'remove your/nick\\'s op rights', '#voice': 'give nick or yourself voice", 
"irc.msg(line[3], zwsp + 'Not op') msgs_changed = False with msgslock: if (line[1] ==", "if ckid == id: return index return None def setaccountcheckvalue(id, value): global accountcheck,", "queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release()", "cmdline[0] == '/lt': loadtrusted() elif cmdline[0] == '/st': savetrusted() elif cmdline[0] == '/lg':", "assigned '' argtypes = parseargsfmt(args) if len(argtypes) >= 1 and ARG_UNL in argtypes[:-1]:", "trusted[chan]: f.write('%s %s\\n' % (chan, account)) f.close trustedlock.release() def init(): global cron cron", "gods, godslock global doctor, die_expr line = line.split(' ') nick = line[0].split('!')[0][1:] chan", "for type in argtypes: if type == ARG_STD or type == ARG_OPT: out.append(line[0])", "get account for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage: #trusted? [nick]')", "elif cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']: question = parsecmd(cmdline, '{question}') if len(question) <", "accountcheck[index] accountcheck[index] = (id, nick, value) accountchecklock.release() def getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire()", "'PRIVMSG' or line[1] == 'JOIN') and nick in msgs: for sender, origin, msg", "+ '%s: %s' % (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times =", "<= 255: # Playing it safe not to get truncated line += ',", "nick = line[0].split('!')[0][1:] chan = line[2] if line[2][0] == '#' else nick zwsp", "'%' if die[1] == '%' else int(die[1]) if die == '%': if times", "region.') elif times < 1: irc.msg(reply, zwsp + 'What exactly do you want", "usage: if message: return 'Usage: %s %s' % (cmd, usage[cmd]) else: return usage[cmd]", "'': lines.append(line) for line in lines: irc.msg(nick, zwsp + '%s: %s' % (chan,", "argument: [bar] 
out.append(ARG_OPT) elif len(arg) >= 2 and arg[0] == '{' and arg[-1]", "f = open('msgs.txt', 'r') for line in f: while len(line) > 0 and", "'Usage #chan channel command') if matchcmd(cmdline, '#echo'): text = parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text)", "if die == '%': if times != 1: irc.msg(reply, zwsp + 'Not supported')", "irc.send('MODE %s %s %s' % (chan, set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan, account): trustedlock.acquire()", "to get account for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage #trust", "matchcmd(cmdline, '#untrust', 'nick'): untrustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account =", "line.split(' ') nick = line[0].split('!')[0][1:] chan = line[2] if line[2][0] == '#' else", "else: return None def help(cmd): helptext = {'#echo': '#echo text back', '#op': 'give", "threading import random import re import time concmd=['/q', '/lt', '/st', '/lg', '/lm', '/sm']", "'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, trustnick) if account: addtrusted(chan, account)", "if msgs_changed: savemessages() def execcmd(cmdline): if cmdline[0] == '/q': cron.ctrl('QUIT') elif cmdline[0] ==", "= {} f = open('msgs.txt', 'r') for line in f: while len(line) >", "Can I borrow yours?') else: rolls = [random.randint(1, die) for i in xrange(times)]", "be found (e.g. 
it has been deleted, use the parameter as-is if not", "if type == ARG_STD or type == ARG_OPT: out.append(line[0]) line = line[1:] elif", "else: lines.append(line) line = account if line != '': lines.append(line) for line in", "'') # Mark as failed, '' is used because None is already reserved", "len(line) > 0: chan, account = line.split() addtrusted(chan, account) f.close() def loadgods(): global", "= '%' if die[1] == '%' else int(die[1]) if die == '%': if", "type == ARG_STD: out.append(line[0]) line = line[1:] else: out.append('') elif max and len(line)", "= threading.Lock() gods = {} godslock = threading.Lock() # receiver: [(sender1, origin1, message1),", "lines = [] line = '' for account in trusted[chan]: if line ==", "godslock.release() savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account for %s' %", "are trusted by oonbotti2 and identified with NickServ', '#deop': 'remove your/nick\\'s op rights',", "Optional argument given out = [] for type in argtypes: if type ==", "= threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def __init__(self): self.timedjobs = [] self.timedjobslock = threading.Lock()", "= parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '-q', [arg + '!*@*']) elif matchcmd(cmdline, '#devoice'):", "# WHOIS: is logged in as whoisnick = line[3] account = line[4] for", "arg[-1] == ']': # Optional (0-1) argument: [bar] out.append(ARG_OPT) elif len(arg) >= 2", "'#prefix') and chan == '#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if", "use only in a query', '#chan': 'Runs the command as if it was", "return True else: trustedlock.release() return False def initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire()", "if max is unlimited max += 1 elif argtype == ARG_UNL: max =", "% (result + plus, text, plus) elif plus < 0: text = '%i", "== irc.nick: irc.send('KICK %s %s :Fuck 
you' % (chan, nick)) else: if isauthorized(irc,", "#trust #untrust #ls-trusted #chan #help' elif cmd=='me': return 'I shall.' elif cmd in", "arguments is unlimited for argtype in argtypes: if argtype == ARG_STD: min +=", "die[1] = die[1][:split] elif '-' in die[1]: split = die[1].index('-') plus = -int(die[1][split", "+= ', ' + account else: lines.append(line) line = account if line !=", "account in trusted[chan]: trustedlock.release() return True else: trustedlock.release() return False def initaccountcheck(nick): global", "threading.Lock() gods = {} godslock = threading.Lock() # receiver: [(sender1, origin1, message1), (sender2,", "be trusted', '#help': 'give short info of command or list commands'} if cmd=='':", "account = getaccount(irc, untrustnick) # If account can't be found (e.g. it has", "and len(line)-1 >= min: return True else: return False def parsecmd(line, args): #", "'#echo text back', '#op': 'give nick or yourself op rights in case you", "isauthorized(irc, chan, nick): account = getaccount(irc, untrustnick) # If account can't be found", "int(die[0]) if die[0] else 1 if '+' in die[1]: split = die[1].index('+') plus", "locking, should only be used internally # The index returned cannot be guaranteed", "zwsp + text) elif line[1] == '330': # WHOIS: is logged in as", "godslock global doctor, die_expr line = line.split(' ') nick = line[0].split('!')[0][1:] chan =", "if line[0] != cmd: return False if not args: return True min, max", "{message}') if chan == nick: # In a query: origin = \"[query]\" else:", "else int(die[1]) if die == '%': if times != 1: irc.msg(reply, zwsp +", "isauthorized(irc, newchan, nick): chan = newchan cmdline = newcmdline else: irc.msg(chan, zwsp +", "accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: del accountcheck[index] accountchecklock.release() def", "'/lm': loadmessages() elif cmdline[0] == '/sm': savemessages() def usage(cmd, message = True): usage", "(sender2, origin2, message2), ..., 
(senderN, origin2, messageN)] msgs = {} msgslock = threading.Lock()", "elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust', 'nick'): trustnick = parsecmd(cmdline, 'nick') if isauthorized(irc,", "None: ckid, nick, ckvalue = accountcheck[index] accountcheck[index] = (id, nick, value) accountchecklock.release() def", "> 0: text = '%i (%s + %i)' % (result + plus, text,", "{reason}'): kicknick, kickreason = parsecmd(cmdline, 'nick {reason}') if kicknick.lower() == irc.nick: irc.send('KICK %s", "account): global trusted, trustedlock trustedlock.acquire() if chan in trusted and account in trusted[chan]:", "(nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times = int(die[0]) if die[0] else", "to trusted list', '#untrust': 'remove nick from trusted list', '#ls-trusted': 'list nicks that", "if chan == nick: # In a query: origin = \"[query]\" else: #", "'Usage #kick nick reason') elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline,", "len(line)-1 <= max: return True elif not max and len(line)-1 >= min: return", "def getindexbyaccountcheckid(id): global accountcheck for index in range(len(accountcheck)): ckid, cknick, ckaccount = accountcheck[index]", "line.split() addtrusted(chan, account) f.close() def loadgods(): global gods, godslock godslock.acquire() gods = {}", "line.split() if chan not in gods: gods[chan] = [] gods[chan].append(account) addtrusted(chan, account) f.close()", "not available in your space-time region.') elif times < 1: irc.msg(reply, zwsp +", "'#esoteric': irc.msg(reply, zwsp + 'Nothing here') elif cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']: question", "< 1: irc.msg(reply, zwsp + 'This die is not available in your space-time", "channel origin = chan with msgslock: if msgnick not in msgs: msgs[msgnick] =", "Returns a tuple containing the arguments. 
An optional argument that didn't get a", "cmd=='': return '#echo #op #deop #voice #devoice #quiet #dequiet #kick #src #msg #trusted?", "matchcmd(cmdline, '#echo'): text = parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'): args =", "max and len(line)-1 >= min: return True else: return False def parsecmd(line, args):", "'%s: #invite has been removed. Use manual invite' % nick) elif matchcmd(cmdline, '#help'):", "int(die[1]) if die == '%': if times != 1: irc.msg(reply, zwsp + 'Not", "'nick', '#dequiet': 'nick', '#kick': 'nick [reason]', '#src': '', '#msg': 'nick message', '#trusted?': '[nick]',", "+ 'Usage: #msg nick message') elif matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick", "'318': # WHOIS: End of /WHOIS list. whoisnick = line[3] for id in", "== ARG_OPT: out.append(line[0]) line = line[1:] elif type == ARG_UNL: out.append(' '.join(line)) line", "unlimited max += 1 elif argtype == ARG_UNL: max = None return min,", "and account in trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global trusted, trustedlock trustedlock.acquire() trusted", "% (chan, line)) trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp + '%s: #invite has", "== 'PRIVMSG' or line[1] == 'JOIN') and nick in msgs: for sender, origin,", "if max is unlimited max += 1 elif argtype == ARG_OPT: if max", "loadtrusted(): global trusted, trustedlock trustedlock.acquire() trusted = {} trustedlock.release() f=open('trusted.txt', 'r') for line", "= int(die[0]) if die[0] else 1 if '+' in die[1]: split = die[1].index('+')", "%s %s %s' % (chan, set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan, account): trustedlock.acquire() if", "set_unset = mode[0] mode = mode[1:] if isauthorized(irc, chan, nick): if args ==", "account for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage #trust nick') elif", "1: return out[0] else: return out def parse((line, irc)): global 
blacklist global msgs,", "+ 'Usage #trust nick') elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust', 'nick'): untrustnick =", "nick account = getaccount(irc, trustnick) if account: if istrusted(chan, account): irc.msg(reply, zwsp +", "== ARG_STD: out.append(line[0]) line = line[1:] else: out.append('') elif max and len(line) ==", "= newcmdline else: irc.msg(chan, zwsp + 'Usage #chan channel command') if matchcmd(cmdline, '#echo'):", "to oonbotti2\\'s git repo', '#msg': 'send a message to nick', '#trusted?': 'tell you", "addtrusted(chan, account) f.close() godslock.release() def savetrusted(): global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for", "are trusted by oonbotti2 and identified with NickServ', '#devoice': 'remove your or nick\\'s", "account = None while account == None: account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if", "if type == ARG_STD: out.append(line[0]) line = line[1:] else: out.append('') elif max and", "== '/lm': loadmessages() elif cmdline[0] == '/sm': savemessages() def usage(cmd, message = True):", "')) elif matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '+q', [arg", "help(command) if helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and chan == '#esoteric': irc.msg(reply,", "more transparent bot usage if matchcmd(cmdline, '#chan') and chan != nick: if matchcmd(cmdline,", "line in f: while len(line) > 0 and line[-1] == '\\n': line =", "'%s (%s)' % (str(result), ', '.join([str(i) for i in rolls])) else: text =", "-plus) irc.msg(reply, zwsp + text) elif line[1] == '330': # WHOIS: is logged", "#msg #trusted? #trust #untrust #ls-trusted #chan #help' elif cmd=='me': return 'I shall.' elif", "#help' elif cmd=='me': return 'I shall.' 
elif cmd in helptext: if helptext[cmd]: return", "argtype == ARG_UNL: max = None return min, max def matchcmd(line, cmd, args=None):", "':'+zwsp: # If line begins with ZWSP return if line[1]=='PRIVMSG' and line[3][:2] !=", ": setaccountcheckvalue(id, ''))) account = None while account == None: account = getaccountcheckvalue(id)", "# If line begins with ZWSP return if line[1]=='PRIVMSG' and line[3][:2] != ':", "zwsp + 'Usage #chan channel command') if matchcmd(cmdline, '#echo'): text = parsecmd(cmdline, '{text}')", "if account: addtrusted(chan, account) savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account", "in f: while len(line) > 0 and line[-1] == '\\n': line = line[:-1]", "has been removed. Use manual invite' % nick) elif matchcmd(cmdline, '#help'): if matchcmd(cmdline,", "+ '%s <%s> %s' % (origin, sender, msg)) msgs_changed = True if msgs_changed:", "line[1] == 'INVITE' and line[2] == irc.nick and line[3][1:] in irc.chan.split(' '): if", "irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+o',", "index = getindexbyaccountcheckid(id) if index is not None: ckid, nick, ckvalue = accountcheck[index]", "lock is released between call to getindexbyaccountcheckid and use! 
def getindexbyaccountcheckid(id): global accountcheck", "untrustnick if account: godslock.acquire() if chan not in gods or account not in", "global msgs, msgslock global trusted, trustedlock, gods, godslock global doctor, die_expr line =", "line[2] == irc.nick and line[3][1:] in irc.chan.split(' '): if isauthorized(irc, line[3][1:], nick): irc.send('JOIN", "[user1, user2, ..., userN] trusted = {} trustedlock = threading.Lock() gods = {}", "chan = line[2] if line[2][0] == '#' else nick zwsp = '\\xe2\\x80\\x8b' if", "== ARG_UNL: out.append(' '.join(line)) line = [] else: raise ArgsfmtError('Number of given arguments", "yours?') else: rolls = [random.randint(1, die) for i in xrange(times)] result = reduce((lambda", "untrustnick): account = untrustnick if account: godslock.acquire() if chan not in gods or", "+ '%s%s' % (random.randint(0,9), random.randint(0,9))) elif die < 1: irc.msg(reply, zwsp + 'This", "chan not in gods or account not in gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted()", "be guaranteed valid if lock is released between call to getindexbyaccountcheckid and use!", "nicks: irc.send('MODE %s %s %s' % (chan, set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan, account):", "global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: ckid,", "args.split(' ')) elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick, kickreason =", "elif matchcmd(cmdline, '#prefix') and chan == '#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline,", "for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage: #trusted? 
[nick]') elif matchcmd(cmdline,", "cknick, account): cknick == nick ids = map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return ids", "None)) accountcheckid += 1 accountchecklock.release() return id # Warning: this does no locking,", "argtypes: if argtype == ARG_STD: min += 1 if max != None: #", "['']: irc.send('MODE %s %s %s' % (chan, set_unset+mode, nick)) else: nicks = []", "1 accountchecklock.release() return id # Warning: this does no locking, should only be", "msgs_changed = False with msgslock: if (line[1] == 'PRIVMSG' or line[1] == 'JOIN')", "#voice #devoice #quiet #dequiet #kick #src #msg #trusted? #trust #untrust #ls-trusted #chan #help'", "Only standard arguments given out = [] for type in argtypes: if type", "trusted' % trustnick) else: irc.msg(reply, zwsp + '%s is not trusted' % trustnick)", "return ids def getaccount(irc, nick): id = initaccountcheck(nick) irc.send('WHOIS ' + nick) cron.queuejob(5,", "loadtrusted() elif cmdline[0] == '/st': savetrusted() elif cmdline[0] == '/lg': loadgods() elif cmdline[0]", "by oonbotti2 and identified with NickServ', '#deop': 'remove your/nick\\'s op rights', '#voice': 'give", "#untrust nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan in trusted: lines = []", "nick message') elif matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick = parsecmd(cmdline, '[nick]')", "+ 'What exactly do you want me to do?') elif times > 128:", "x + y), rolls) if times > 1: text = '%s (%s)' %", "accountcheck, accountchecklock accountchecklock.acquire() getid = lambda (id, nick, account): id filterbynick = lambda", "else: irc.msg(reply, zwsp + '%s%s' % (random.randint(0,9), random.randint(0,9))) elif die < 1: irc.msg(reply,", "x, y: x + y), rolls) if times > 1: text = '%s", "'{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick,", "= int(die[1][split + 1:]) die[1] = die[1][:split] 
elif '-' in die[1]: split =", "line[3][:2] != ': ': reply = chan cmdline = [line[3][1:]] + line[4:] while", "msgs: msgs[receiver] = [] msgs[receiver].append((sender, origin, msg)) f.close() def savemessages(): global msgs, msgslock", "return out[0] else: return out def parse((line, irc)): global blacklist global msgs, msgslock", "'#trusted?': 'tell you if nick or yourself is trusted by oonbotti2', '#trust': 'add", "'JOIN') and nick in msgs: for sender, origin, msg in msgs.pop(nick): irc.msg(nick, zwsp", "arguments not possible for given format string') if len(out) == 1: return out[0]", "as failed, '' is used because None is already reserved elif line[1] ==", "type == ARG_OPT or type == ARG_UNL), argtypes)) > 1: # Disallow more", "to get account for %s' % untrustnick) else: irc.msg(reply, zwsp + 'Usage #untrust", "msgs_changed: savemessages() def execcmd(cmdline): if cmdline[0] == '/q': cron.ctrl('QUIT') elif cmdline[0] == '/lt':", "= getaccount(irc, nick) if account: return istrusted(chan, account) else: irc.msg(nick, zwsp + 'Identify", "if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick = parsecmd(cmdline, '[nick]') if trustnick == '': trustnick", "len(line + ', ' + account) <= 255: # Playing it safe not", "that many. 
Can I borrow yours?') else: rolls = [random.randint(1, die) for i", "chan != nick: if matchcmd(cmdline, '#chan', 'channel {command}'): newchan, newcmdline = parsecmd(cmdline, 'channel", "f.close() godslock.release() def savetrusted(): global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for chan in", "return None def setaccountcheckvalue(id, value): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if", "oonbotti2 and identified with NickServ', '#quiet': 'give +q to nick!*@*', '#dequiet': 'remove +q", "'\\n': line = line[:-1] if len(line.split('\\t')) == 4: receiver, sender, origin, msg =", "ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run = True while run: time.sleep(1)", "type == ARG_UNL), argtypes)) > 1: # Disallow more than one optional or", "parameter as-is if not account: if istrusted(chan, untrustnick): account = untrustnick if account:", "between call to getindexbyaccountcheckid and use! 
def getindexbyaccountcheckid(id): global accountcheck for index in", "== '#esoteric': irc.msg(reply, zwsp + 'Nothing here') elif cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']:", "override prefix # Don't allow this in private messages for more transparent bot", "if len(line) == 0: raise ArgsfmtError('No command given') line = line[1:] min, max", "'+o', args.split(' ')) elif matchcmd(cmdline, '#deop'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick,", "ckaccount = accountcheck[index] if ckid == id: return index return None def setaccountcheckvalue(id,", "return '%s %s %s' % (cmd, usage(cmd, False), helptext[cmd]) else: return '%s %s'", "0 max = 0 # max = None if number of arguments is", "nick!*@*', '#kick': 'kicks nick with specified reason', '#src': 'paste a link to oonbotti2\\'s", "[] for type in argtypes: if type == ARG_STD: out.append(line[0]) line = line[1:]", "savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account for %s' % trustnick)", "the command as if it was sent on the specified channel. Requires user", "'paste a link to oonbotti2\\'s git repo', '#msg': 'send a message to nick',", "else: irc.msg(reply, zwsp + 'Usage: #trusted? [nick]') elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust',", "argtype in argtypes: if argtype == ARG_STD: min += 1 if max !=", "%s' % (chan, line)) trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp + '%s: #invite", "%s %s' % (chan, set_unset+mode, nick)) else: nicks = [] for nick in", "int(die[1][split + 1:]) die[1] = die[1][:split] elif '-' in die[1]: split = die[1].index('-')", "channel override prefix # Don't allow this in private messages for more transparent", "%s' % trustnick) else: irc.msg(reply, zwsp + 'Usage #trust nick') elif matchcmd(cmdline, '#untrust'):", "getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1] == '318': # WHOIS: End of /WHOIS list.", "+ 'Sorry, I don\\'t have that many. 
Can I borrow yours?') else: rolls", "threading.Lock() self.cronctrl = [] self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def queuejob(self, time, fn): self.timedjobslock.acquire()", "len(line)-1 >= min and len(line)-1 <= max: return True elif not max and", "gods, godslock godslock.acquire() gods = {} f=open('gods.txt', 'r') for line in f: while", "a channel origin = chan with msgslock: if msgnick not in msgs: msgs[msgnick]", "'kicks nick with specified reason', '#src': 'paste a link to oonbotti2\\'s git repo',", "mode[1:] if isauthorized(irc, chan, nick): if args == ['']: irc.send('MODE %s %s %s'", "trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global trusted, trustedlock trustedlock.acquire() trusted = {} trustedlock.release() f=open('trusted.txt',", "If account can't be found (e.g. it has been deleted, use the parameter", "elif cmdline[0] == '/sm': savemessages() def usage(cmd, message = True): usage = {'#echo':", "[] for nick in args: nicks.append(nick) if len(nicks) == 4: irc.send('MODE %s %s", "receiver, sender, origin, msg = line.split('\\t') if receiver not in msgs: msgs[receiver] =", "channel command') if matchcmd(cmdline, '#echo'): text = parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline,", "4 and len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp: # If line begins", "if (line[1] == 'PRIVMSG' or line[1] == 'JOIN') and nick in msgs: for", "die is not available in your space-time region.') elif times < 1: irc.msg(reply,", "random import re import time concmd=['/q', '/lt', '/st', '/lg', '/lm', '/sm'] blacklist =", "(id, nick, account): id filterbynick = lambda (id, cknick, account): cknick == nick", "zwsp + 'Not op') msgs_changed = False with msgslock: if (line[1] == 'PRIVMSG'", "irc.msg(nick, zwsp + '%s <%s> %s' % (origin, sender, msg)) msgs_changed = True", "cmdline[0][1:].split('d') times = int(die[0]) if die[0] else 1 if '+' in die[1]: split", "ARG_STD or type == 
ARG_OPT: out.append(line[0]) line = line[1:] else: out.append('') elif not", "# Optional (0-1) argument: [bar] out.append(ARG_OPT) elif len(arg) >= 2 and arg[0] ==", "chan in trusted and account in trusted[chan]: trustedlock.release() return True else: trustedlock.release() return", "cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']: question = parsecmd(cmdline, '{question}') if len(question) < 2", "= getaccount(irc, trustnick) if account: addtrusted(chan, account) savetrusted() else: irc.msg(reply, zwsp + 'Failed", "op') msgs_changed = False with msgslock: if (line[1] == 'PRIVMSG' or line[1] ==", "'/q': cron.ctrl('QUIT') elif cmdline[0] == '/lt': loadtrusted() elif cmdline[0] == '/st': savetrusted() elif", "random.randint(0,9))) elif die < 1: irc.msg(reply, zwsp + 'This die is not available", "run(self): run = True while run: time.sleep(1) # Accuracy doesn't need to be", "run = False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda (time, fn): (time-1, fn)),", "[(sender1, origin1, message1), (sender2, origin2, message2), ..., (senderN, origin2, messageN)] msgs = {}", "truncated line += ', ' + account else: lines.append(line) line = account if", "out.append(line[0]) line = line[1:] else: out.append('') elif not max and len(line) > min:", "usage(cmd, message = True): usage = {'#echo': 'text', '#op': '[nick]', '#deop': '[nick]', '#voice':", "command if len(line) == 0: raise ArgsfmtError('No command given') line = line[1:] min,", "# Accuracy doesn't need to be high self.cronctrllock.acquire() for cmd in self.cronctrl: if", "in trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan", "argument format used by matchcmd and parsecmd # e.g. parseargsfmt(\"foo [bar] {baz} )", "# Returns a tuple containing the arguments. 
An optional argument that didn't get", "matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick, kickreason = parsecmd(cmdline, 'nick {reason}')", "exactly do you want me to do?') elif times > 128: irc.msg(reply, zwsp", "trusted and account in trusted[chan]: trustedlock.release() return True else: trustedlock.release() return False def", "'%i (%s - %i)' % (result + plus, text, -plus) irc.msg(reply, zwsp +", "%s' % (origin, sender, msg)) msgs_changed = True if msgs_changed: savemessages() def execcmd(cmdline):", "command', '#help': '[command]'} if cmd in usage: if message: return 'Usage: %s %s'", "chan cmdline = [line[3][1:]] + line[4:] while '' in cmdline: cmdline.remove('') # #chan:", "else: plus = 0 die = '%' if die[1] == '%' else int(die[1])", "= line[4] for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1] == '318': #", "line[3][:len(zwsp)+1] == ':'+zwsp: # If line begins with ZWSP return if line[1]=='PRIVMSG' and", "back', '#op': 'give nick or yourself op rights in case you are trusted", "1 if '+' in die[1]: split = die[1].index('+') plus = int(die[1][split + 1:])", "(chan, set_unset+mode, nick)) else: nicks = [] for nick in args: nicks.append(nick) if", "return istrusted(chan, account) else: irc.msg(nick, zwsp + 'Identify with NickServ') class ArgsfmtError(Exception): def", "== '': line = account elif len(line + ', ' + account) <=", "..., (senderN, origin2, messageN)] msgs = {} msgslock = threading.Lock() # (ID, nick,", "% (chan, set_unset+mode, nick)) else: nicks = [] for nick in args: nicks.append(nick)", "if len(line) == 0: return False if line[0] != cmd: return False if", "loadgods() elif cmdline[0] == '/lm': loadmessages() elif cmdline[0] == '/sm': savemessages() def usage(cmd,", "argtypes: if type == ARG_STD or type == ARG_OPT: out.append(line[0]) line = line[1:]", "msgs = {} f = open('msgs.txt', 'r') for line in f: while len(line)", "elif matchcmd(cmdline, '#invite'): irc.msg(chan, 
zwsp + '%s: #invite has been removed. Use manual", "times = int(die[0]) if die[0] else 1 if '+' in die[1]: split =", "'remove +q from nick!*@*', '#kick': 'kicks nick with specified reason', '#src': 'paste a", "nick\\'s voice in case you are trusted by oonbotti2 and identified with NickServ',", "'#trust': 'nick', '#untrust': 'nick', '#ls-trusted': '', '#chan': 'channel command', '#help': '[command]'} if cmd", "arg[-1] == '}': # Unlimited (0-) number of arguments: {baz} out.append(ARG_UNL) else: #", "= line.split('\\t') if receiver not in msgs: msgs[receiver] = [] msgs[receiver].append((sender, origin, msg))", "is trusted' % trustnick) else: irc.msg(reply, zwsp + '%s is not trusted' %", "= parsecmd(cmdline, '[command]') helptext = help(command) if helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric')", "= getaccount(irc, trustnick) if account: if istrusted(chan, account): irc.msg(reply, zwsp + '%s is", "nick, '+v', args.split(' ')) elif matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan,", "argtype == ARG_STD: min += 1 if max != None: # Don't try", "irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and chan == '#osdev-offtopic': irc.msg(reply, zwsp", "specified reason', '#src': 'paste a link to oonbotti2\\'s git repo', '#msg': 'send a", "cknick == nick ids = map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return ids def getaccount(irc,", "'#trust', 'nick'): trustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc,", "accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: ckid, nick,", "max and len(line)-1 >= min and len(line)-1 <= max: return True elif not", "account else: lines.append(line) line = account if line != '': lines.append(line) for line", "account in trusted[chan]: f.write('%s %s\\n' % (chan, account)) f.close trustedlock.release() def 
init(): global", "'#kick'): if matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick, kickreason = parsecmd(cmdline, 'nick {reason}') if", "self.cronctrl = [] self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time,", "nick, ckvalue = accountcheck[index] accountcheck[index] = (id, nick, value) accountchecklock.release() def getaccountcheckvalue(id): global", "fn() def loadmessages(): global msgs, msgslock with msgslock: msgs = {} f =", "or yourself voice in case you are trusted by oonbotti2 and identified with", "origin, msg in msgs.pop(nick): irc.msg(nick, zwsp + '%s <%s> %s' % (origin, sender,", "/WHOIS list. whoisnick = line[3] for id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None:", "'#esoteric') and chan == '#esoteric': irc.msg(reply, zwsp + 'Nothing here') elif cmdline[0] in", "Accuracy doesn't need to be high self.cronctrllock.acquire() for cmd in self.cronctrl: if cmd", "msg = line.split('\\t') if receiver not in msgs: msgs[receiver] = [] msgs[receiver].append((sender, origin,", "else: irc.msg(reply, zwsp + 'Usage: #msg nick message') elif matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline,", "if account: if istrusted(chan, account): irc.msg(reply, zwsp + '%s is trusted' % trustnick)", "voice in case you are trusted by oonbotti2 and identified with NickServ', '#devoice':", "= '%s (%s)' % (str(result), ', '.join([str(i) for i in rolls])) else: text", "line[3][1:], nick): irc.send('JOIN ' + line[3]) elif line[1] == '482': irc.msg(line[3], zwsp +", "does no locking, should only be used internally # The index returned cannot", "line[-1] == '\\n': line = line[:-1] if len(line) > 0: chan, account =", "+ 'Usage #kick nick reason') elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif", "if cmd=='': return '#echo #op #deop #voice #devoice #quiet #dequiet #kick #src #msg", "gods = {} godslock = 
threading.Lock() # receiver: [(sender1, origin1, message1), (sender2, origin2,", "tuple containing the arguments. An optional argument that didn't get a value will", "matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick, kickreason = parsecmd(cmdline, 'nick {reason}') if kicknick.lower() ==", "= reduce((lambda x, y: x + y), rolls) if times > 1: text", "= None return min, max def matchcmd(line, cmd, args=None): # matchcmd(line, cmd) matched", "chan, account = line.split() addtrusted(chan, account) f.close() def loadgods(): global gods, godslock godslock.acquire()", "plus) elif plus < 0: text = '%i (%s - %i)' % (result", "args.split(' ')) elif matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '+q',", "== ARG_STD or type == ARG_OPT: out.append(line[0]) line = line[1:] elif type ==", "line[1:] min, max = getargnums(argtypes) if len(line) == min: # Only standard arguments", "argtypes[:-1]: # Disallow non-final unlimited arguments raise ArgsfmtError('Non-final unlimited argument') if len(filter((lambda type:", "threading.Thread.__init__(self) def queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire()", "number of arguments is unlimited for argtype in argtypes: if argtype == ARG_STD:", "and line[3][:2] != ': ': reply = chan cmdline = [line[3][1:]] + line[4:]", "[line[3][1:]] + line[4:] while '' in cmdline: cmdline.remove('') # #chan: channel override prefix", "Use manual invite' % nick) elif matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help', '[command]'): command", "len(argtypes) >= 1 and ARG_UNL in argtypes[:-1]: # Disallow non-final unlimited arguments raise", "= {} f=open('gods.txt', 'r') for line in f: while len(line) > 0 and", "if chan in trusted: lines = [] line = '' for account in", "% untrustnick) else: irc.msg(reply, zwsp + 'Usage #untrust nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire()", 
"__init__(self, msg): self.msg = msg def __str__(self): return 'Error with argument format: '", "+ ', ' + account) <= 255: # Playing it safe not to", "index = getindexbyaccountcheckid(id) if index is not None: ckid, cknick, value = accountcheck[index]", "else: return out def parse((line, irc)): global blacklist global msgs, msgslock global trusted,", "matchcmd(cmdline, '#msg', 'nick {message}'): msgnick, message = parsecmd(cmdline, 'nick {message}') if chan ==", "receiver in msgs: for sender, origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender,", "NickServ', '#quiet': 'give +q to nick!*@*', '#dequiet': 'remove +q from nick!*@*', '#kick': 'kicks", "'-q', [arg + '!*@*']) elif matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan,", "'[nick]'): trustnick = parsecmd(cmdline, '[nick]') if trustnick == '': trustnick = nick account", "'remove nick from trusted list', '#ls-trusted': 'list nicks that are trusted. use only", "+ account else: lines.append(line) line = account if line != '': lines.append(line) for", "help(cmd): helptext = {'#echo': '#echo text back', '#op': 'give nick or yourself op", "if cmd == 'QUIT': run = False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda", "if account not in trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account): global trusted, trustedlock", "range(len(accountcheck)): ckid, cknick, ckaccount = accountcheck[index] if ckid == id: return index return", ">= 1 and ARG_UNL in argtypes[:-1]: # Disallow non-final unlimited arguments raise ArgsfmtError('Non-final", "'[nick]', '#trust': 'nick', '#untrust': 'nick', '#ls-trusted': '', '#chan': 'channel command', '#help': '[command]'} if", "of command or list commands'} if cmd=='': return '#echo #op #deop #voice #devoice", "trustnick = nick account = getaccount(irc, trustnick) if account: if istrusted(chan, account): irc.msg(reply,", "account in 
trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global trusted, trustedlock trustedlock.acquire() trusted =", "'nick {reason}'): kicknick, kickreason = parsecmd(cmdline, 'nick {reason}') if kicknick.lower() == irc.nick: irc.send('KICK", "= [] line = '' for account in trusted[chan]: if line == '':", "zwsp + 'Failed to get account for %s' % trustnick) else: irc.msg(reply, zwsp", "init(): global cron cron = Cron() cron.start() loadtrusted() loadgods() def chmode(irc, chan, nick,", "nick: if matchcmd(cmdline, '#chan', 'channel {command}'): newchan, newcmdline = parsecmd(cmdline, 'channel {command}') newcmdline", "'#msg'): if matchcmd(cmdline, '#msg', 'nick {message}'): msgnick, message = parsecmd(cmdline, 'nick {message}') if", "format') # Remove the command if len(line) == 0: raise ArgsfmtError('No command given')", "list', '#untrust': 'remove nick from trusted list', '#ls-trusted': 'list nicks that are trusted.", "failure return None else: return account def isauthorized(irc, chan, nick): account = getaccount(irc,", "(str(result), ', '.join([str(i) for i in rolls])) else: text = str(result) if plus", "id = initaccountcheck(nick) irc.send('WHOIS ' + nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id, ''))) account", "= parsecmd(cmdline, 'channel {command}') newcmdline = newcmdline.split(' ') if isauthorized(irc, newchan, nick): chan", "= [random.randint(1, die) for i in xrange(times)] result = reduce((lambda x, y: x", "if times > 1: text = '%s (%s)' % (str(result), ', '.join([str(i) for", "if matchcmd(cmdline, '#help', '[command]'): command = parsecmd(cmdline, '[command]') helptext = help(command) if helptext:", "+ text) elif line[1] == '330': # WHOIS: is logged in as whoisnick", "None else: return account def isauthorized(irc, chan, nick): account = getaccount(irc, nick) if", "[arg + '!*@*']) elif matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick,", "len(line) > 0: chan, account = 
line.split() if chan not in gods: gods[chan]", "ArgsfmtError(Exception): def __init__(self, msg): self.msg = msg def __str__(self): return 'Error with argument", "elif argtype == ARG_UNL: max = None return min, max def matchcmd(line, cmd,", "for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage #trust nick') elif matchcmd(cmdline,", "'#echo'): text = parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'): args = parsecmd(cmdline,", "arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '+q', [arg + '!*@*']) elif matchcmd(cmdline,", "'\\n': line = line[:-1] if len(line) > 0: chan, account = line.split() addtrusted(chan,", "NickServ') class ArgsfmtError(Exception): def __init__(self, msg): self.msg = msg def __str__(self): return 'Error", "nick) elif matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help', '[command]'): command = parsecmd(cmdline, '[command]') helptext", "and chan == '#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline,", "def setaccountcheckvalue(id, value): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is", "# e.g. 
parseargsfmt(\"foo [bar] {baz} ) -> [ARG_STD, ARG_OPT, ARG_UNL] args = args.split('", "elif not max and len(line)-1 >= min: return True else: return False def", "'+' in die[1]: split = die[1].index('+') plus = int(die[1][split + 1:]) die[1] =", "max += 1 elif argtype == ARG_OPT: if max != None: # Don't", "matchcmd(line, cmd, args=None): # matchcmd(line, cmd) matched if the command cmd is used,", "id: return index return None def setaccountcheckvalue(id, value): global accountcheck, accountchecklock accountchecklock.acquire() index", "= initaccountcheck(nick) irc.send('WHOIS ' + nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id, ''))) account =", "if len(out) == 1: return out[0] else: return out def parse((line, irc)): global", "msgslock: if (line[1] == 'PRIVMSG' or line[1] == 'JOIN') and nick in msgs:", "'', '#chan': 'channel command', '#help': '[command]'} if cmd in usage: if message: return", "'What exactly do you want me to do?') elif times > 128: irc.msg(reply,", "if die[1] == '%' else int(die[1]) if die == '%': if times !=", "= [] self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn))", "matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help', '[command]'): command = parsecmd(cmdline, '[command]') helptext = help(command)", "irc.msg(chan, zwsp + '%s: #invite has been removed. Use manual invite' % nick)", "plus = -int(die[1][split + 1:]) die[1] = die[1][:split] else: plus = 0 die", "matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp + '%s: #invite has been removed. 
Use manual invite'", "account not in gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted() else: irc.msg(reply, zwsp + 'Failed", "'tell you if nick or yourself is trusted by oonbotti2', '#trust': 'add nick", "ARG_STD: min += 1 if max != None: # Don't try to increment", "irc.nick: irc.send('KICK %s %s :Fuck you' % (chan, nick)) else: if isauthorized(irc, chan,", "trusted = {} trustedlock.release() f=open('trusted.txt', 'r') for line in f: while len(line) >", "because None is already reserved elif line[1] == 'INVITE' and line[2] == irc.nick", "None: account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account == '': # '' Signifies", "sender, origin, msg)) f.close() loadmessages() def addtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if", "min = 0 max = 0 # max = None if number of", "0: return False if line[0] != cmd: return False if not args: return", "getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '') # Mark as failed, '' is", "== nick: # In a query: origin = \"[query]\" else: # In a", "has been deleted, use the parameter as-is if not account: if istrusted(chan, untrustnick):", "else: raise ArgsfmtError('Number of given arguments not possible for given format string') if", "cmdline = [line[3][1:]] + line[4:] while '' in cmdline: cmdline.remove('') # #chan: channel", "addtrusted(chan, account) savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account for %s'", "accountcheck for index in range(len(accountcheck)): ckid, cknick, ckaccount = accountcheck[index] if ckid ==", "trustnick) else: irc.msg(reply, zwsp + 'Usage #trust nick') elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline,", "elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times = int(die[0]) if die[0] else 1 if", "map((lambda (time, fn): fn), filter((lambda (time, fn): time<=0), self.timedjobs)) self.timedjobs = filter((lambda (time,", "cron.ctrl('QUIT') elif 
cmdline[0] == '/lt': loadtrusted() elif cmdline[0] == '/st': savetrusted() elif cmdline[0]", "+ 'Nothing here') elif cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']: question = parsecmd(cmdline, '{question}')", "is already reserved elif line[1] == 'INVITE' and line[2] == irc.nick and line[3][1:]", "given out = [] for type in argtypes: if type == ARG_STD or", "and identified with NickServ', '#deop': 'remove your/nick\\'s op rights', '#voice': 'give nick or", "for sender, origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin, msg)) f.close()", "')) elif matchcmd(cmdline, '#deop'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-o', args.split('", "the parameter as-is if not account: if istrusted(chan, untrustnick): account = untrustnick if", "== min: # Only standard arguments given out = [] for type in", "y), rolls) if times > 1: text = '%s (%s)' % (str(result), ',", "getargnums(parseargsfmt(args)) if max and len(line)-1 >= min and len(line)-1 <= max: return True", "% (random.randint(0,9), random.randint(0,9))) elif die < 1: irc.msg(reply, zwsp + 'This die is", "time.sleep(1) # Accuracy doesn't need to be high self.cronctrllock.acquire() for cmd in self.cronctrl:", "+ 1:]) die[1] = die[1][:split] elif '-' in die[1]: split = die[1].index('-') plus", "account)) f.close trustedlock.release() def init(): global cron cron = Cron() cron.start() loadtrusted() loadgods()", "(time, fn): fn), filter((lambda (time, fn): time<=0), self.timedjobs)) self.timedjobs = filter((lambda (time, fn):", "accountchecklock.acquire() id = accountcheckid accountcheck.append((id, nick, None)) accountcheckid += 1 accountchecklock.release() return id", "global doctor, die_expr line = line.split(' ') nick = line[0].split('!')[0][1:] chan = line[2]", "argument') if len(filter((lambda type: type == ARG_OPT or type == ARG_UNL), argtypes)) >", "Unlimited argument given out = [] for type in argtypes: if type ==", "{reason}') if kicknick.lower() == 
irc.nick: irc.send('KICK %s %s :Fuck you' % (chan, nick))", "Playing it safe not to get truncated line += ', ' + account", "text = '%i (%s + %i)' % (result + plus, text, plus) elif", "if message: return 'Usage: %s %s' % (cmd, usage[cmd]) else: return usage[cmd] else:", "irc.msg(reply, zwsp + '%s%s' % (random.randint(0,9), random.randint(0,9))) elif die < 1: irc.msg(reply, zwsp", "trusted by oonbotti2', '#trust': 'add nick to trusted list', '#untrust': 'remove nick from", "account) f.close() godslock.release() def savetrusted(): global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for chan", "nick in args: nicks.append(nick) if len(nicks) == 4: irc.send('MODE %s %s %s' %", "matched if the command cmd is used, matchcmd(line, cmd, args) checks whether the", "1: # Disallow more than one optional or unlimited argument per argument string", "line)) trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp + '%s: #invite has been removed.", "'nick'): untrustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, untrustnick)", "in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin, msg)) f.close() loadmessages() def addtrusted(chan, account):", "you are trusted by oonbotti2 and identified with NickServ', '#deop': 'remove your/nick\\'s op", "origin2, message2), ..., (senderN, origin2, messageN)] msgs = {} msgslock = threading.Lock() #", "nicks that are trusted. 
use only in a query', '#chan': 'Runs the command", "(chan, set_unset+mode*4, ' '.join(nicks))) nicks = [] if nicks: irc.send('MODE %s %s %s'", "[] doctor = eliza.eliza() # channel: [user1, user2, ..., userN] trusted = {}", "savetrusted(): global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for chan in trusted: for account", "nick with specified reason', '#src': 'paste a link to oonbotti2\\'s git repo', '#msg':", "= False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda (time, fn): (time-1, fn)), self.timedjobs)", "parse((line, irc)): global blacklist global msgs, msgslock global trusted, trustedlock, gods, godslock global", "== 'QUIT': run = False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda (time, fn):", "sender, origin, msg in msgs.pop(nick): irc.msg(nick, zwsp + '%s <%s> %s' % (origin,", "in range(len(accountcheck)): ckid, cknick, ckaccount = accountcheck[index] if ckid == id: return index", "True elif not max and len(line)-1 >= min: return True else: return False", "to getindexbyaccountcheckid and use! def getindexbyaccountcheckid(id): global accountcheck for index in range(len(accountcheck)): ckid,", "False def parsecmd(line, args): # Returns a tuple containing the arguments. 
An optional", "removeaccountcheck(id) if account == '': # '' Signifies failure return None else: return", "+ 'Usage #untrust nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan in trusted: lines", "global msgs, msgslock with msgslock: f=open('msgs.txt', 'w') for receiver in msgs: for sender,", "else: out.append('') elif not max and len(line) > min: # Unlimited argument given", "Cron(threading.Thread): def __init__(self): self.timedjobs = [] self.timedjobslock = threading.Lock() self.cronctrl = [] self.cronctrllock", "savemessages() else: irc.msg(reply, zwsp + 'Usage: #msg nick message') elif matchcmd(cmdline, '#trusted?'): if", "else: irc.msg(reply, zwsp + '%s is not trusted' % trustnick) else: irc.msg(reply, zwsp", "'#deop': 'remove your/nick\\'s op rights', '#voice': 'give nick or yourself voice in case", "global cron cron = Cron() cron.start() loadtrusted() loadgods() def chmode(irc, chan, nick, mode,", "irc.send('MODE %s %s %s' % (chan, set_unset+mode, nick)) else: nicks = [] for", "not account: if istrusted(chan, untrustnick): account = untrustnick if account: godslock.acquire() if chan", "with msgslock: if msgnick not in msgs: msgs[msgnick] = [] msgs[msgnick].append((nick, origin, message))", "arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '-q', [arg + '!*@*']) elif matchcmd(cmdline,", "or yourself op rights in case you are trusted by oonbotti2 and identified", "given') line = line[1:] min, max = getargnums(argtypes) if len(line) == min: #", "#deop #voice #devoice #quiet #dequiet #kick #src #msg #trusted? 
#trust #untrust #ls-trusted #chan", "else: return usage[cmd] else: return None def help(cmd): helptext = {'#echo': '#echo text", "'-v', args.split(' ')) elif matchcmd(cmdline, '#kick'): if matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick, kickreason", "% (receiver, sender, origin, msg)) f.close() loadmessages() def addtrusted(chan, account): global trusted, trustedlock", "True else: trustedlock.release() return False def initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id", "account): cknick == nick ids = map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return ids def", "'' Signifies failure return None else: return account def isauthorized(irc, chan, nick): account", "'%s is trusted' % trustnick) else: irc.msg(reply, zwsp + '%s is not trusted'", "'[command]'): command = parsecmd(cmdline, '[command]') helptext = help(command) if helptext: irc.msg(reply, zwsp+helptext) elif", "elif cmd=='me': return 'I shall.' 
elif cmd in helptext: if helptext[cmd]: return '%s", "return 'Error with argument format: ' + msg ARG_STD = 0 ARG_OPT =", "in [irc.nick, irc.nick+',', irc.nick+':']: question = parsecmd(cmdline, '{question}') if len(question) < 2 or", "'.join(nicks))) def istrusted(chan, account): trustedlock.acquire() if chan in trusted and account in trusted[chan]:", "zwsp + '%s: %s' % (chan, line)) trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp", "1:]) die[1] = die[1][:split] elif '-' in die[1]: split = die[1].index('-') plus =", "= die[1].index('-') plus = -int(die[1][split + 1:]) die[1] = die[1][:split] else: plus =", "chan in trusted and account in trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global trusted,", "mode = mode[1:] if isauthorized(irc, chan, nick): if args == ['']: irc.send('MODE %s", "here') elif cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']: question = parsecmd(cmdline, '{question}') if len(question)", "origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin, msg)) f.close() loadmessages() def", "self.timedjobs) self.timedjobslock.release() for fn in torun: fn() def loadmessages(): global msgs, msgslock with", "if line == '': line = account elif len(line + ', ' +", "None: setaccountcheckvalue(id, '') # Mark as failed, '' is used because None is", "if not args: return True min, max = getargnums(parseargsfmt(args)) if max and len(line)-1", "op rights in case you are trusted by oonbotti2 and identified with NickServ',", "msgslock with msgslock: f=open('msgs.txt', 'w') for receiver in msgs: for sender, origin, msg", "die[1][:split] else: plus = 0 die = '%' if die[1] == '%' else", "Unlimited (0-) number of arguments: {baz} out.append(ARG_UNL) else: # Normal argument: foo out.append(ARG_STD)", "user2, ..., userN] trusted = {} trustedlock = threading.Lock() gods = {} godslock", "die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class 
Cron(threading.Thread): def __init__(self): self.timedjobs = [] self.timedjobslock = threading.Lock() self.cronctrl =", "= line[2] if line[2][0] == '#' else nick zwsp = '\\xe2\\x80\\x8b' if nick", "% (chan, set_unset+mode*4, ' '.join(nicks))) nicks = [] if nicks: irc.send('MODE %s %s", "if account: godslock.acquire() if chan not in gods or account not in gods[chan]:", "elif line[1] == '482': irc.msg(line[3], zwsp + 'Not op') msgs_changed = False with", "line = account if line != '': lines.append(line) for line in lines: irc.msg(nick,", "''))) account = None while account == None: account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id)", "trusted by oonbotti2 and identified with NickServ', '#quiet': 'give +q to nick!*@*', '#dequiet':", "nick, '+q', [arg + '!*@*']) elif matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline, 'nick') chmode(irc,", "== None: setaccountcheckvalue(id, '') # Mark as failed, '' is used because None", "= {} msgslock = threading.Lock() # (ID, nick, account) accountcheck = [] accountcheckid", "not in gods: gods[chan] = [] gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release() def savetrusted():", "msgnick, message = parsecmd(cmdline, 'nick {message}') if chan == nick: # In a", "line[3] for id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '') # Mark", "I borrow yours?') else: rolls = [random.randint(1, die) for i in xrange(times)] result", "account def isauthorized(irc, chan, nick): account = getaccount(irc, nick) if account: return istrusted(chan,", "type == ARG_STD or type == ARG_OPT: out.append(line[0]) line = line[1:] else: out.append('')", "origin, msg)) f.close() def savemessages(): global msgs, msgslock with msgslock: f=open('msgs.txt', 'w') for", "if len(nicks) == 4: irc.send('MODE %s %s %s' % (chan, set_unset+mode*4, ' '.join(nicks)))", "self.timedjobs = filter((lambda (time, fn): time>0), self.timedjobs) self.timedjobslock.release() for 
fn in torun: fn()", "= mode[0] mode = mode[1:] if isauthorized(irc, chan, nick): if args == ['']:", "one optional or unlimited argument per argument string raise ArgsfmtError('Ambiguous argument format') #", "to get account for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage: #trusted?", "not in trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if", "' + account else: lines.append(line) line = account if line != '': lines.append(line)", "+ plus, text, plus) elif plus < 0: text = '%i (%s -", "for argtype in argtypes: if argtype == ARG_STD: min += 1 if max", "isauthorized(irc, chan, nick): if args == ['']: irc.send('MODE %s %s %s' % (chan,", "number of arguments: {baz} out.append(ARG_UNL) else: # Normal argument: foo out.append(ARG_STD) return out", "commands'} if cmd=='': return '#echo #op #deop #voice #devoice #quiet #dequiet #kick #src", "and line[3][1:] in irc.chan.split(' '): if isauthorized(irc, line[3][1:], nick): irc.send('JOIN ' + line[3])", "ARG_UNL = 2 def parseargsfmt(args): # parses the argument format used by matchcmd", "if it was sent on the specified channel. 
Requires user to be trusted',", "threading.Lock() # receiver: [(sender1, origin1, message1), (sender2, origin2, message2), ..., (senderN, origin2, messageN)]", "NickServ', '#devoice': 'remove your or nick\\'s voice in case you are trusted by", "chan == '#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg',", "git repo', '#msg': 'send a message to nick', '#trusted?': 'tell you if nick", "global gods, godslock godslock.acquire() gods = {} f=open('gods.txt', 'r') for line in f:", "elif matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-v', args.split(' '))", "argument format') # Remove the command if len(line) == 0: raise ArgsfmtError('No command", "getaccount(irc, nick) if account: return istrusted(chan, account) else: irc.msg(nick, zwsp + 'Identify with", "in case you are trusted by oonbotti2 and identified with NickServ', '#deop': 'remove", "are trusted. use only in a query', '#chan': 'Runs the command as if", "% nick) elif matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help', '[command]'): command = parsecmd(cmdline, '[command]')", "'#msg': 'send a message to nick', '#trusted?': 'tell you if nick or yourself", "trustnick) else: irc.msg(reply, zwsp + 'Failed to get account for %s' % trustnick)", "nick: # In a query: origin = \"[query]\" else: # In a channel", "% trustnick) else: irc.msg(reply, zwsp + '%s is not trusted' % trustnick) else:", "trustnick == '': trustnick = nick account = getaccount(irc, trustnick) if account: if", "many. 
Can I borrow yours?') else: rolls = [random.randint(1, die) for i in", "for fn in torun: fn() def loadmessages(): global msgs, msgslock with msgslock: msgs", "'This die is not available in your space-time region.') elif times < 1:", "'#dequiet': 'nick', '#kick': 'nick [reason]', '#src': '', '#msg': 'nick message', '#trusted?': '[nick]', '#trust':", "internally # The index returned cannot be guaranteed valid if lock is released", "with NickServ', '#devoice': 'remove your or nick\\'s voice in case you are trusted", "+ 'Usage: #trusted? [nick]') elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust', 'nick'): trustnick =", "= -int(die[1][split + 1:]) die[1] = die[1][:split] else: plus = 0 die =", "else 1 if '+' in die[1]: split = die[1].index('+') plus = int(die[1][split +", "and arg[0] == '[' and arg[-1] == ']': # Optional (0-1) argument: [bar]", "if isauthorized(irc, line[3][1:], nick): irc.send('JOIN ' + line[3]) elif line[1] == '482': irc.msg(line[3],", "f=open('trusted.txt', 'r') for line in f: while len(line) > 0 and line[-1] ==", "if len(line.split('\\t')) == 4: receiver, sender, origin, msg = line.split('\\t') if receiver not", "if matchcmd(cmdline, '#kick', 'nick {reason}'): kicknick, kickreason = parsecmd(cmdline, 'nick {reason}') if kicknick.lower()", "ArgsfmtError('Non-final unlimited argument') if len(filter((lambda type: type == ARG_OPT or type == ARG_UNL),", "time concmd=['/q', '/lt', '/st', '/lg', '/lm', '/sm'] blacklist = [] doctor = eliza.eliza()", "== '482': irc.msg(line[3], zwsp + 'Not op') msgs_changed = False with msgslock: if", "id filterbynick = lambda (id, cknick, account): cknick == nick ids = map(getid,", "line in lines: irc.msg(nick, zwsp + '%s: %s' % (chan, line)) trustedlock.release() elif", "msgs, msgslock global trusted, trustedlock, gods, godslock global doctor, die_expr line = line.split('", "'{question}') if len(question) < 2 or question[:2] != ':D': # Mandated by #osdev-offtopic", "global accountcheck for index in 
range(len(accountcheck)): ckid, cknick, ckaccount = accountcheck[index] if ckid", "!= 1: irc.msg(reply, zwsp + 'Not supported') else: irc.msg(reply, zwsp + '%s%s' %", "'nick'): trustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, trustnick)", "nick', '#trusted?': 'tell you if nick or yourself is trusted by oonbotti2', '#trust':", "'#voice': 'give nick or yourself voice in case you are trusted by oonbotti2", "'%' else int(die[1]) if die == '%': if times != 1: irc.msg(reply, zwsp", "= line.split() addtrusted(chan, account) f.close() def loadgods(): global gods, godslock godslock.acquire() gods =", "in blacklist: return elif len(line) >= 4 and len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1]", "line = line[:-1] if len(line) > 0: chan, account = line.split() if chan", "and ARG_UNL in argtypes[:-1]: # Disallow non-final unlimited arguments raise ArgsfmtError('Non-final unlimited argument')", "1 if max != None: # Don't try to increment if max is", "chan, nick, '+v', args.split(' ')) elif matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline, 'nick') chmode(irc,", "line[3][1:] in irc.chan.split(' '): if isauthorized(irc, line[3][1:], nick): irc.send('JOIN ' + line[3]) elif", "unlimited for argtype in argtypes: if argtype == ARG_STD: min += 1 if", "while len(line) > 0 and line[-1] == '\\n': line = line[:-1] if len(line)", "args.split(' ')) elif matchcmd(cmdline, '#deop'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-o',", "cmdline[0] == '/lm': loadmessages() elif cmdline[0] == '/sm': savemessages() def usage(cmd, message =", "= parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+v', args.split(' ')) elif matchcmd(cmdline, '#quiet'): arg", "== ARG_OPT: if max != None: # Don't try to increment if max", "y: x + y), rolls) if times > 1: text = '%s (%s)'", "trustedlock trustedlock.acquire() if chan in trusted and account in trusted[chan]: trusted[chan].remove(account) trustedlock.release() def", "out.append('') 
elif max and len(line) == max: # Optional argument given out =", "else: irc.msg(reply, zwsp + 'Usage #trust nick') elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust',", "index is not None: del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire()", "been removed. Use manual invite' % nick) elif matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help',", "die = cmdline[0][1:].split('d') times = int(die[0]) if die[0] else 1 if '+' in", "line[1] == '482': irc.msg(line[3], zwsp + 'Not op') msgs_changed = False with msgslock:", "1: irc.msg(reply, zwsp + 'What exactly do you want me to do?') elif", "rmtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan in trusted and account in", "in trusted: for account in trusted[chan]: f.write('%s %s\\n' % (chan, account)) f.close trustedlock.release()", "'/lt', '/st', '/lg', '/lm', '/sm'] blacklist = [] doctor = eliza.eliza() # channel:", "trustnick = parsecmd(cmdline, '[nick]') if trustnick == '': trustnick = nick account =", "die_expr line = line.split(' ') nick = line[0].split('!')[0][1:] chan = line[2] if line[2][0]", "mode[0] mode = mode[1:] if isauthorized(irc, chan, nick): if args == ['']: irc.send('MODE", "# In a channel origin = chan with msgslock: if msgnick not in", "can't be found (e.g. it has been deleted, use the parameter as-is if", "#chan #help' elif cmd=='me': return 'I shall.' 
elif cmd in helptext: if helptext[cmd]:", "if matchcmd(cmdline, '#msg', 'nick {message}'): msgnick, message = parsecmd(cmdline, 'nick {message}') if chan", "type == ARG_STD or type == ARG_OPT: out.append(line[0]) line = line[1:] elif type", "plus, text, -plus) irc.msg(reply, zwsp + text) elif line[1] == '330': # WHOIS:", "if len(question) < 2 or question[:2] != ':D': # Mandated by #osdev-offtopic law", "format string') if len(out) == 1: return out[0] else: return out def parse((line,", "#untrust #ls-trusted #chan #help' elif cmd=='me': return 'I shall.' elif cmd in helptext:", "account = line.split() addtrusted(chan, account) f.close() def loadgods(): global gods, godslock godslock.acquire() gods", "the argument format used by matchcmd and parsecmd # e.g. parseargsfmt(\"foo [bar] {baz}", "die[1].index('-') plus = -int(die[1][split + 1:]) die[1] = die[1][:split] else: plus = 0", "fn): fn), filter((lambda (time, fn): time<=0), self.timedjobs)) self.timedjobs = filter((lambda (time, fn): time>0),", "ARG_OPT, ARG_UNL] args = args.split(' ') out = [] for arg in args:", "'Error with argument format: ' + msg ARG_STD = 0 ARG_OPT = 1", ">= 2 and arg[0] == '{' and arg[-1] == '}': # Unlimited (0-)", "line[2] if line[2][0] == '#' else nick zwsp = '\\xe2\\x80\\x8b' if nick in", "accountchecklock.release() def getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is", "chan == '#esoteric': irc.msg(reply, zwsp + 'Nothing here') elif cmdline[0] in [irc.nick, irc.nick+',',", "if number of arguments is unlimited for argtype in argtypes: if argtype ==", "return True min, max = getargnums(parseargsfmt(args)) if max and len(line)-1 >= min and", "+ 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and chan == '#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix')", "reserved elif line[1] == 'INVITE' and line[2] == irc.nick and line[3][1:] in 
irc.chan.split('", "accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: ckid, nick, ckvalue", "used, matchcmd(line, cmd, args) checks whether the args match too if len(line) ==", "out.append('') elif not max and len(line) > min: # Unlimited argument given out", "repo', '#msg': 'send a message to nick', '#trusted?': 'tell you if nick or", "if matchcmd(cmdline, '#echo'): text = parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'): args", "[random.randint(1, die) for i in xrange(times)] result = reduce((lambda x, y: x +", "'nick') chmode(irc, chan, nick, '+q', [arg + '!*@*']) elif matchcmd(cmdline, '#dequiet'): arg =", "chmode(irc, chan, nick, '+q', [arg + '!*@*']) elif matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline,", "= filter((lambda (time, fn): time>0), self.timedjobs) self.timedjobslock.release() for fn in torun: fn() def", "+ 'This die is not available in your space-time region.') elif times <", "'{args}') chmode(irc, chan, nick, '+o', args.split(' ')) elif matchcmd(cmdline, '#deop'): args = parsecmd(cmdline,", "it safe not to get truncated line += ', ' + account else:", "elif matchcmd(cmdline, '#op'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+o', args.split(' '))", "= untrustnick if account: godslock.acquire() if chan not in gods or account not", "'#deop'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-o', args.split(' ')) elif matchcmd(cmdline,", "global trusted, trustedlock, gods, godslock global doctor, die_expr line = line.split(' ') nick", "None: del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire() getid = lambda", "chmode(irc, chan, nick, '+o', args.split(' ')) elif matchcmd(cmdline, '#deop'): args = parsecmd(cmdline, '{args}')", "usage if matchcmd(cmdline, '#chan') and chan != nick: if matchcmd(cmdline, '#chan', 'channel {command}'):", 
"irc.send('WHOIS ' + nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id, ''))) account = None while", "len(line) == 0: return False if line[0] != cmd: return False if not", "min, max = getargnums(argtypes) if len(line) == min: # Only standard arguments given", "== '#' else nick zwsp = '\\xe2\\x80\\x8b' if nick in blacklist: return elif", "irc)): global blacklist global msgs, msgslock global trusted, trustedlock, gods, godslock global doctor,", "'#voice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+v', args.split(' ')) elif matchcmd(cmdline,", "irc.nick+',', irc.nick+':']: question = parsecmd(cmdline, '{question}') if len(question) < 2 or question[:2] !=", "index in range(len(accountcheck)): ckid, cknick, ckaccount = accountcheck[index] if ckid == id: return", "cron.queuejob(5, (lambda : setaccountcheckvalue(id, ''))) account = None while account == None: account", "'/sm': savemessages() def usage(cmd, message = True): usage = {'#echo': 'text', '#op': '[nick]',", "if matchcmd(cmdline, '#chan') and chan != nick: if matchcmd(cmdline, '#chan', 'channel {command}'): newchan,", "msgs.pop(nick): irc.msg(nick, zwsp + '%s <%s> %s' % (origin, sender, msg)) msgs_changed =", "getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '') # Mark as failed, '' is used because", "out = [] for arg in args: if len(arg) >= 2 and arg[0]", "' '.join(nicks))) nicks = [] if nicks: irc.send('MODE %s %s %s' % (chan,", "= parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-o', args.split(' ')) elif matchcmd(cmdline, '#voice'): args", "None is already reserved elif line[1] == 'INVITE' and line[2] == irc.nick and", "index is not None: ckid, nick, ckvalue = accountcheck[index] accountcheck[index] = (id, nick,", "def getargnums(argtypes): min = 0 max = 0 # max = None if", "irc.msg(chan, zwsp + 'Usage #chan channel command') if matchcmd(cmdline, '#echo'): text = parsecmd(cmdline,", "times < 1: irc.msg(reply, zwsp + 'What exactly do you want me to", "a tuple containing 
the arguments. An optional argument that didn't get a value", "in self.cronctrl: if cmd == 'QUIT': run = False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs", "== id: return index return None def setaccountcheckvalue(id, value): global accountcheck, accountchecklock accountchecklock.acquire()", "in trusted[chan]: if line == '': line = account elif len(line + ',", "zwsp + 'Not supported') else: irc.msg(reply, zwsp + '%s%s' % (random.randint(0,9), random.randint(0,9))) elif", "line begins with ZWSP return if line[1]=='PRIVMSG' and line[3][:2] != ': ': reply", "'Usage #untrust nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan in trusted: lines =", "zwsp + 'Nothing here') elif cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']: question = parsecmd(cmdline,", "out.append(ARG_UNL) else: # Normal argument: foo out.append(ARG_STD) return out def getargnums(argtypes): min =", "= parseargsfmt(args) if len(argtypes) >= 1 and ARG_UNL in argtypes[:-1]: # Disallow non-final", "if index is not None: del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck, accountchecklock", "identified with NickServ', '#quiet': 'give +q to nick!*@*', '#dequiet': 'remove +q from nick!*@*',", "args == ['']: irc.send('MODE %s %s %s' % (chan, set_unset+mode, nick)) else: nicks", "untrustnick) else: irc.msg(reply, zwsp + 'Usage #untrust nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if", "irc.msg(reply, zwsp + 'Usage #untrust nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan in", "else nick zwsp = '\\xe2\\x80\\x8b' if nick in blacklist: return elif len(line) >=", "== 'INVITE' and line[2] == irc.nick and line[3][1:] in irc.chan.split(' '): if isauthorized(irc,", "threading.Lock() # (ID, nick, account) accountcheck = [] accountcheckid = 0 accountchecklock =", "= getaccount(irc, untrustnick) # If account can't be found (e.g. 
it has been", "':D': # Mandated by #osdev-offtopic law irc.msg(reply, zwsp + '%s: %s' % (nick,", "'w') for receiver in msgs: for sender, origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' %", "len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp: # If line begins with ZWSP return if", "parsecmd(cmdline, 'nick {reason}') if kicknick.lower() == irc.nick: irc.send('KICK %s %s :Fuck you' %", "getaccount(irc, untrustnick) # If account can't be found (e.g. it has been deleted,", "def queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd)", "matchcmd(cmdline, '#help', '[command]'): command = parsecmd(cmdline, '[command]') helptext = help(command) if helptext: irc.msg(reply,", "'%s: %s' % (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times = int(die[0])", "zwsp + 'Identify with NickServ') class ArgsfmtError(Exception): def __init__(self, msg): self.msg = msg", "', '.join([str(i) for i in rolls])) else: text = str(result) if plus >", "'#echo #op #deop #voice #devoice #quiet #dequiet #kick #src #msg #trusted? #trust #untrust", "= [] self.timedjobslock = threading.Lock() self.cronctrl = [] self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def", "== '318': # WHOIS: End of /WHOIS list. 
whoisnick = line[3] for id", "len(nicks) == 4: irc.send('MODE %s %s %s' % (chan, set_unset+mode*4, ' '.join(nicks))) nicks", "== ARG_UNL), argtypes)) > 1: # Disallow more than one optional or unlimited", "out.append(line[0]) line = line[1:] else: out.append('') elif max and len(line) == max: #", "usage[cmd] else: return None def help(cmd): helptext = {'#echo': '#echo text back', '#op':", "% (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times = int(die[0]) if die[0]", "# Warning: this does no locking, should only be used internally # The", "def __init__(self): self.timedjobs = [] self.timedjobslock = threading.Lock() self.cronctrl = [] self.cronctrllock =", "if matchcmd(cmdline, '#untrust', 'nick'): untrustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account", "account in trusted[chan]: if line == '': line = account elif len(line +", "+ '!*@*']) elif matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-v',", "def __str__(self): return 'Error with argument format: ' + msg ARG_STD = 0", "in argtypes: if type == ARG_STD or type == ARG_OPT: out.append(line[0]) line =", "', ' + account) <= 255: # Playing it safe not to get", "!= nick: if matchcmd(cmdline, '#chan', 'channel {command}'): newchan, newcmdline = parsecmd(cmdline, 'channel {command}')", "== 4: receiver, sender, origin, msg = line.split('\\t') if receiver not in msgs:", "parsecmd(cmdline, 'nick {message}') if chan == nick: # In a query: origin =", "split = die[1].index('-') plus = -int(die[1][split + 1:]) die[1] = die[1][:split] else: plus", "elif times > 128: irc.msg(reply, zwsp + 'Sorry, I don\\'t have that many.", "plus < 0: text = '%i (%s - %i)' % (result + plus,", "nick)) else: nicks = [] for nick in args: nicks.append(nick) if len(nicks) ==", "filter(filterbynick, accountcheck)) accountchecklock.release() return ids def getaccount(irc, nick): id = initaccountcheck(nick) irc.send('WHOIS '", 
"and account in trusted[chan]: trustedlock.release() return True else: trustedlock.release() return False def initaccountcheck(nick):", "out = [] for type in argtypes: if type == ARG_STD or type", "increment if max is unlimited max += 1 elif argtype == ARG_OPT: if", "elif len(line + ', ' + account) <= 255: # Playing it safe", "account == '': # '' Signifies failure return None else: return account def", "list', '#ls-trusted': 'list nicks that are trusted. use only in a query', '#chan':", "string raise ArgsfmtError('Ambiguous argument format') # Remove the command if len(line) == 0:", "list. whoisnick = line[3] for id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None: setaccountcheckvalue(id,", "'#src': 'paste a link to oonbotti2\\'s git repo', '#msg': 'send a message to", "or question[:2] != ':D': # Mandated by #osdev-offtopic law irc.msg(reply, zwsp + '%s:", "= str(result) if plus > 0: text = '%i (%s + %i)' %", "elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and chan ==", "or type == ARG_OPT: out.append(line[0]) line = line[1:] elif type == ARG_UNL: out.append('", "len(out) == 1: return out[0] else: return out def parse((line, irc)): global blacklist", "line[4] for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1] == '318': # WHOIS:", "= parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-v', args.split(' ')) elif matchcmd(cmdline, '#kick'): if", "map((lambda (time, fn): (time-1, fn)), self.timedjobs) torun = map((lambda (time, fn): fn), filter((lambda", "account) <= 255: # Playing it safe not to get truncated line +=", "in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '') # Mark as failed, ''", "time.sleep(0.1) removeaccountcheck(id) if account == '': # '' Signifies failure return None else:", "usage(cmd, False), helptext[cmd]) else: return '%s %s' % (cmd, usage(cmd, 
False)) else: return", "while account == None: account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account == '':", "args.split(' ')) elif matchcmd(cmdline, '#voice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+v',", "userN] trusted = {} trustedlock = threading.Lock() gods = {} godslock = threading.Lock()", "by oonbotti2 and identified with NickServ', '#devoice': 'remove your or nick\\'s voice in", "# Disallow non-final unlimited arguments raise ArgsfmtError('Non-final unlimited argument') if len(filter((lambda type: type", "'{args}') chmode(irc, chan, nick, '-o', args.split(' ')) elif matchcmd(cmdline, '#voice'): args = parsecmd(cmdline,", "line[:-1] if len(line.split('\\t')) == 4: receiver, sender, origin, msg = line.split('\\t') if receiver", "channel. Requires user to be trusted', '#help': 'give short info of command or", "index return None def setaccountcheckvalue(id, value): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id)", ":Fuck you' % (chan, nick)) else: if isauthorized(irc, chan, nick): irc.send('KICK %s %s", "cmd == 'QUIT': run = False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda (time,", "#kick nick reason') elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix')", "nick, account) accountcheck = [] accountcheckid = 0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class", "< 1: irc.msg(reply, zwsp + 'What exactly do you want me to do?')", "getindexbyaccountcheckid(id) if index is not None: ckid, cknick, value = accountcheck[index] accountchecklock.release() return", "if chan not in gods or account not in gods[chan]: rmtrusted(chan, untrustnick) godslock.release()", "msgs, msgslock with msgslock: msgs = {} f = open('msgs.txt', 'r') for line", "f.write('%s %s\\n' % (chan, account)) 
f.close trustedlock.release() def init(): global cron cron =", "# #chan: channel override prefix # Don't allow this in private messages for", "account elif len(line + ', ' + account) <= 255: # Playing it", "failed, '' is used because None is already reserved elif line[1] == 'INVITE'", "self.timedjobs = [] self.timedjobslock = threading.Lock() self.cronctrl = [] self.cronctrllock = threading.Lock() threading.Thread.__init__(self)", "'#msg', 'nick {message}'): msgnick, message = parsecmd(cmdline, 'nick {message}') if chan == nick:", "min, max def matchcmd(line, cmd, args=None): # matchcmd(line, cmd) matched if the command", "threading.Lock() threading.Thread.__init__(self) def queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self, cmd):", "%i)' % (result + plus, text, -plus) irc.msg(reply, zwsp + text) elif line[1]", "been deleted, use the parameter as-is if not account: if istrusted(chan, untrustnick): account", "Normal argument: foo out.append(ARG_STD) return out def getargnums(argtypes): min = 0 max =", "= line[3] for id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '') #", "Disallow non-final unlimited arguments raise ArgsfmtError('Non-final unlimited argument') if len(filter((lambda type: type ==", "to nick!*@*', '#dequiet': 'remove +q from nick!*@*', '#kick': 'kicks nick with specified reason',", "== '\\n': line = line[:-1] if len(line) > 0: chan, account = line.split()", "'#help': 'give short info of command or list commands'} if cmd=='': return '#echo", "account can't be found (e.g. 
it has been deleted, use the parameter as-is", "or account not in gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted() else: irc.msg(reply, zwsp +", "cmd in self.cronctrl: if cmd == 'QUIT': run = False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire()", "return out def parse((line, irc)): global blacklist global msgs, msgslock global trusted, trustedlock,", "[] accountcheckid = 0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def __init__(self): self.timedjobs", "elif len(line) >= 4 and len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp: #", "[] self.cronctrllock = threading.Lock() threading.Thread.__init__(self) def queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release()", "msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin, msg)) f.close() loadmessages() def addtrusted(chan,", "to increment if max is unlimited max += 1 elif argtype == ARG_UNL:", "istrusted(chan, account): irc.msg(reply, zwsp + '%s is trusted' % trustnick) else: irc.msg(reply, zwsp", "'\\xe2\\x80\\x8b' if nick in blacklist: return elif len(line) >= 4 and len(line[3]) >=", "to get truncated line += ', ' + account else: lines.append(line) line =", "= help(command) if helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and chan == '#esoteric':", "godslock.acquire() if chan not in gods or account not in gods[chan]: rmtrusted(chan, untrustnick)", "%s' % (chan, set_unset+mode, nick)) else: nicks = [] for nick in args:", "parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, untrustnick) # If account", "is not None: ckid, cknick, value = accountcheck[index] accountchecklock.release() return value def removeaccountcheck(id):", "# Only standard arguments given out = [] for type in argtypes: if", "identified with NickServ', 
'#devoice': 'remove your or nick\\'s voice in case you are", "msgs: for sender, origin, msg in msgs.pop(nick): irc.msg(nick, zwsp + '%s <%s> %s'", "irc.chan.split(' '): if isauthorized(irc, line[3][1:], nick): irc.send('JOIN ' + line[3]) elif line[1] ==", "= line.split(' ') nick = line[0].split('!')[0][1:] chan = line[2] if line[2][0] == '#'", "min: # Unlimited argument given out = [] for type in argtypes: if", ">= 2 and arg[0] == '[' and arg[-1] == ']': # Optional (0-1)", "untrustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, untrustnick) #", "%s %s' % (chan, set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan, account): trustedlock.acquire() if chan", "'#untrust'): if matchcmd(cmdline, '#untrust', 'nick'): untrustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick):", "matchcmd(cmdline, '#voice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+v', args.split(' ')) elif", "irc.msg(reply, zwsp + 'Usage: #msg nick message') elif matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?',", "import threading import random import re import time concmd=['/q', '/lt', '/st', '/lg', '/lm',", "def usage(cmd, message = True): usage = {'#echo': 'text', '#op': '[nick]', '#deop': '[nick]',", "accountchecklock accountchecklock.acquire() getid = lambda (id, nick, account): id filterbynick = lambda (id,", "kicknick.lower() == irc.nick: irc.send('KICK %s %s :Fuck you' % (chan, nick)) else: if", "Disallow more than one optional or unlimited argument per argument string raise ArgsfmtError('Ambiguous", "msgslock: f=open('msgs.txt', 'w') for receiver in msgs: for sender, origin, msg in msgs[receiver]:", "None: ckid, cknick, value = accountcheck[index] accountchecklock.release() return value def removeaccountcheck(id): global accountcheck,", "True): usage = {'#echo': 'text', '#op': '[nick]', '#deop': '[nick]', '#voice': '[nick]', '#devoice': '[nick]',", "= True if msgs_changed: savemessages() def 
execcmd(cmdline): if cmdline[0] == '/q': cron.ctrl('QUIT') elif", "def istrusted(chan, account): trustedlock.acquire() if chan in trusted and account in trusted[chan]: trustedlock.release()", "'#chan') and chan != nick: if matchcmd(cmdline, '#chan', 'channel {command}'): newchan, newcmdline =", "max: return True elif not max and len(line)-1 >= min: return True else:", "'nick') chmode(irc, chan, nick, '-q', [arg + '!*@*']) elif matchcmd(cmdline, '#devoice'): args =", "'[command]'} if cmd in usage: if message: return 'Usage: %s %s' % (cmd,", "getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire() getid = lambda (id, nick, account): id filterbynick", "you want me to do?') elif times > 128: irc.msg(reply, zwsp + 'Sorry,", "getindexbyaccountcheckid(id) if index is not None: del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck,", "= 0 ARG_OPT = 1 ARG_UNL = 2 def parseargsfmt(args): # parses the", "line = account elif len(line + ', ' + account) <= 255: #", "return account def isauthorized(irc, chan, nick): account = getaccount(irc, nick) if account: return", "args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+o', args.split(' ')) elif matchcmd(cmdline, '#deop'):", "= 0 die = '%' if die[1] == '%' else int(die[1]) if die", "plus = 0 die = '%' if die[1] == '%' else int(die[1]) if", "returned cannot be guaranteed valid if lock is released between call to getindexbyaccountcheckid", "len(line)-1 >= min: return True else: return False def parsecmd(line, args): # Returns", "'.join(line)) line = [] else: raise ArgsfmtError('Number of given arguments not possible for", "match too if len(line) == 0: return False if line[0] != cmd: return", "1 and ARG_UNL in argtypes[:-1]: # Disallow non-final unlimited arguments raise ArgsfmtError('Non-final unlimited", "msg ARG_STD = 0 ARG_OPT = 1 ARG_UNL = 2 def parseargsfmt(args): #", "times != 1: irc.msg(reply, zwsp + 'Not supported') else: 
irc.msg(reply, zwsp + '%s%s'", "None def setaccountcheckvalue(id, value): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index", "msg)) f.close() def savemessages(): global msgs, msgslock with msgslock: f=open('msgs.txt', 'w') for receiver", "'#msg': 'nick message', '#trusted?': '[nick]', '#trust': 'nick', '#untrust': 'nick', '#ls-trusted': '', '#chan': 'channel", "split = die[1].index('+') plus = int(die[1][split + 1:]) die[1] = die[1][:split] elif '-'", "return None else: return account def isauthorized(irc, chan, nick): account = getaccount(irc, nick)", "zwsp + 'Usage: #msg nick message') elif matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'):", "else: if isauthorized(irc, chan, nick): irc.send('KICK %s %s :%s'%(chan, kicknick, kickreason)) else: irc.msg(reply,", "or unlimited argument per argument string raise ArgsfmtError('Ambiguous argument format') # Remove the", "accountcheck = [] accountcheckid = 0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def", "run: time.sleep(1) # Accuracy doesn't need to be high self.cronctrllock.acquire() for cmd in", "arg[0] == '[' and arg[-1] == ']': # Optional (0-1) argument: [bar] out.append(ARG_OPT)", "or line[1] == 'JOIN') and nick in msgs: for sender, origin, msg in", "return if line[1]=='PRIVMSG' and line[3][:2] != ': ': reply = chan cmdline =", "getargnums(argtypes): min = 0 max = 0 # max = None if number", "are trusted by oonbotti2 and identified with NickServ', '#quiet': 'give +q to nick!*@*',", "per argument string raise ArgsfmtError('Ambiguous argument format') # Remove the command if len(line)", "cknick, value = accountcheck[index] accountchecklock.release() return value def removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire()", "= parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, 
untrustnick) # If", "+ 1:]) die[1] = die[1][:split] else: plus = 0 die = '%' if", "identified with NickServ', '#deop': 'remove your/nick\\'s op rights', '#voice': 'give nick or yourself", "<%s> %s' % (origin, sender, msg)) msgs_changed = True if msgs_changed: savemessages() def", "#osdev-offtopic law irc.msg(reply, zwsp + '%s: %s' % (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die", "account): trustedlock.acquire() if chan in trusted and account in trusted[chan]: trustedlock.release() return True", "msgs[msgnick] = [] msgs[msgnick].append((nick, origin, message)) savemessages() else: irc.msg(reply, zwsp + 'Usage: #msg", "value will be assigned '' argtypes = parseargsfmt(args) if len(argtypes) >= 1 and", "you if nick or yourself is trusted by oonbotti2', '#trust': 'add nick to", ">= min: return True else: return False def parsecmd(line, args): # Returns a", "f.close trustedlock.release() def init(): global cron cron = Cron() cron.start() loadtrusted() loadgods() def", "(random.randint(0,9), random.randint(0,9))) elif die < 1: irc.msg(reply, zwsp + 'This die is not", "'send a message to nick', '#trusted?': 'tell you if nick or yourself is", "on the specified channel. 
Requires user to be trusted', '#help': 'give short info", "len(line) == min: # Only standard arguments given out = [] for type", "chan, nick): if args == ['']: irc.send('MODE %s %s %s' % (chan, set_unset+mode,", "nick') elif matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan in trusted: lines = [] line", "'+q', [arg + '!*@*']) elif matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan,", "+ 'Usage #chan channel command') if matchcmd(cmdline, '#echo'): text = parsecmd(cmdline, '{text}') irc.msg(reply,", "in die[1]: split = die[1].index('-') plus = -int(die[1][split + 1:]) die[1] = die[1][:split]", "'\\n': line = line[:-1] if len(line) > 0: chan, account = line.split() if", "sender, origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin, msg)) f.close() loadmessages()", "given arguments not possible for given format string') if len(out) == 1: return", "trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp + '%s: #invite has been removed. Use", "#invite has been removed. Use manual invite' % nick) elif matchcmd(cmdline, '#help'): if", "== '[' and arg[-1] == ']': # Optional (0-1) argument: [bar] out.append(ARG_OPT) elif", "if kicknick.lower() == irc.nick: irc.send('KICK %s %s :Fuck you' % (chan, nick)) else:", "f: while len(line) > 0 and line[-1] == '\\n': line = line[:-1] if", "if istrusted(chan, account): irc.msg(reply, zwsp + '%s is trusted' % trustnick) else: irc.msg(reply,", "= accountcheck[index] if ckid == id: return index return None def setaccountcheckvalue(id, value):", "(time, fn): time>0), self.timedjobs) self.timedjobslock.release() for fn in torun: fn() def loadmessages(): global", "in argtypes: if type == ARG_STD: out.append(line[0]) line = line[1:] else: out.append('') elif", "account for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage: #trusted? 
[nick]') elif", "'nick {message}'): msgnick, message = parsecmd(cmdline, 'nick {message}') if chan == nick: #", "deleted, use the parameter as-is if not account: if istrusted(chan, untrustnick): account =", "[] line = '' for account in trusted[chan]: if line == '': line", "= lambda (id, cknick, account): cknick == nick ids = map(getid, filter(filterbynick, accountcheck))", "in argtypes: if argtype == ARG_STD: min += 1 if max != None:", "setaccountcheckvalue(id, '') # Mark as failed, '' is used because None is already", "specified channel. Requires user to be trusted', '#help': 'give short info of command", "self.timedjobslock.release() for fn in torun: fn() def loadmessages(): global msgs, msgslock with msgslock:", "chan == nick: # In a query: origin = \"[query]\" else: # In", "kicknick, kickreason)) else: irc.msg(reply, zwsp + 'Usage #kick nick reason') elif matchcmd(cmdline, '#src'):", "line = line[1:] else: out.append('') elif max and len(line) == max: # Optional", "if isauthorized(irc, chan, nick): account = getaccount(irc, untrustnick) # If account can't be", "fn): time<=0), self.timedjobs)) self.timedjobs = filter((lambda (time, fn): time>0), self.timedjobs) self.timedjobslock.release() for fn", "+ line[4:] while '' in cmdline: cmdline.remove('') # #chan: channel override prefix #", "msgslock = threading.Lock() # (ID, nick, account) accountcheck = [] accountcheckid = 0", "elif line[1] == '318': # WHOIS: End of /WHOIS list. 
whoisnick = line[3]", "threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def __init__(self): self.timedjobs = [] self.timedjobslock = threading.Lock() self.cronctrl", "if chan not in trusted: trusted[chan] = [] if account not in trusted[chan]:", "nick, value) accountchecklock.release() def getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if", "for i in xrange(times)] result = reduce((lambda x, y: x + y), rolls)", "parsecmd(cmdline, '[nick]') if trustnick == '': trustnick = nick account = getaccount(irc, trustnick)", "trusted: trusted[chan] = [] if account not in trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan,", "not max and len(line)-1 >= min: return True else: return False def parsecmd(line,", "'#trust': 'add nick to trusted list', '#untrust': 'remove nick from trusted list', '#ls-trusted':", "not args: return True min, max = getargnums(parseargsfmt(args)) if max and len(line)-1 >=", "godslock.acquire() gods = {} f=open('gods.txt', 'r') for line in f: while len(line) >", "def getaccount(irc, nick): id = initaccountcheck(nick) irc.send('WHOIS ' + nick) cron.queuejob(5, (lambda :", "out = [] for type in argtypes: if type == ARG_STD: out.append(line[0]) line", "len(line) == max: # Optional argument given out = [] for type in", "[] else: raise ArgsfmtError('Number of given arguments not possible for given format string')", "'[nick]', '#voice': '[nick]', '#devoice': '[nick]', '#quiet': 'nick', '#dequiet': 'nick', '#kick': 'nick [reason]', '#src':", "len(question) < 2 or question[:2] != ':D': # Mandated by #osdev-offtopic law irc.msg(reply,", "line = '' for account in trusted[chan]: if line == '': line =", "msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin, msg)) f.close() loadmessages() def addtrusted(chan, account): global", "len(arg) >= 2 and arg[0] == '[' and arg[-1] == 
']': # Optional", "= chan with msgslock: if msgnick not in msgs: msgs[msgnick] = [] msgs[msgnick].append((nick,", "line[3] account = line[4] for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1] ==", "is trusted by oonbotti2', '#trust': 'add nick to trusted list', '#untrust': 'remove nick", "max is unlimited max += 1 elif argtype == ARG_UNL: max = None", "space-time region.') elif times < 1: irc.msg(reply, zwsp + 'What exactly do you", "global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id = accountcheckid accountcheck.append((id, nick, None)) accountcheckid +=", "(lambda : setaccountcheckvalue(id, ''))) account = None while account == None: account =", "== '%' else int(die[1]) if die == '%': if times != 1: irc.msg(reply,", "-int(die[1][split + 1:]) die[1] = die[1][:split] else: plus = 0 die = '%'", "message2), ..., (senderN, origin2, messageN)] msgs = {} msgslock = threading.Lock() # (ID,", "godslock.release() def savetrusted(): global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for chan in trusted:", "1 ARG_UNL = 2 def parseargsfmt(args): # parses the argument format used by", "irc.msg(reply, zwsp + 'Failed to get account for %s' % untrustnick) else: irc.msg(reply,", "message', '#trusted?': '[nick]', '#trust': 'nick', '#untrust': 'nick', '#ls-trusted': '', '#chan': 'channel command', '#help':", "== 0: raise ArgsfmtError('No command given') line = line[1:] min, max = getargnums(argtypes)", "max = getargnums(argtypes) if len(line) == min: # Only standard arguments given out", "# Don't allow this in private messages for more transparent bot usage if", "in xrange(times)] result = reduce((lambda x, y: x + y), rolls) if times", "reason') elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and chan", ">= 4 and len(line[3]) >= len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp: # If line", "% 
(str(result), ', '.join([str(i) for i in rolls])) else: text = str(result) if", "> 0 and line[-1] == '\\n': line = line[:-1] if len(line) > 0:", "'#quiet': 'nick', '#dequiet': 'nick', '#kick': 'nick [reason]', '#src': '', '#msg': 'nick message', '#trusted?':", "f.close() def loadgods(): global gods, godslock godslock.acquire() gods = {} f=open('gods.txt', 'r') for", "(origin, sender, msg)) msgs_changed = True if msgs_changed: savemessages() def execcmd(cmdline): if cmdline[0]", "or list commands'} if cmd=='': return '#echo #op #deop #voice #devoice #quiet #dequiet", "trustedlock.acquire() if chan in trusted and account in trusted[chan]: trustedlock.release() return True else:", "return None def help(cmd): helptext = {'#echo': '#echo text back', '#op': 'give nick", "# receiver: [(sender1, origin1, message1), (sender2, origin2, message2), ..., (senderN, origin2, messageN)] msgs", "# Remove the command if len(line) == 0: raise ArgsfmtError('No command given') line", "rmtrusted(chan, untrustnick) godslock.release() savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account for", "by oonbotti2 and identified with NickServ', '#quiet': 'give +q to nick!*@*', '#dequiet': 'remove", "trustnick) if account: if istrusted(chan, account): irc.msg(reply, zwsp + '%s is trusted' %", "nick, '-q', [arg + '!*@*']) elif matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline, '{args}') chmode(irc,", "(e.g. it has been deleted, use the parameter as-is if not account: if", "newchan, newcmdline = parsecmd(cmdline, 'channel {command}') newcmdline = newcmdline.split(' ') if isauthorized(irc, newchan,", "'Sorry, I don\\'t have that many. Can I borrow yours?') else: rolls =", "foo out.append(ARG_STD) return out def getargnums(argtypes): min = 0 max = 0 #", "that are trusted. use only in a query', '#chan': 'Runs the command as", "savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account for %s' % untrustnick)", "zwsp + 'Sorry, I don\\'t have that many. 
Can I borrow yours?') else:", "irc.send('KICK %s %s :%s'%(chan, kicknick, kickreason)) else: irc.msg(reply, zwsp + 'Usage #kick nick", "addtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan not in trusted: trusted[chan] =", "line[1]=='PRIVMSG' and line[3][:2] != ': ': reply = chan cmdline = [line[3][1:]] +", "% (origin, sender, msg)) msgs_changed = True if msgs_changed: savemessages() def execcmd(cmdline): if", "+ 'Identify with NickServ') class ArgsfmtError(Exception): def __init__(self, msg): self.msg = msg def", "% (cmd, usage[cmd]) else: return usage[cmd] else: return None def help(cmd): helptext =", "matchcmd(cmdline, '#prefix') and chan == '#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'):", "trusted = {} trustedlock = threading.Lock() gods = {} godslock = threading.Lock() #", "{message}'): msgnick, message = parsecmd(cmdline, 'nick {message}') if chan == nick: # In", "Mandated by #osdev-offtopic law irc.msg(reply, zwsp + '%s: %s' % (nick, doctor.respond(question))) elif", "if argtype == ARG_STD: min += 1 if max != None: # Don't", "0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def __init__(self): self.timedjobs = [] self.timedjobslock", "'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg', 'nick {message}'): msgnick, message = parsecmd(cmdline,", "in trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global trusted, trustedlock trustedlock.acquire() trusted = {}", "= {} trustedlock = threading.Lock() gods = {} godslock = threading.Lock() # receiver:", "nick!*@*', '#dequiet': 'remove +q from nick!*@*', '#kick': 'kicks nick with specified reason', '#src':", "if len(line) == min: # Only standard arguments given out = [] for", "= {} trustedlock.release() f=open('trusted.txt', 'r') for line in f: while 
len(line) > 0", "= open('msgs.txt', 'r') for line in f: while len(line) > 0 and line[-1]", "newchan cmdline = newcmdline else: irc.msg(chan, zwsp + 'Usage #chan channel command') if", "len(line) == 0: raise ArgsfmtError('No command given') line = line[1:] min, max =", "' + nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id, ''))) account = None while account", "self.cronctrllock.release() def run(self): run = True while run: time.sleep(1) # Accuracy doesn't need", "with msgslock: f=open('msgs.txt', 'w') for receiver in msgs: for sender, origin, msg in", "0 ARG_OPT = 1 ARG_UNL = 2 def parseargsfmt(args): # parses the argument", "= line[0].split('!')[0][1:] chan = line[2] if line[2][0] == '#' else nick zwsp =", "a query: origin = \"[query]\" else: # In a channel origin = chan", "accountcheckid, accountchecklock accountchecklock.acquire() id = accountcheckid accountcheck.append((id, nick, None)) accountcheckid += 1 accountchecklock.release()", "= map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return ids def getaccount(irc, nick): id = initaccountcheck(nick)", "filter((lambda (time, fn): time<=0), self.timedjobs)) self.timedjobs = filter((lambda (time, fn): time>0), self.timedjobs) self.timedjobslock.release()", "line = [] else: raise ArgsfmtError('Number of given arguments not possible for given", "channel: [user1, user2, ..., userN] trusted = {} trustedlock = threading.Lock() gods =", "[] if account not in trusted[chan]: trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account): global trusted,", "line[-1] == '\\n': line = line[:-1] if len(line.split('\\t')) == 4: receiver, sender, origin,", "elif line[1] == '330': # WHOIS: is logged in as whoisnick = line[3]", "value def removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is", "'#kick': 'nick [reason]', '#src': '', '#msg': 'nick message', '#trusted?': '[nick]', '#trust': 'nick', 
'#untrust':", "matchcmd(cmdline, '#deop'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '-o', args.split(' ')) elif", "given format string') if len(out) == 1: return out[0] else: return out def", "valid if lock is released between call to getindexbyaccountcheckid and use! def getindexbyaccountcheckid(id):", "'{' and arg[-1] == '}': # Unlimited (0-) number of arguments: {baz} out.append(ARG_UNL)", "else: irc.msg(reply, zwsp + 'Usage #kick nick reason') elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp", "die[1][:split] elif '-' in die[1]: split = die[1].index('-') plus = -int(die[1][split + 1:])", "shall.' elif cmd in helptext: if helptext[cmd]: return '%s %s %s' % (cmd,", "+ nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id, ''))) account = None while account ==", "xrange(times)] result = reduce((lambda x, y: x + y), rolls) if times >", "irc.nick+':']: question = parsecmd(cmdline, '{question}') if len(question) < 2 or question[:2] != ':D':", "nicks = [] if nicks: irc.send('MODE %s %s %s' % (chan, set_unset+mode*len(nicks), '", "= chan cmdline = [line[3][1:]] + line[4:] while '' in cmdline: cmdline.remove('') #", "matchcmd(cmdline, '#ls-trusted'): trustedlock.acquire() if chan in trusted: lines = [] line = ''", "irc.send('JOIN ' + line[3]) elif line[1] == '482': irc.msg(line[3], zwsp + 'Not op')", "loadgods() def chmode(irc, chan, nick, mode, args): set_unset = mode[0] mode = mode[1:]", "blacklist global msgs, msgslock global trusted, trustedlock, gods, godslock global doctor, die_expr line", "= [] for type in argtypes: if type == ARG_STD: out.append(line[0]) line =", "False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda (time, fn): (time-1, fn)), self.timedjobs) torun", "and line[2] == irc.nick and line[3][1:] in irc.chan.split(' '): if isauthorized(irc, line[3][1:], nick):", "'nick', '#kick': 'nick [reason]', '#src': '', '#msg': 'nick message', '#trusted?': '[nick]', '#trust': 'nick',", 
"for line in lines: irc.msg(nick, zwsp + '%s: %s' % (chan, line)) trustedlock.release()", "accountcheckid accountcheck.append((id, nick, None)) accountcheckid += 1 accountchecklock.release() return id # Warning: this", "is unlimited max += 1 elif argtype == ARG_OPT: if max != None:", "elif not max and len(line) > min: # Unlimited argument given out =", "msgslock with msgslock: msgs = {} f = open('msgs.txt', 'r') for line in", "die[1].index('+') plus = int(die[1][split + 1:]) die[1] = die[1][:split] elif '-' in die[1]:", "in a query', '#chan': 'Runs the command as if it was sent on", "ARG_STD or type == ARG_OPT: out.append(line[0]) line = line[1:] elif type == ARG_UNL:", "die[1] = die[1][:split] else: plus = 0 die = '%' if die[1] ==", "addtrusted(chan, account) f.close() def loadgods(): global gods, godslock godslock.acquire() gods = {} f=open('gods.txt',", "untrustnick) godslock.release() savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account for %s'", "len(line) > 0 and line[-1] == '\\n': line = line[:-1] if len(line) >", "nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id, ''))) account = None while account == None:", "'w') for chan in trusted: for account in trusted[chan]: f.write('%s %s\\n' % (chan,", "= [] accountcheckid = 0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\") class Cron(threading.Thread): def __init__(self):", "Cron() cron.start() loadtrusted() loadgods() def chmode(irc, chan, nick, mode, args): set_unset = mode[0]", "istrusted(chan, account) else: irc.msg(nick, zwsp + 'Identify with NickServ') class ArgsfmtError(Exception): def __init__(self,", "else: irc.msg(reply, zwsp + 'Failed to get account for %s' % trustnick) else:", "kickreason = parsecmd(cmdline, 'nick {reason}') if kicknick.lower() == irc.nick: irc.send('KICK %s %s :Fuck", "args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+v', args.split(' ')) elif matchcmd(cmdline, '#quiet'):", "self.timedjobs) torun = map((lambda 
(time, fn): fn), filter((lambda (time, fn): time<=0), self.timedjobs)) self.timedjobs", "if args == ['']: irc.send('MODE %s %s %s' % (chan, set_unset+mode, nick)) else:", "[nick]') elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust', 'nick'): trustnick = parsecmd(cmdline, 'nick') if", "die[1]: split = die[1].index('+') plus = int(die[1][split + 1:]) die[1] = die[1][:split] elif", "in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1] == '318': # WHOIS: End of /WHOIS", "message)) savemessages() else: irc.msg(reply, zwsp + 'Usage: #msg nick message') elif matchcmd(cmdline, '#trusted?'):", "raise ArgsfmtError('No command given') line = line[1:] min, max = getargnums(argtypes) if len(line)", "= [] doctor = eliza.eliza() # channel: [user1, user2, ..., userN] trusted =", "i in rolls])) else: text = str(result) if plus > 0: text =", "msgslock global trusted, trustedlock, gods, godslock global doctor, die_expr line = line.split(' ')", "use the parameter as-is if not account: if istrusted(chan, untrustnick): account = untrustnick", "lines: irc.msg(nick, zwsp + '%s: %s' % (chan, line)) trustedlock.release() elif matchcmd(cmdline, '#invite'):", "if '+' in die[1]: split = die[1].index('+') plus = int(die[1][split + 1:]) die[1]", "trustedlock.acquire() if chan not in trusted: trusted[chan] = [] if account not in", "None def help(cmd): helptext = {'#echo': '#echo text back', '#op': 'give nick or", "text) elif line[1] == '330': # WHOIS: is logged in as whoisnick =", "in args: nicks.append(nick) if len(nicks) == 4: irc.send('MODE %s %s %s' % (chan,", "def ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run = True while run:", "to nick', '#trusted?': 'tell you if nick or yourself is trusted by oonbotti2',", "not possible for given format string') if len(out) == 1: return out[0] else:", "# WHOIS: End of /WHOIS list. 
whoisnick = line[3] for id in getaccountcheckidbynick(whoisnick):", "trustedlock.release() return False def initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id = accountcheckid", "account if line != '': lines.append(line) for line in lines: irc.msg(nick, zwsp +", "if matchcmd(cmdline, '#trust', 'nick'): trustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account", "is used, matchcmd(line, cmd, args) checks whether the args match too if len(line)", "== '/sm': savemessages() def usage(cmd, message = True): usage = {'#echo': 'text', '#op':", "0: raise ArgsfmtError('No command given') line = line[1:] min, max = getargnums(argtypes) if", "high self.cronctrllock.acquire() for cmd in self.cronctrl: if cmd == 'QUIT': run = False", "'' in cmdline: cmdline.remove('') # #chan: channel override prefix # Don't allow this", "cmd: return False if not args: return True min, max = getargnums(parseargsfmt(args)) if", "for id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '') # Mark as", "'', '#msg': 'nick message', '#trusted?': '[nick]', '#trust': 'nick', '#untrust': 'nick', '#ls-trusted': '', '#chan':", "{'#echo': '#echo text back', '#op': 'give nick or yourself op rights in case", "trustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, trustnick) if", "irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and chan == '#esoteric': irc.msg(reply, zwsp + 'Nothing", "ckid == id: return index return None def setaccountcheckvalue(id, value): global accountcheck, accountchecklock", "else: out.append('') elif max and len(line) == max: # Optional argument given out", "in msgs: msgs[receiver] = [] msgs[receiver].append((sender, origin, msg)) f.close() def savemessages(): global msgs,", "trustnick) if account: addtrusted(chan, account) savetrusted() else: irc.msg(reply, zwsp + 'Failed to get", "trusted, 
trustedlock, gods, godslock global doctor, die_expr line = line.split(' ') nick =", "1: irc.msg(reply, zwsp + 'Not supported') else: irc.msg(reply, zwsp + '%s%s' % (random.randint(0,9),", "None while account == None: account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account ==", "') nick = line[0].split('!')[0][1:] chan = line[2] if line[2][0] == '#' else nick", "not None: del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick): global accountcheck, accountchecklock accountchecklock.acquire() getid =", "str(result) if plus > 0: text = '%i (%s + %i)' % (result", "= nick account = getaccount(irc, trustnick) if account: if istrusted(chan, account): irc.msg(reply, zwsp", "in your space-time region.') elif times < 1: irc.msg(reply, zwsp + 'What exactly", "(%s)' % (str(result), ', '.join([str(i) for i in rolls])) else: text = str(result)", "'#help': '[command]'} if cmd in usage: if message: return 'Usage: %s %s' %", "elif cmdline[0] == '/lt': loadtrusted() elif cmdline[0] == '/st': savetrusted() elif cmdline[0] ==", "{baz} out.append(ARG_UNL) else: # Normal argument: foo out.append(ARG_STD) return out def getargnums(argtypes): min", "Don't allow this in private messages for more transparent bot usage if matchcmd(cmdline,", "yourself op rights in case you are trusted by oonbotti2 and identified with", "messages for more transparent bot usage if matchcmd(cmdline, '#chan') and chan != nick:", ">= len(zwsp)+1 and line[3][:len(zwsp)+1] == ':'+zwsp: # If line begins with ZWSP return", "account) f.close() def loadgods(): global gods, godslock godslock.acquire() gods = {} f=open('gods.txt', 'r')", "+= 1 elif argtype == ARG_UNL: max = None return min, max def", "def savetrusted(): global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt', 'w') for chan in trusted: for", "chan not in trusted: trusted[chan] = [] if account not in trusted[chan]: trusted[chan].append(account)", "line == '': line = account elif 
len(line + ', ' + account)", "'/lm', '/sm'] blacklist = [] doctor = eliza.eliza() # channel: [user1, user2, ...,", "zwsp = '\\xe2\\x80\\x8b' if nick in blacklist: return elif len(line) >= 4 and", "== '/q': cron.ctrl('QUIT') elif cmdline[0] == '/lt': loadtrusted() elif cmdline[0] == '/st': savetrusted()", "if type == ARG_STD or type == ARG_OPT: out.append(line[0]) line = line[1:] else:", "class ArgsfmtError(Exception): def __init__(self, msg): self.msg = msg def __str__(self): return 'Error with", "account = getaccount(irc, nick) if account: return istrusted(chan, account) else: irc.msg(nick, zwsp +", "== '#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg', 'nick", "or type == ARG_OPT: out.append(line[0]) line = line[1:] else: out.append('') elif not max", "#devoice #quiet #dequiet #kick #src #msg #trusted? #trust #untrust #ls-trusted #chan #help' elif", "else: trustedlock.release() return False def initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id =", "return min, max def matchcmd(line, cmd, args=None): # matchcmd(line, cmd) matched if the", "1: text = '%s (%s)' % (str(result), ', '.join([str(i) for i in rolls]))", "== irc.nick and line[3][1:] in irc.chan.split(' '): if isauthorized(irc, line[3][1:], nick): irc.send('JOIN '", "lines.append(line) line = account if line != '': lines.append(line) for line in lines:", "'add nick to trusted list', '#untrust': 'remove nick from trusted list', '#ls-trusted': 'list", "trustnick) else: irc.msg(reply, zwsp + '%s is not trusted' % trustnick) else: irc.msg(reply,", "= getindexbyaccountcheckid(id) if index is not None: ckid, nick, ckvalue = accountcheck[index] accountcheck[index]", "reply = chan cmdline = [line[3][1:]] + line[4:] while '' in cmdline: cmdline.remove('')", "matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '-q', [arg + '!*@*'])", 
"(%s + %i)' % (result + plus, text, plus) elif plus < 0:", "already reserved elif line[1] == 'INVITE' and line[2] == irc.nick and line[3][1:] in", "chan, nick, '+o', args.split(' ')) elif matchcmd(cmdline, '#deop'): args = parsecmd(cmdline, '{args}') chmode(irc,", "': reply = chan cmdline = [line[3][1:]] + line[4:] while '' in cmdline:", "account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account == '': # '' Signifies failure", "% trustnick) else: irc.msg(reply, zwsp + 'Failed to get account for %s' %", "and identified with NickServ', '#quiet': 'give +q to nick!*@*', '#dequiet': 'remove +q from", "'#src': '', '#msg': 'nick message', '#trusted?': '[nick]', '#trust': 'nick', '#untrust': 'nick', '#ls-trusted': '',", "torun = map((lambda (time, fn): fn), filter((lambda (time, fn): time<=0), self.timedjobs)) self.timedjobs =", "msgslock: if msgnick not in msgs: msgs[msgnick] = [] msgs[msgnick].append((nick, origin, message)) savemessages()", "line[2][0] == '#' else nick zwsp = '\\xe2\\x80\\x8b' if nick in blacklist: return", "'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and chan == '#osdev-offtopic': irc.msg(reply, zwsp + 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif", "helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and chan == '#esoteric': irc.msg(reply, zwsp +", "fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def run(self):", "message: return 'Usage: %s %s' % (cmd, usage[cmd]) else: return usage[cmd] else: return", "this does no locking, should only be used internally # The index returned", "and use! 
def getindexbyaccountcheckid(id): global accountcheck for index in range(len(accountcheck)): ckid, cknick, ckaccount", "# Normal argument: foo out.append(ARG_STD) return out def getargnums(argtypes): min = 0 max", "'%s%s' % (random.randint(0,9), random.randint(0,9))) elif die < 1: irc.msg(reply, zwsp + 'This die", "trustedlock, gods, godslock global doctor, die_expr line = line.split(' ') nick = line[0].split('!')[0][1:]", "in msgs: msgs[msgnick] = [] msgs[msgnick].append((nick, origin, message)) savemessages() else: irc.msg(reply, zwsp +", "max = None return min, max def matchcmd(line, cmd, args=None): # matchcmd(line, cmd)", "'#deop': '[nick]', '#voice': '[nick]', '#devoice': '[nick]', '#quiet': 'nick', '#dequiet': 'nick', '#kick': 'nick [reason]',", "irc.msg(reply, zwsp + 'Sorry, I don\\'t have that many. Can I borrow yours?')", "ARG_UNL: out.append(' '.join(line)) line = [] else: raise ArgsfmtError('Number of given arguments not", ":%s'%(chan, kicknick, kickreason)) else: irc.msg(reply, zwsp + 'Usage #kick nick reason') elif matchcmd(cmdline,", "min: # Only standard arguments given out = [] for type in argtypes:", "+= 1 if max != None: # Don't try to increment if max", "sender, msg)) msgs_changed = True if msgs_changed: savemessages() def execcmd(cmdline): if cmdline[0] ==", "savetrusted() elif cmdline[0] == '/lg': loadgods() elif cmdline[0] == '/lm': loadmessages() elif cmdline[0]", "execcmd(cmdline): if cmdline[0] == '/q': cron.ctrl('QUIT') elif cmdline[0] == '/lt': loadtrusted() elif cmdline[0]", "trustedlock.release() def init(): global cron cron = Cron() cron.start() loadtrusted() loadgods() def chmode(irc,", "+ '%s is not trusted' % trustnick) else: irc.msg(reply, zwsp + 'Failed to", "1 elif argtype == ARG_OPT: if max != None: # Don't try to", "2 def parseargsfmt(args): # parses the argument format used by matchcmd and parsecmd", "'remove your or nick\\'s voice in case you are trusted by oonbotti2 and", "'': trustnick = nick account = getaccount(irc, 
trustnick) if account: if istrusted(chan, account):", "matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '+q', [arg + '!*@*'])", "> min: # Unlimited argument given out = [] for type in argtypes:", "trusted[chan]: if line == '': line = account elif len(line + ', '", "in trusted[chan]: trustedlock.release() return True else: trustedlock.release() return False def initaccountcheck(nick): global accountcheck,", "or yourself is trusted by oonbotti2', '#trust': 'add nick to trusted list', '#untrust':", "= parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+o', args.split(' ')) elif matchcmd(cmdline, '#deop'): args", "if len(filter((lambda type: type == ARG_OPT or type == ARG_UNL), argtypes)) > 1:", "Signifies failure return None else: return account def isauthorized(irc, chan, nick): account =", "if line[1]=='PRIVMSG' and line[3][:2] != ': ': reply = chan cmdline = [line[3][1:]]", "matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg', 'nick {message}'): msgnick, message = parsecmd(cmdline, 'nick {message}')", "if len(line) > 0: chan, account = line.split() addtrusted(chan, account) f.close() def loadgods():", "open('msgs.txt', 'r') for line in f: while len(line) > 0 and line[-1] ==", "matchcmd(cmdline, '#chan', 'channel {command}'): newchan, newcmdline = parsecmd(cmdline, 'channel {command}') newcmdline = newcmdline.split('", "origin = \"[query]\" else: # In a channel origin = chan with msgslock:", "optional or unlimited argument per argument string raise ArgsfmtError('Ambiguous argument format') # Remove", "'nick {message}') if chan == nick: # In a query: origin = \"[query]\"", "+ 'Failed to get account for %s' % untrustnick) else: irc.msg(reply, zwsp +", "nick): chan = newchan cmdline = newcmdline else: irc.msg(chan, zwsp + 'Usage #chan", "__str__(self): return 'Error with argument format: ' + msg ARG_STD = 0 ARG_OPT", "+ '%s is trusted' % trustnick) else: irc.msg(reply, zwsp + '%s is not", "and chan == '#esoteric': irc.msg(reply, 
zwsp + 'Nothing here') elif cmdline[0] in [irc.nick,", "False), helptext[cmd]) else: return '%s %s' % (cmd, usage(cmd, False)) else: return None", "to do?') elif times > 128: irc.msg(reply, zwsp + 'Sorry, I don\\'t have", "chan = newchan cmdline = newcmdline else: irc.msg(chan, zwsp + 'Usage #chan channel", "elif die < 1: irc.msg(reply, zwsp + 'This die is not available in", "line[0] != cmd: return False if not args: return True min, max =", "times > 1: text = '%s (%s)' % (str(result), ', '.join([str(i) for i", "#op #deop #voice #devoice #quiet #dequiet #kick #src #msg #trusted? #trust #untrust #ls-trusted", "irc.msg(reply, zwsp + 'This die is not available in your space-time region.') elif", "def help(cmd): helptext = {'#echo': '#echo text back', '#op': 'give nick or yourself", "(time, fn): time<=0), self.timedjobs)) self.timedjobs = filter((lambda (time, fn): time>0), self.timedjobs) self.timedjobslock.release() for", "if chan in trusted and account in trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global", "elif cmd in helptext: if helptext[cmd]: return '%s %s %s' % (cmd, usage(cmd,", "out.append(ARG_STD) return out def getargnums(argtypes): min = 0 max = 0 # max", "value) accountchecklock.release() def getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index", "irc.send('KICK %s %s :Fuck you' % (chan, nick)) else: if isauthorized(irc, chan, nick):", "parsecmd(cmdline, 'channel {command}') newcmdline = newcmdline.split(' ') if isauthorized(irc, newchan, nick): chan =", "If line begins with ZWSP return if line[1]=='PRIVMSG' and line[3][:2] != ': ':", "isauthorized(irc, chan, nick): account = getaccount(irc, trustnick) if account: addtrusted(chan, account) savetrusted() else:", "ARG_UNL] args = args.split(' ') out = [] for arg in args: if", "die[1] == '%' else int(die[1]) if die == '%': if times != 1:", "def isauthorized(irc, chan, nick): account 
= getaccount(irc, nick) if account: return istrusted(chan, account)", "= die[1][:split] elif '-' in die[1]: split = die[1].index('-') plus = -int(die[1][split +", "return usage[cmd] else: return None def help(cmd): helptext = {'#echo': '#echo text back',", "parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '+q', [arg + '!*@*']) elif matchcmd(cmdline, '#dequiet'): arg", "loadgods(): global gods, godslock godslock.acquire() gods = {} f=open('gods.txt', 'r') for line in", "initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id = accountcheckid accountcheck.append((id, nick, None)) accountcheckid", "import eliza import threading import random import re import time concmd=['/q', '/lt', '/st',", "'' for account in trusted[chan]: if line == '': line = account elif", "removed. Use manual invite' % nick) elif matchcmd(cmdline, '#help'): if matchcmd(cmdline, '#help', '[command]'):", "'/st', '/lg', '/lm', '/sm'] blacklist = [] doctor = eliza.eliza() # channel: [user1,", "= [] if nicks: irc.send('MODE %s %s %s' % (chan, set_unset+mode*len(nicks), ' '.join(nicks)))", "line[:-1] if len(line) > 0: chan, account = line.split() if chan not in", "is logged in as whoisnick = line[3] account = line[4] for id in", "# channel: [user1, user2, ..., userN] trusted = {} trustedlock = threading.Lock() gods", "= newcmdline.split(' ') if isauthorized(irc, newchan, nick): chan = newchan cmdline = newcmdline", "'.join(nicks))) nicks = [] if nicks: irc.send('MODE %s %s %s' % (chan, set_unset+mode*len(nicks),", "in cmdline: cmdline.remove('') # #chan: channel override prefix # Don't allow this in", "lambda (id, cknick, account): cknick == nick ids = map(getid, filter(filterbynick, accountcheck)) accountchecklock.release()", "args): set_unset = mode[0] mode = mode[1:] if isauthorized(irc, chan, nick): if args", "== None: account = getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account == '': # ''", "law irc.msg(reply, zwsp + 
'%s: %s' % (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die =", "msg): self.msg = msg def __str__(self): return 'Error with argument format: ' +", "self.cronctrllock.acquire() for cmd in self.cronctrl: if cmd == 'QUIT': run = False self.cronctrl=[]", "def savemessages(): global msgs, msgslock with msgslock: f=open('msgs.txt', 'w') for receiver in msgs:", "= die[1].index('+') plus = int(die[1][split + 1:]) die[1] = die[1][:split] elif '-' in", "'%i (%s + %i)' % (result + plus, text, plus) elif plus <", "and nick in msgs: for sender, origin, msg in msgs.pop(nick): irc.msg(nick, zwsp +", "trusted, trustedlock trustedlock.acquire() if chan in trusted and account in trusted[chan]: trusted[chan].remove(account) trustedlock.release()", "is unlimited for argtype in argtypes: if argtype == ARG_STD: min += 1", "elif max and len(line) == max: # Optional argument given out = []", "zwsp + '%s: #invite has been removed. Use manual invite' % nick) elif", "= account elif len(line + ', ' + account) <= 255: # Playing", "# Playing it safe not to get truncated line += ', ' +", "'Runs the command as if it was sent on the specified channel. Requires", "= threading.Lock() # receiver: [(sender1, origin1, message1), (sender2, origin2, message2), ..., (senderN, origin2,", "#ls-trusted #chan #help' elif cmd=='me': return 'I shall.' 
elif cmd in helptext: if", "trustedlock.release() return True else: trustedlock.release() return False def initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock", "message = parsecmd(cmdline, 'nick {message}') if chan == nick: # In a query:", "newcmdline else: irc.msg(chan, zwsp + 'Usage #chan channel command') if matchcmd(cmdline, '#echo'): text", "-> [ARG_STD, ARG_OPT, ARG_UNL] args = args.split(' ') out = [] for arg", "import re import time concmd=['/q', '/lt', '/st', '/lg', '/lm', '/sm'] blacklist = []", "%s %s :Fuck you' % (chan, nick)) else: if isauthorized(irc, chan, nick): irc.send('KICK", "message1), (sender2, origin2, message2), ..., (senderN, origin2, messageN)] msgs = {} msgslock =", "trusted, trustedlock trustedlock.acquire() if chan not in trusted: trusted[chan] = [] if account", "short info of command or list commands'} if cmd=='': return '#echo #op #deop", "= 1 ARG_UNL = 2 def parseargsfmt(args): # parses the argument format used", "= parsecmd(cmdline, '[nick]') if trustnick == '': trustnick = nick account = getaccount(irc,", "!= ':D': # Mandated by #osdev-offtopic law irc.msg(reply, zwsp + '%s: %s' %", "text = '%i (%s - %i)' % (result + plus, text, -plus) irc.msg(reply,", "def chmode(irc, chan, nick, mode, args): set_unset = mode[0] mode = mode[1:] if", "trusted and account in trusted[chan]: trusted[chan].remove(account) trustedlock.release() def loadtrusted(): global trusted, trustedlock trustedlock.acquire()", "if isauthorized(irc, chan, nick): irc.send('KICK %s %s :%s'%(chan, kicknick, kickreason)) else: irc.msg(reply, zwsp", "line = line[:-1] if len(line) > 0: chan, account = line.split() addtrusted(chan, account)", "setaccountcheckvalue(id, account) elif line[1] == '318': # WHOIS: End of /WHOIS list. 
whoisnick", "'/sm'] blacklist = [] doctor = eliza.eliza() # channel: [user1, user2, ..., userN]", "# (ID, nick, account) accountcheck = [] accountcheckid = 0 accountchecklock = threading.Lock()", "re import time concmd=['/q', '/lt', '/st', '/lg', '/lm', '/sm'] blacklist = [] doctor", "'%': if times != 1: irc.msg(reply, zwsp + 'Not supported') else: irc.msg(reply, zwsp", "line = line[1:] min, max = getargnums(argtypes) if len(line) == min: # Only", "account: return istrusted(chan, account) else: irc.msg(nick, zwsp + 'Identify with NickServ') class ArgsfmtError(Exception):", "# Mandated by #osdev-offtopic law irc.msg(reply, zwsp + '%s: %s' % (nick, doctor.respond(question)))", "trustnick) else: irc.msg(reply, zwsp + 'Usage: #trusted? [nick]') elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline,", "accountcheck)) accountchecklock.release() return ids def getaccount(irc, nick): id = initaccountcheck(nick) irc.send('WHOIS ' +", "self.cronctrl.append(cmd) self.cronctrllock.release() def run(self): run = True while run: time.sleep(1) # Accuracy doesn't", "chan, nick, mode, args): set_unset = mode[0] mode = mode[1:] if isauthorized(irc, chan,", "mode, args): set_unset = mode[0] mode = mode[1:] if isauthorized(irc, chan, nick): if", "# Optional argument given out = [] for type in argtypes: if type", "'#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2') elif matchcmd(cmdline, '#prefix') and chan == '#osdev-offtopic': irc.msg(reply,", "id # Warning: this does no locking, should only be used internally #", "if chan in trusted and account in trusted[chan]: trustedlock.release() return True else: trustedlock.release()", "index returned cannot be guaranteed valid if lock is released between call to", "= threading.Lock() threading.Thread.__init__(self) def queuejob(self, time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self,", "optional argument that didn't get a value will be assigned '' argtypes 
=", "raise ArgsfmtError('Ambiguous argument format') # Remove the command if len(line) == 0: raise", "0: chan, account = line.split() addtrusted(chan, account) f.close() def loadgods(): global gods, godslock", "f=open('trusted.txt', 'w') for chan in trusted: for account in trusted[chan]: f.write('%s %s\\n' %", "'nick {reason}') if kicknick.lower() == irc.nick: irc.send('KICK %s %s :Fuck you' % (chan,", "in lines: irc.msg(nick, zwsp + '%s: %s' % (chan, line)) trustedlock.release() elif matchcmd(cmdline,", "argtypes = parseargsfmt(args) if len(argtypes) >= 1 and ARG_UNL in argtypes[:-1]: # Disallow", "'Usage: %s %s' % (cmd, usage[cmd]) else: return usage[cmd] else: return None def", "= [] msgs[receiver].append((sender, origin, msg)) f.close() def savemessages(): global msgs, msgslock with msgslock:", "get a value will be assigned '' argtypes = parseargsfmt(args) if len(argtypes) >=", "chan, nick): account = getaccount(irc, trustnick) if account: addtrusted(chan, account) savetrusted() else: irc.msg(reply,", "max and len(line) == max: # Optional argument given out = [] for", "matchcmd(cmdline, '#op'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick, '+o', args.split(' ')) elif", "'[' and arg[-1] == ']': # Optional (0-1) argument: [bar] out.append(ARG_OPT) elif len(arg)", "== 4: irc.send('MODE %s %s %s' % (chan, set_unset+mode*4, ' '.join(nicks))) nicks =", "irc.msg(reply, zwsp + 'What exactly do you want me to do?') elif times", "': ': reply = chan cmdline = [line[3][1:]] + line[4:] while '' in", "+ '!*@*']) elif matchcmd(cmdline, '#dequiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, '-q',", "msgnick not in msgs: msgs[msgnick] = [] msgs[msgnick].append((nick, origin, message)) savemessages() else: irc.msg(reply,", "= getargnums(parseargsfmt(args)) if max and len(line)-1 >= min and len(line)-1 <= max: return", "'nick', '#untrust': 'nick', '#ls-trusted': '', '#chan': 'channel command', '#help': '[command]'} if cmd in", "or type == ARG_UNL), 
argtypes)) > 1: # Disallow more than one optional", "'Not supported') else: irc.msg(reply, zwsp + '%s%s' % (random.randint(0,9), random.randint(0,9))) elif die <", "accountcheck[index] = (id, nick, value) accountchecklock.release() def getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire() index", "irc.msg(nick, zwsp + 'Identify with NickServ') class ArgsfmtError(Exception): def __init__(self, msg): self.msg =", "lines.append(line) for line in lines: irc.msg(nick, zwsp + '%s: %s' % (chan, line))", "%i)' % (result + plus, text, plus) elif plus < 0: text =", "args=None): # matchcmd(line, cmd) matched if the command cmd is used, matchcmd(line, cmd,", "cron cron = Cron() cron.start() loadtrusted() loadgods() def chmode(irc, chan, nick, mode, args):", "loadmessages(): global msgs, msgslock with msgslock: msgs = {} f = open('msgs.txt', 'r')", "irc.msg(reply, zwsp + '%s: %s' % (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d')", "die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times = int(die[0]) if die[0] else 1 if '+'", "unlimited arguments raise ArgsfmtError('Non-final unlimited argument') if len(filter((lambda type: type == ARG_OPT or", "(chan, line)) trustedlock.release() elif matchcmd(cmdline, '#invite'): irc.msg(chan, zwsp + '%s: #invite has been", "trusted[chan].append(account) trustedlock.release() def rmtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan in trusted", "plus, text, plus) elif plus < 0: text = '%i (%s - %i)'", "account) elif line[1] == '318': # WHOIS: End of /WHOIS list. whoisnick =", "if times != 1: irc.msg(reply, zwsp + 'Not supported') else: irc.msg(reply, zwsp +", "trusted by oonbotti2 and identified with NickServ', '#deop': 'remove your/nick\\'s op rights', '#voice':", "parsecmd(line, args): # Returns a tuple containing the arguments. 
An optional argument that", "list commands'} if cmd=='': return '#echo #op #deop #voice #devoice #quiet #dequiet #kick", "self.cronctrl: if cmd == 'QUIT': run = False self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs =", "< 0: text = '%i (%s - %i)' % (result + plus, text,", "in case you are trusted by oonbotti2 and identified with NickServ', '#devoice': 'remove", "'#help', '[command]'): command = parsecmd(cmdline, '[command]') helptext = help(command) if helptext: irc.msg(reply, zwsp+helptext)", "you are trusted by oonbotti2 and identified with NickServ', '#devoice': 'remove your or", "gods[chan] = [] gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release() def savetrusted(): global trusted, trustedlock", "account = getaccount(irc, trustnick) if account: addtrusted(chan, account) savetrusted() else: irc.msg(reply, zwsp +", "command or list commands'} if cmd=='': return '#echo #op #deop #voice #devoice #quiet", "released between call to getindexbyaccountcheckid and use! 
def getindexbyaccountcheckid(id): global accountcheck for index", "type in argtypes: if type == ARG_STD or type == ARG_OPT: out.append(line[0]) line", "(line[1] == 'PRIVMSG' or line[1] == 'JOIN') and nick in msgs: for sender,", "gods: gods[chan] = [] gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release() def savetrusted(): global trusted,", "= map((lambda (time, fn): fn), filter((lambda (time, fn): time<=0), self.timedjobs)) self.timedjobs = filter((lambda", "= parsecmd(cmdline, 'nick {message}') if chan == nick: # In a query: origin", "sender, origin, msg = line.split('\\t') if receiver not in msgs: msgs[receiver] = []", "__init__(self): self.timedjobs = [] self.timedjobslock = threading.Lock() self.cronctrl = [] self.cronctrllock = threading.Lock()", "in msgs.pop(nick): irc.msg(nick, zwsp + '%s <%s> %s' % (origin, sender, msg)) msgs_changed", "!= None: # Don't try to increment if max is unlimited max +=", "' + line[3]) elif line[1] == '482': irc.msg(line[3], zwsp + 'Not op') msgs_changed", "= (id, nick, value) accountchecklock.release() def getaccountcheckvalue(id): global accountcheck, accountchecklock accountchecklock.acquire() index =", "setaccountcheckvalue(id, value): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not", "want me to do?') elif times > 128: irc.msg(reply, zwsp + 'Sorry, I", "zwsp + 'What exactly do you want me to do?') elif times >", "for cmd in self.cronctrl: if cmd == 'QUIT': run = False self.cronctrl=[] self.cronctrllock.release()", "= mode[1:] if isauthorized(irc, chan, nick): if args == ['']: irc.send('MODE %s %s", "set_unset+mode*4, ' '.join(nicks))) nicks = [] if nicks: irc.send('MODE %s %s %s' %", "'#devoice': 'remove your or nick\\'s voice in case you are trusted by oonbotti2", "'#trust'): if matchcmd(cmdline, '#trust', 'nick'): trustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick):", "self.timedjobs = map((lambda (time, fn): 
(time-1, fn)), self.timedjobs) torun = map((lambda (time, fn):", "trustedlock.acquire() trusted = {} trustedlock.release() f=open('trusted.txt', 'r') for line in f: while len(line)", "if chan not in gods: gods[chan] = [] gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release()", "irc.nick and line[3][1:] in irc.chan.split(' '): if isauthorized(irc, line[3][1:], nick): irc.send('JOIN ' +", "else: text = str(result) if plus > 0: text = '%i (%s +", "'[nick]', '#deop': '[nick]', '#voice': '[nick]', '#devoice': '[nick]', '#quiet': 'nick', '#dequiet': 'nick', '#kick': 'nick", "and len(line) > min: # Unlimited argument given out = [] for type", "(chan, set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan, account): trustedlock.acquire() if chan in trusted and", "= parsecmd(cmdline, 'nick') if isauthorized(irc, chan, nick): account = getaccount(irc, trustnick) if account:", "account): irc.msg(reply, zwsp + '%s is trusted' % trustnick) else: irc.msg(reply, zwsp +", "cmd, args=None): # matchcmd(line, cmd) matched if the command cmd is used, matchcmd(line,", "if not account: if istrusted(chan, untrustnick): account = untrustnick if account: godslock.acquire() if", "and len(line)-1 <= max: return True elif not max and len(line)-1 >= min:", "ArgsfmtError('Number of given arguments not possible for given format string') if len(out) ==", "'Failed to get account for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage:", "+q from nick!*@*', '#kick': 'kicks nick with specified reason', '#src': 'paste a link", "#kick #src #msg #trusted? #trust #untrust #ls-trusted #chan #help' elif cmd=='me': return 'I", "%s' % trustnick) else: irc.msg(reply, zwsp + 'Usage: #trusted? [nick]') elif matchcmd(cmdline, '#trust'):", "line = line.split(' ') nick = line[0].split('!')[0][1:] chan = line[2] if line[2][0] ==", "text, plus) elif plus < 0: text = '%i (%s - %i)' %", "nick from trusted list', '#ls-trusted': 'list nicks that are trusted. 
use only in", "setaccountcheckvalue(id, ''))) account = None while account == None: account = getaccountcheckvalue(id) time.sleep(0.1)", "trusted[chan]: trustedlock.release() return True else: trustedlock.release() return False def initaccountcheck(nick): global accountcheck, accountcheckid,", "result = reduce((lambda x, y: x + y), rolls) if times > 1:", "isauthorized(irc, line[3][1:], nick): irc.send('JOIN ' + line[3]) elif line[1] == '482': irc.msg(line[3], zwsp", "if getaccountcheckvalue(id) == None: setaccountcheckvalue(id, '') # Mark as failed, '' is used", "(senderN, origin2, messageN)] msgs = {} msgslock = threading.Lock() # (ID, nick, account)", "doesn't need to be high self.cronctrllock.acquire() for cmd in self.cronctrl: if cmd ==", "rolls])) else: text = str(result) if plus > 0: text = '%i (%s", "4: irc.send('MODE %s %s %s' % (chan, set_unset+mode*4, ' '.join(nicks))) nicks = []", "'Failed to get account for %s' % trustnick) else: irc.msg(reply, zwsp + 'Usage", "zwsp + '%s is not trusted' % trustnick) else: irc.msg(reply, zwsp + 'Failed", "4: receiver, sender, origin, msg = line.split('\\t') if receiver not in msgs: msgs[receiver]", "chan not in gods: gods[chan] = [] gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release() def", "a message to nick', '#trusted?': 'tell you if nick or yourself is trusted", "if len(line) > 0: chan, account = line.split() if chan not in gods:", "nick, '+o', args.split(' ')) elif matchcmd(cmdline, '#deop'): args = parsecmd(cmdline, '{args}') chmode(irc, chan,", "with msgslock: msgs = {} f = open('msgs.txt', 'r') for line in f:", "> 0 and line[-1] == '\\n': line = line[:-1] if len(line.split('\\t')) == 4:", "%s :Fuck you' % (chan, nick)) else: if isauthorized(irc, chan, nick): irc.send('KICK %s", "global trusted, trustedlock trustedlock.acquire() trusted = {} trustedlock.release() f=open('trusted.txt', 'r') for line in", "% trustnick) else: irc.msg(reply, zwsp + 'Usage #trust nick') elif 
matchcmd(cmdline, '#untrust'): if", "chmode(irc, chan, nick, '+v', args.split(' ')) elif matchcmd(cmdline, '#quiet'): arg = parsecmd(cmdline, 'nick')", "fn): time>0), self.timedjobs) self.timedjobslock.release() for fn in torun: fn() def loadmessages(): global msgs,", "helptext = help(command) if helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and chan ==", "origin1, message1), (sender2, origin2, message2), ..., (senderN, origin2, messageN)] msgs = {} msgslock", "= True): usage = {'#echo': 'text', '#op': '[nick]', '#deop': '[nick]', '#voice': '[nick]', '#devoice':", "getid = lambda (id, nick, account): id filterbynick = lambda (id, cknick, account):", "True else: return False def parsecmd(line, args): # Returns a tuple containing the", "'Usage: #msg nick message') elif matchcmd(cmdline, '#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick =", "= [] else: raise ArgsfmtError('Number of given arguments not possible for given format", "cannot be guaranteed valid if lock is released between call to getindexbyaccountcheckid and", "= newchan cmdline = newcmdline else: irc.msg(chan, zwsp + 'Usage #chan channel command')", "getaccountcheckvalue(id) time.sleep(0.1) removeaccountcheck(id) if account == '': # '' Signifies failure return None", "trusted: lines = [] line = '' for account in trusted[chan]: if line", "for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1] == '318': # WHOIS: End", "#chan channel command') if matchcmd(cmdline, '#echo'): text = parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif", "while run: time.sleep(1) # Accuracy doesn't need to be high self.cronctrllock.acquire() for cmd", "text = '%s (%s)' % (str(result), ', '.join([str(i) for i in rolls])) else:", "%s' % untrustnick) else: irc.msg(reply, zwsp + 'Usage #untrust nick') elif matchcmd(cmdline, '#ls-trusted'):", "don\\'t have that many. 
Can I borrow yours?') else: rolls = [random.randint(1, die)", "receiver: [(sender1, origin1, message1), (sender2, origin2, message2), ..., (senderN, origin2, messageN)] msgs =", "f.close() def savemessages(): global msgs, msgslock with msgslock: f=open('msgs.txt', 'w') for receiver in", "if isauthorized(irc, chan, nick): if args == ['']: irc.send('MODE %s %s %s' %", "in msgs: for sender, origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver, sender, origin,", "return True elif not max and len(line)-1 >= min: return True else: return", "accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: del accountcheck[index]", "argtypes)) > 1: # Disallow more than one optional or unlimited argument per", "accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index is not None: ckid, cknick, value =", "used by matchcmd and parsecmd # e.g. parseargsfmt(\"foo [bar] {baz} ) -> [ARG_STD,", "zwsp + 'Usage #kick nick reason') elif matchcmd(cmdline, '#src'): irc.msg(reply, zwsp + 'https://github.com/JuEeHa/oonbotti2')", "+ 'gopher://ayu.smar.fi:7070/0/hash-prefix') elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg', 'nick {message}'): msgnick, message =", "'-o', args.split(' ')) elif matchcmd(cmdline, '#voice'): args = parsecmd(cmdline, '{args}') chmode(irc, chan, nick,", "unlimited max += 1 elif argtype == ARG_OPT: if max != None: #", "def init(): global cron cron = Cron() cron.start() loadtrusted() loadgods() def chmode(irc, chan,", "args: return True min, max = getargnums(parseargsfmt(args)) if max and len(line)-1 >= min", "'#kick': 'kicks nick with specified reason', '#src': 'paste a link to oonbotti2\\'s git", "and arg[0] == '{' and arg[-1] == '}': # Unlimited (0-) number of", "matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust', 'nick'): trustnick = parsecmd(cmdline, 'nick') if isauthorized(irc, chan,", "'#quiet'): arg = parsecmd(cmdline, 'nick') chmode(irc, chan, nick, 
'+q', [arg + '!*@*']) elif", "out.append(line[0]) line = line[1:] elif type == ARG_UNL: out.append(' '.join(line)) line = []", "nick): account = getaccount(irc, trustnick) if account: addtrusted(chan, account) savetrusted() else: irc.msg(reply, zwsp", "from trusted list', '#ls-trusted': 'list nicks that are trusted. use only in a", "'#devoice': '[nick]', '#quiet': 'nick', '#dequiet': 'nick', '#kick': 'nick [reason]', '#src': '', '#msg': 'nick", "min: return True else: return False def parsecmd(line, args): # Returns a tuple", "< 2 or question[:2] != ':D': # Mandated by #osdev-offtopic law irc.msg(reply, zwsp", "more than one optional or unlimited argument per argument string raise ArgsfmtError('Ambiguous argument", "for receiver in msgs: for sender, origin, msg in msgs[receiver]: f.write('%s\\t%s\\t%s\\t%s\\n' % (receiver,", "return False def parsecmd(line, args): # Returns a tuple containing the arguments. An", "[] if nicks: irc.send('MODE %s %s %s' % (chan, set_unset+mode*len(nicks), ' '.join(nicks))) def", "'%s <%s> %s' % (origin, sender, msg)) msgs_changed = True if msgs_changed: savemessages()", "return True else: return False def parsecmd(line, args): # Returns a tuple containing", "min, max = getargnums(parseargsfmt(args)) if max and len(line)-1 >= min and len(line)-1 <=", "self.msg = msg def __str__(self): return 'Error with argument format: ' + msg", "else: irc.msg(chan, zwsp + 'Usage #chan channel command') if matchcmd(cmdline, '#echo'): text =", "== '/lg': loadgods() elif cmdline[0] == '/lm': loadmessages() elif cmdline[0] == '/sm': savemessages()", "for more transparent bot usage if matchcmd(cmdline, '#chan') and chan != nick: if", "in gods: gods[chan] = [] gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release() def savetrusted(): global", "ids = map(getid, filter(filterbynick, accountcheck)) accountchecklock.release() return ids def getaccount(irc, nick): id =", "your space-time region.') elif times < 1: irc.msg(reply, zwsp + 
'What exactly do", "accountcheck.append((id, nick, None)) accountcheckid += 1 accountchecklock.release() return id # Warning: this does", "== '/st': savetrusted() elif cmdline[0] == '/lg': loadgods() elif cmdline[0] == '/lm': loadmessages()", "newcmdline.split(' ') if isauthorized(irc, newchan, nick): chan = newchan cmdline = newcmdline else:", "case you are trusted by oonbotti2 and identified with NickServ', '#deop': 'remove your/nick\\'s", "origin, msg = line.split('\\t') if receiver not in msgs: msgs[receiver] = [] msgs[receiver].append((sender,", "for type in argtypes: if type == ARG_STD: out.append(line[0]) line = line[1:] else:", "False def initaccountcheck(nick): global accountcheck, accountcheckid, accountchecklock accountchecklock.acquire() id = accountcheckid accountcheck.append((id, nick,", "elif cmdline[0] == '/lm': loadmessages() elif cmdline[0] == '/sm': savemessages() def usage(cmd, message", "increment if max is unlimited max += 1 elif argtype == ARG_UNL: max", "not max and len(line) > min: # Unlimited argument given out = []", "%s\\n' % (chan, account)) f.close trustedlock.release() def init(): global cron cron = Cron()", "== max: # Optional argument given out = [] for type in argtypes:", "self.cronctrl=[] self.cronctrllock.release() self.timedjobslock.acquire() self.timedjobs = map((lambda (time, fn): (time-1, fn)), self.timedjobs) torun =", "irc.msg(reply, zwsp + 'Usage: #trusted? 
[nick]') elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust', 'nick'):", "chan, account = line.split() if chan not in gods: gods[chan] = [] gods[chan].append(account)", "parseargsfmt(args): # parses the argument format used by matchcmd and parsecmd # e.g.", "+ msg ARG_STD = 0 ARG_OPT = 1 ARG_UNL = 2 def parseargsfmt(args):", "irc.msg(reply, zwsp + 'Not supported') else: irc.msg(reply, zwsp + '%s%s' % (random.randint(0,9), random.randint(0,9)))", "getaccount(irc, trustnick) if account: if istrusted(chan, account): irc.msg(reply, zwsp + '%s is trusted'", "'INVITE' and line[2] == irc.nick and line[3][1:] in irc.chan.split(' '): if isauthorized(irc, line[3][1:],", "elif argtype == ARG_OPT: if max != None: # Don't try to increment", "End of /WHOIS list. whoisnick = line[3] for id in getaccountcheckidbynick(whoisnick): if getaccountcheckvalue(id)", "arg[0] == '{' and arg[-1] == '}': # Unlimited (0-) number of arguments:", "nick): irc.send('JOIN ' + line[3]) elif line[1] == '482': irc.msg(line[3], zwsp + 'Not", "in irc.chan.split(' '): if isauthorized(irc, line[3][1:], nick): irc.send('JOIN ' + line[3]) elif line[1]", "if len(arg) >= 2 and arg[0] == '[' and arg[-1] == ']': #", "chmode(irc, chan, nick, '-q', [arg + '!*@*']) elif matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline,", "0 and line[-1] == '\\n': line = line[:-1] if len(line.split('\\t')) == 4: receiver,", "== ARG_OPT or type == ARG_UNL), argtypes)) > 1: # Disallow more than", "by matchcmd and parsecmd # e.g. 
parseargsfmt(\"foo [bar] {baz} ) -> [ARG_STD, ARG_OPT,", "loadmessages() elif cmdline[0] == '/sm': savemessages() def usage(cmd, message = True): usage =", "= [] for arg in args: if len(arg) >= 2 and arg[0] ==", "= line[3] account = line[4] for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif line[1]", "# max = None if number of arguments is unlimited for argtype in", "cmdline[0] == '/q': cron.ctrl('QUIT') elif cmdline[0] == '/lt': loadtrusted() elif cmdline[0] == '/st':", "doctor.respond(question))) elif die_expr.match(cmdline[0]): die = cmdline[0][1:].split('d') times = int(die[0]) if die[0] else 1", "[] gods[chan].append(account) addtrusted(chan, account) f.close() godslock.release() def savetrusted(): global trusted, trustedlock trustedlock.acquire() f=open('trusted.txt',", "= line.split() if chan not in gods: gods[chan] = [] gods[chan].append(account) addtrusted(chan, account)", "cmdline[0] == '/st': savetrusted() elif cmdline[0] == '/lg': loadgods() elif cmdline[0] == '/lm':", "#chan: channel override prefix # Don't allow this in private messages for more", "'[nick]', '#devoice': '[nick]', '#quiet': 'nick', '#dequiet': 'nick', '#kick': 'nick [reason]', '#src': '', '#msg':", "(ID, nick, account) accountcheck = [] accountcheckid = 0 accountchecklock = threading.Lock() die_expr=re.compile(\"#[0-9]*d([0-9]+|%)([+-][0-9]+)?$\")", "'%s %s %s' % (cmd, usage(cmd, False), helptext[cmd]) else: return '%s %s' %", "accountchecklock.release() return value def removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if", "I don\\'t have that many. 
Can I borrow yours?') else: rolls = [random.randint(1,", "else: # In a channel origin = chan with msgslock: if msgnick not", "ARG_OPT = 1 ARG_UNL = 2 def parseargsfmt(args): # parses the argument format", "return value def removeaccountcheck(id): global accountcheck, accountchecklock accountchecklock.acquire() index = getindexbyaccountcheckid(id) if index", "' + account) <= 255: # Playing it safe not to get truncated", "arguments raise ArgsfmtError('Non-final unlimited argument') if len(filter((lambda type: type == ARG_OPT or type", "else: irc.msg(reply, zwsp + 'Failed to get account for %s' % untrustnick) else:", "gods[chan]: rmtrusted(chan, untrustnick) godslock.release() savetrusted() else: irc.msg(reply, zwsp + 'Failed to get account", "#trusted? [nick]') elif matchcmd(cmdline, '#trust'): if matchcmd(cmdline, '#trust', 'nick'): trustnick = parsecmd(cmdline, 'nick')", "'Identify with NickServ') class ArgsfmtError(Exception): def __init__(self, msg): self.msg = msg def __str__(self):", "irc.msg(reply, zwsp + 'Nothing here') elif cmdline[0] in [irc.nick, irc.nick+',', irc.nick+':']: question =", "whoisnick = line[3] account = line[4] for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id, account) elif", "NickServ', '#deop': 'remove your/nick\\'s op rights', '#voice': 'give nick or yourself voice in", "irc.msg(reply, zwsp + '%s is trusted' % trustnick) else: irc.msg(reply, zwsp + '%s", "255: # Playing it safe not to get truncated line += ', '", "parsecmd # e.g. 
parseargsfmt(\"foo [bar] {baz} ) -> [ARG_STD, ARG_OPT, ARG_UNL] args =", "account) else: irc.msg(nick, zwsp + 'Identify with NickServ') class ArgsfmtError(Exception): def __init__(self, msg):", "time>0), self.timedjobs) self.timedjobslock.release() for fn in torun: fn() def loadmessages(): global msgs, msgslock", "= args.split(' ') out = [] for arg in args: if len(arg) >=", "max != None: # Don't try to increment if max is unlimited max", "nick zwsp = '\\xe2\\x80\\x8b' if nick in blacklist: return elif len(line) >= 4", "in as whoisnick = line[3] account = line[4] for id in getaccountcheckidbynick(whoisnick): setaccountcheckvalue(id,", "len(filter((lambda type: type == ARG_OPT or type == ARG_UNL), argtypes)) > 1: #", "matchcmd(cmdline, '#chan') and chan != nick: if matchcmd(cmdline, '#chan', 'channel {command}'): newchan, newcmdline", "= accountcheckid accountcheck.append((id, nick, None)) accountcheckid += 1 accountchecklock.release() return id # Warning:", "die) for i in xrange(times)] result = reduce((lambda x, y: x + y),", "if index is not None: ckid, cknick, value = accountcheck[index] accountchecklock.release() return value", "{} trustedlock.release() f=open('trusted.txt', 'r') for line in f: while len(line) > 0 and", "chan in trusted: lines = [] line = '' for account in trusted[chan]:", "# Disallow more than one optional or unlimited argument per argument string raise", "[] msgs[receiver].append((sender, origin, msg)) f.close() def savemessages(): global msgs, msgslock with msgslock: f=open('msgs.txt',", "trusted list', '#ls-trusted': 'list nicks that are trusted. 
use only in a query',", "nick, None)) accountcheckid += 1 accountchecklock.release() return id # Warning: this does no", "possible for given format string') if len(out) == 1: return out[0] else: return", "loadmessages() def addtrusted(chan, account): global trusted, trustedlock trustedlock.acquire() if chan not in trusted:", "None return min, max def matchcmd(line, cmd, args=None): # matchcmd(line, cmd) matched if", "arg in args: if len(arg) >= 2 and arg[0] == '[' and arg[-1]", "'Failed to get account for %s' % untrustnick) else: irc.msg(reply, zwsp + 'Usage", "with argument format: ' + msg ARG_STD = 0 ARG_OPT = 1 ARG_UNL", "chmode(irc, chan, nick, '-o', args.split(' ')) elif matchcmd(cmdline, '#voice'): args = parsecmd(cmdline, '{args}')", "type == ARG_UNL: out.append(' '.join(line)) line = [] else: raise ArgsfmtError('Number of given", "time, fn): self.timedjobslock.acquire() self.timedjobs.append((time, fn)) self.timedjobslock.release() def ctrl(self, cmd): self.cronctrllock.acquire() self.cronctrl.append(cmd) self.cronctrllock.release() def", "accountchecklock.release() return ids def getaccount(irc, nick): id = initaccountcheck(nick) irc.send('WHOIS ' + nick)", "== ARG_STD: min += 1 if max != None: # Don't try to", "chan in trusted: for account in trusted[chan]: f.write('%s %s\\n' % (chan, account)) f.close", "rights in case you are trusted by oonbotti2 and identified with NickServ', '#deop':", "{} msgslock = threading.Lock() # (ID, nick, account) accountcheck = [] accountcheckid =", "trustedlock trustedlock.acquire() trusted = {} trustedlock.release() f=open('trusted.txt', 'r') for line in f: while", "ZWSP return if line[1]=='PRIVMSG' and line[3][:2] != ': ': reply = chan cmdline", "if account: return istrusted(chan, account) else: irc.msg(nick, zwsp + 'Identify with NickServ') class", "nick): account = getaccount(irc, untrustnick) # If account can't be found (e.g. it", "args): # Returns a tuple containing the arguments. 
An optional argument that didn't", "= False with msgslock: if (line[1] == 'PRIVMSG' or line[1] == 'JOIN') and", "getaccount(irc, nick): id = initaccountcheck(nick) irc.send('WHOIS ' + nick) cron.queuejob(5, (lambda : setaccountcheckvalue(id,", "def parseargsfmt(args): # parses the argument format used by matchcmd and parsecmd #", "and arg[-1] == ']': # Optional (0-1) argument: [bar] out.append(ARG_OPT) elif len(arg) >=", "is unlimited max += 1 elif argtype == ARG_UNL: max = None return", "argument format: ' + msg ARG_STD = 0 ARG_OPT = 1 ARG_UNL =", "cron.start() loadtrusted() loadgods() def chmode(irc, chan, nick, mode, args): set_unset = mode[0] mode", "elif matchcmd(cmdline, '#msg'): if matchcmd(cmdline, '#msg', 'nick {message}'): msgnick, message = parsecmd(cmdline, 'nick", "== ['']: irc.send('MODE %s %s %s' % (chan, set_unset+mode, nick)) else: nicks =", "= True while run: time.sleep(1) # Accuracy doesn't need to be high self.cronctrllock.acquire()", "for account in trusted[chan]: if line == '': line = account elif len(line", "have that many. Can I borrow yours?') else: rolls = [random.randint(1, die) for", "= [] msgs[msgnick].append((nick, origin, message)) savemessages() else: irc.msg(reply, zwsp + 'Usage: #msg nick", "index = getindexbyaccountcheckid(id) if index is not None: del accountcheck[index] accountchecklock.release() def getaccountcheckidbynick(nick):", "'list nicks that are trusted. 
use only in a query', '#chan': 'Runs the", "return False if line[0] != cmd: return False if not args: return True", "out def parse((line, irc)): global blacklist global msgs, msgslock global trusted, trustedlock, gods,", "'#' else nick zwsp = '\\xe2\\x80\\x8b' if nick in blacklist: return elif len(line)", "'#ls-trusted': '', '#chan': 'channel command', '#help': '[command]'} if cmd in usage: if message:", "eliza import threading import random import re import time concmd=['/q', '/lt', '/st', '/lg',", "len(line.split('\\t')) == 4: receiver, sender, origin, msg = line.split('\\t') if receiver not in", "'482': irc.msg(line[3], zwsp + 'Not op') msgs_changed = False with msgslock: if (line[1]", "isauthorized(irc, chan, nick): irc.send('KICK %s %s :%s'%(chan, kicknick, kickreason)) else: irc.msg(reply, zwsp +", "else: # Normal argument: foo out.append(ARG_STD) return out def getargnums(argtypes): min = 0", "for chan in trusted: for account in trusted[chan]: f.write('%s %s\\n' % (chan, account))", "= '' for account in trusted[chan]: if line == '': line = account", "savemessages(): global msgs, msgslock with msgslock: f=open('msgs.txt', 'w') for receiver in msgs: for", "if helptext: irc.msg(reply, zwsp+helptext) elif matchcmd(cmdline, '#esoteric') and chan == '#esoteric': irc.msg(reply, zwsp", "if nicks: irc.send('MODE %s %s %s' % (chan, set_unset+mode*len(nicks), ' '.join(nicks))) def istrusted(chan,", "get truncated line += ', ' + account else: lines.append(line) line = account", "'#trusted?'): if matchcmd(cmdline, '#trusted?', '[nick]'): trustnick = parsecmd(cmdline, '[nick]') if trustnick == '':", "#trust nick') elif matchcmd(cmdline, '#untrust'): if matchcmd(cmdline, '#untrust', 'nick'): untrustnick = parsecmd(cmdline, 'nick')", "global msgs, msgslock with msgslock: msgs = {} f = open('msgs.txt', 'r') for", "== ':'+zwsp: # If line begins with ZWSP return if line[1]=='PRIVMSG' and line[3][:2]", "godslock godslock.acquire() gods = {} f=open('gods.txt', 'r') for line in 
f: while len(line)", "args: if len(arg) >= 2 and arg[0] == '[' and arg[-1] == ']':", "getargnums(argtypes) if len(line) == min: # Only standard arguments given out = []", "'give nick or yourself op rights in case you are trusted by oonbotti2", "{} f = open('msgs.txt', 'r') for line in f: while len(line) > 0", "by #osdev-offtopic law irc.msg(reply, zwsp + '%s: %s' % (nick, doctor.respond(question))) elif die_expr.match(cmdline[0]):", "parsecmd(cmdline, '{text}') irc.msg(reply, zwsp+text) elif matchcmd(cmdline, '#op'): args = parsecmd(cmdline, '{args}') chmode(irc, chan,", "(result + plus, text, plus) elif plus < 0: text = '%i (%s", "Requires user to be trusted', '#help': 'give short info of command or list", "argument given out = [] for type in argtypes: if type == ARG_STD", "oonbotti2 and identified with NickServ', '#deop': 'remove your/nick\\'s op rights', '#voice': 'give nick", "'/lg', '/lm', '/sm'] blacklist = [] doctor = eliza.eliza() # channel: [user1, user2,", "chan, nick, '-q', [arg + '!*@*']) elif matchcmd(cmdline, '#devoice'): args = parsecmd(cmdline, '{args}')", "line[3]) elif line[1] == '482': irc.msg(line[3], zwsp + 'Not op') msgs_changed = False", "of given arguments not possible for given format string') if len(out) == 1:", "in die[1]: split = die[1].index('+') plus = int(die[1][split + 1:]) die[1] = die[1][:split]", "filter((lambda (time, fn): time>0), self.timedjobs) self.timedjobslock.release() for fn in torun: fn() def loadmessages():", "fn), filter((lambda (time, fn): time<=0), self.timedjobs)) self.timedjobs = filter((lambda (time, fn): time>0), self.timedjobs)", "def loadtrusted(): global trusted, trustedlock trustedlock.acquire() trusted = {} trustedlock.release() f=open('trusted.txt', 'r') for", "of arguments is unlimited for argtype in argtypes: if argtype == ARG_STD: min", "the arguments. 
An optional argument that didn't get a value will be assigned", "+= 1 accountchecklock.release() return id # Warning: this does no locking, should only", "by oonbotti2', '#trust': 'add nick to trusted list', '#untrust': 'remove nick from trusted", "'}': # Unlimited (0-) number of arguments: {baz} out.append(ARG_UNL) else: # Normal argument:", "trustedlock = threading.Lock() gods = {} godslock = threading.Lock() # receiver: [(sender1, origin1,", "chan, nick): irc.send('KICK %s %s :%s'%(chan, kicknick, kickreason)) else: irc.msg(reply, zwsp + 'Usage", "only in a query', '#chan': 'Runs the command as if it was sent", "it was sent on the specified channel. Requires user to be trusted', '#help':", "Warning: this does no locking, should only be used internally # The index", "argument that didn't get a value will be assigned '' argtypes = parseargsfmt(args)", "len(arg) >= 2 and arg[0] == '{' and arg[-1] == '}': # Unlimited", "that didn't get a value will be assigned '' argtypes = parseargsfmt(args) if", "accountcheck[index] if ckid == id: return index return None def setaccountcheckvalue(id, value): global", "line[0].split('!')[0][1:] chan = line[2] if line[2][0] == '#' else nick zwsp = '\\xe2\\x80\\x8b'" ]
[ "for act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter", "import weight_norm, remove_weight_norm from utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing", "gains = torch.rand_like(maxes) sound = (sound/maxes) * gains return sound def configure_optimizers(self): optim", "enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave)", "= nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1,", "(u, k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for", "= F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt) x = xt + x return x", "F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt) x", ").sum() / out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0)", "c2(xt) x = xt + x return x def remove_weight_norm(self): for l in", "= len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get expected input lengthes and", "channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))),", "if with_random: out1 = self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2 out", "wave\",fig, 
self.current_epoch) return def random_gain(self, sound:torch.Tensor) -> torch.Tensor: n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1)", "urs: lo = init_len * r self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo # get", "super().__init__() self.h = h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft =", "expected input lengthes and output lengthes init_len = uik self.L_ins = [init_len] self.L_outs", "uik self.L_ins = [init_len] self.L_outs = [] for r in urs: lo =", "= walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None for act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave)", "h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples = len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3)) #", "enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self,", "j in range(self.num_kernels): if xs is None: xs = self.resblocks[i*self.num_kernels+j](x) else: xs +=", "torch.Tensor: n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains = torch.rand_like(maxes) sound", "in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def", ": (-1,ch, l) \"\"\" if 
previous_wave is None: if self.silence is None: self.set_silence()", "output lengthes init_len = uik self.L_ins = [init_len] self.L_outs = [] for r", "h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim =", "out2], dim=1) #.tanh() # notanh return out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean", "-1 ).sum() / out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") /", "self.convs1: remove_weight_norm(l) for l in self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def __init__(self, h): super().__init__()", "j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights)", "dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([", "np import random from torch.utils.tensorboard import SummaryWriter import matplotlib.pyplot as plt LRELU_SLOPE =", "r in urs: lo = init_len * r self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo", "return out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy", "self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps", "= self.BCEwithLogits(torch.atanh(out),0.5*ans+1) 
#print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss = marginal_likelihood +", "[init_len] self.L_outs = [] for r in drs: lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo)", "if previous_wave.size(-1) < self.n_fft : pad_len = self.n_fft - previous_wave.size(-1) n,c,l = previous_wave.shape", "l in self.dns: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module):", "for i in range(len(self.dns)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post", "plt LRELU_SLOPE = 0.1 class ResBlock(nn.Module): def __init__(self, channels, kernel_size=3, dilation=(1, 3, 5)):", "self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2 out = torch.cat([out1, out2], dim=1)", "None: super().__init__() self.h = h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft", "+ mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad() def", "= self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool = True): dummy = torch.randn(1,1,self.n_fft)", "mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out) return out,mean,var def", "class VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h = h self.reset_seed() self.encoder", "F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt) x = xt + x return x def", "= 
h.walking_resolution self.out_len = self.decoder.out_len self.view_interval = 10 self.kl_lambda = h.kl_lambda # training", "training settings self.MSE = nn.MSELoss() self.MAE = nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def", "mean2,var2 = self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out)", "torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self): seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def", "tensorboard: writer = SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ ==", "ch = uic for i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList() for i,", "grad explosion ? 
wave = torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self): seed = self.h.seed", "dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def forward(self, x): for c1, c2 in zip(self.convs1,", "import summary from torch.nn.utils import weight_norm, remove_weight_norm from utils import get_padding, get_conv1d_outlen, init_weights,", "def __init__(self, channels, kernel_size=3, dilation=(1, 3, 5)): super().__init__() assert len(dilation) == 3 self.convs1", "class ResBlock(nn.Module): def __init__(self, channels, kernel_size=3, dilation=(1, 3, 5)): super().__init__() assert len(dilation) ==", "in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling channels self.channels = [uic]", "predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action : (-1, ratent_dim, 1) previous_wave : (-1,ch,", "self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x =", "self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.dns)): ch =", "self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) ->", "= torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence = None def set_silence(self): self.silence =", "3 if previous_wave.size(-1) < self.n_fft : pad_len = self.n_fft - previous_wave.size(-1) n,c,l =", "range(len(self.ups)): ch 
= self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights)", "= int((l - self.out_len)/2) x = x[:,:,start:start+self.out_len] #x = x.tanh() # grad explosion", "if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2) out", "kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels,", "self.L_outs.append(lo) init_len = lo # get upsampling paddings self.pads = [] for i,r", "# grad explosion ? wave = torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self): seed =", "zip(self.convs1, self.convs2): xt = F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE)", "enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling channels self.channels = [uic] ch", "summary(self, dummy) def remove_weight_norm(self): print(\"Removing weight norm...\") for l in self.dns: remove_weight_norm(l) for", "= self.ups[i](x) xs = None for j in range(self.num_kernels): if xs is None:", "pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling channels self.channels = [] for i", "Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps = int(h.breath_len / h.hop_len) +", "self.decoder.out_len self.view_interval = 10 self.kl_lambda = h.kl_lambda # training settings self.MSE = nn.MSELoss()", "weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size,", "upsampling 
channels self.channels = [uic] ch = uic for i in range(len(urs)): self.channels.append(int(ch/(2**i)))", "self.conv_post(x) l = x.size(-1) start = int((l - self.out_len)/2) x = x[:,:,start:start+self.out_len] #x", "torch.pow(mean,2) + var - torch.log(var) -1 ).sum() / out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True", "= torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self): print(\"Removing weight norm...\") for l in self.dns:", "downsampling paddings self.pads = [] for i,r in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad)", "[] for r in urs: lo = init_len * r self.L_ins.append(lo) self.L_outs.append(lo) init_len", "r self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo # get upsampling paddings self.pads = []", "= weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x)", "# get upsampling paddings self.pads = [] for i,r in enumerate(urs): pad =", "var - torch.log(var) -1 ).sum() / out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out))", "weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def forward(self, x): for", "pytorch_lightning as pl from torchsummaryX import summary from torch.nn.utils import weight_norm, remove_weight_norm from", "in self.ups: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def", "torch.rand_like(maxes) sound = (sound/maxes) * gains return sound def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(),", "previous_wave : (-1,ch, l) 
\"\"\" if previous_wave is None: if self.silence is None:", "1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights)", "xs = None for j in range(self.num_kernels): if xs is None: xs =", "(-1, 1, n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1)", "import SummaryWriter import matplotlib.pyplot as plt LRELU_SLOPE = 0.1 class ResBlock(nn.Module): def __init__(self,", "None: super().__init__() self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik =", "get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing import Tuple from torchsummaryX import summary", "self.num_kernels = len(rks) self.num_downsamples = len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get", "F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x) xs = None for j in range(self.num_kernels): if", "from torch.nn.utils import weight_norm, remove_weight_norm from utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space", "batch sound = sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound", "grad explotsion mse = self.MSE(ans, out) mae = self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2)", "downsampling channels self.channels = [] for i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList()", "self.dns[i](x) xs = None for j in range(self.num_kernels): if xs is None: xs", "0.5*torch.sum( torch.pow(mean,2) + var - torch.log(var) -1 ).sum() / out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1)", "for i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = 
nn.ModuleList() for i, (u,k) in enumerate(zip(urs,urks)):", "with_random: out1 = self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2 out =", "configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence", "explosion ? wave = torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self): seed = self.h.seed np.random.seed(seed)", "= h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs = h.downsample_rates drks = h.downsample_kernel_sizes dci =", "self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1: (-1, 1, n_fft) x2: (-1,", "as np import random from torch.utils.tensorboard import SummaryWriter import matplotlib.pyplot as plt LRELU_SLOPE", "Tuple from torchsummaryX import summary import numpy as np import random from torch.utils.tensorboard", "#.tanh() # notanh return out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var)", "through the ratent space and log audio wave. 
\"\"\" if self.current_epoch%self.view_interval !=0: return", "xs / self.num_kernels x = F.leaky_relu(x) x = self.conv_post(x) l = x.size(-1) start", "atanh grad explotsion mse = self.MSE(ans, out) mae = self.MAE(ans,out) KL = 0.5*torch.sum(", "x = self.ups[i](x) xs = None for j in range(self.num_kernels): if xs is", "* self.kl_lambda #loss = self.kl_lambda * KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL", "n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains = torch.rand_like(maxes) sound =", "self.convs2.apply(init_weights) def forward(self, x): for c1, c2 in zip(self.convs1, self.convs2): xt = F.leaky_relu(x,", "out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss =", "int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo self.L_outs.append(1) # get downsampling paddings self.pads =", "= weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected input lengthes and output lengthes init_len =", "self.silence is None: self.set_silence() previous_wave = self.silence assert len(action.shape) == 3 assert len(previous_wave.shape)", "torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool = True): dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if", "= self.forward(x2) if with_random: out1 = self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else: out1,out2 =", "self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo self.L_outs.append(1) # get downsampling paddings self.pads = []", "self.channels = [uic] ch = uic for i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups =", "torch.Tensor: x = self.conv_pre(x) for i in range(self.num_downsamples): x = 
F.leaky_relu(x, LRELU_SLOPE) x", "mean1,mean2 out = torch.cat([out1, out2], dim=1) #.tanh() # notanh return out @staticmethod def", "1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1,", "init_len = h.n_fft self.L_ins = [init_len] self.L_outs = [] for r in drs:", "self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x)", "self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList() for i, (u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) ))", "remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu')", "dci = h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples = len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci,", "remove_weight_norm(self): print('Removing weight norm...') for l in self.ups: remove_weight_norm(l) for l in self.resblocks:", "notanh out = self.decoder(out) return out,mean,var def on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def training_step(self,", "#print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss = marginal_likelihood + KL *", "return def random_gain(self, sound:torch.Tensor) -> torch.Tensor: n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] =", "dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2 = self.forward(x2) if with_random:", "i in range(self.num_upsamples): x 
= F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x) xs = None", "out_.tanh() # atanh grad explotsion mse = self.MSE(ans, out) mae = self.MAE(ans,out) KL", "sound:torch.Tensor) -> torch.Tensor: n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains =", "dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1,", "= x[:,:,start:start+self.out_len] #x = x.tanh() # grad explosion ? return x def summary(self):", "channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1)))", "previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()#", "self.conv_pre(x) for i in range(self.num_downsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x) xs", "dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size,", "/ self.num_kernels x = F.leaky_relu(x) mean = self.conv_post(x) var = F.softplus(self.conv_post_var(x)) + 1e-8", "= self.kl_lambda * KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood)", "i,r in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling channels self.channels =", "= fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch) return def random_gain(self, 
sound:torch.Tensor) -> torch.Tensor: n,c,l", "get upsampling channels self.channels = [uic] ch = uic for i in range(len(urs)):", "= previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out", "self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i", "get expected input lengthes and output lengths init_len = h.n_fft self.L_ins = [init_len]", "= self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2 out = torch.cat([out1, out2],", "wave. \"\"\" if self.current_epoch%self.view_interval !=0: return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave =", "sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2) out = out_.tanh() # atanh grad explotsion", "nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1,", "forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in range(self.num_upsamples): x =", "nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1: (-1,", "tb.add_figure(\"Walked wave\",fig, self.current_epoch) return def random_gain(self, sound:torch.Tensor) -> torch.Tensor: n,c,l = sound.shape maxes=", "torch.nn.functional as F import pytorch_lightning as pl from torchsummaryX import summary from torch.nn.utils", "self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo # get 
upsampling paddings self.pads = [] for", "output lengths init_len = h.n_fft self.L_ins = [init_len] self.L_outs = [] for r", "self.h = h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft", "out_,mean,var = self.forward(x1,x2) out = out_.tanh() # atanh grad explotsion mse = self.MSE(ans,", "x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in range(self.num_downsamples): x = F.leaky_relu(x,", "out.size(0) loss = marginal_likelihood + KL * self.kl_lambda #loss = self.kl_lambda * KL", "= 10 self.kl_lambda = h.kl_lambda # training settings self.MSE = nn.MSELoss() self.MAE =", "reset_seed(self): seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool = True): dummy", "drs = h.downsample_rates drks = h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples", "def reset_seed(self): seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool = True):", "= h.resblock_dilation_sizes drs = h.downsample_rates drks = h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels =", "h.upsample_kernel_sizes uic = h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len self.num_kernels = len(rks) self.num_upsamples =", "writer = SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ == '__main__':", "def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1: (-1, 1, n_fft) x2: (-1, 1,", "= c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt) x = xt +", "h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs = h.downsample_rates drks = h.downsample_kernel_sizes dci = h.downsample_initial_channel", "# 
training settings self.MSE = nn.MSELoss() self.MAE = nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype)", "utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing import Tuple from torchsummaryX", "= nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1:", "lengthes init_len = uik self.L_ins = [init_len] self.L_outs = [] for r in", "input lengthes and output lengths init_len = h.n_fft self.L_ins = [init_len] self.L_outs =", "self.forward(x1,x2) out = out_.tanh() # atanh grad explotsion mse = self.MSE(ans, out) mae", "dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def", "self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in", "= [] for i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for i, (u,", "self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2) + var - torch.log(var) -1 ).sum() / out.size(0)", "+ KL * self.kl_lambda #loss = self.kl_lambda * KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse)", "range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for i, (u, k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm(", "dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self): print(\"Removing weight norm...\") for l in", "= x.tanh() # grad explosion ? 
return x def summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1)", "summary(self,dummy) def remove_weight_norm(self): print('Removing weight norm...') for l in self.ups: remove_weight_norm(l) for l", "assert len(action.shape) == 3 assert len(previous_wave.shape) == 3 if previous_wave.size(-1) < self.n_fft :", "= torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() # grad explosion ? wave", "rds = h.resblock_dilation_sizes drs = h.downsample_rates drks = h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels", "in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling channels self.channels = []", "explotsion mse = self.MSE(ans, out) mae = self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2) +", "= get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling channels self.channels = [] for i in", "nn.ModuleList() for i, (u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList()", "+= self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x) x = self.conv_post(x)", "return sound def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs", ": pad_len = self.n_fft - previous_wave.size(-1) n,c,l = previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave", "? 
wave = torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self): seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed)", "from torchsummaryX import summary import numpy as np import random from torch.utils.tensorboard import", "[] for i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for i, (u, k)", "action : (-1, ratent_dim, 1) previous_wave : (-1,ch, l) \"\"\" if previous_wave is", "channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2])))", "= nn.ModuleList() for i, (u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks =", "self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h", "= d_out.tanh() # grad explosion ? 
wave = torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self):", "init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing import Tuple from torchsummaryX import summary import numpy", "weight_norm, remove_weight_norm from utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing import", "mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2 = self.forward(x2)", "in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor:", "= nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1,", "x = self.dns[i](x) xs = None for j in range(self.num_kernels): if xs is", "\"\"\" batch : (-1, ch, n_fft+hop_len) \"\"\" sound, = batch sound = sound.type(self.dtype)", "= h.kl_lambda # training settings self.MSE = nn.MSELoss() self.MAE = nn.L1Loss() self.actions =", "1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels,", "rds = h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs = h.upsample_rates urks = h.upsample_kernel_sizes uic", "= self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def", "self.view_interval = 10 self.kl_lambda = h.kl_lambda # training settings self.MSE = nn.MSELoss() self.MAE", "nn.Conv1d(self.channels[i], 
self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.dns)): ch = self.channels[i+1]", "c1, c2 in zip(self.convs1, self.convs2): xt = F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt) xt", "d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() # grad explosion ? wave = torch.cat([previous_wave,d_out],dim=-1)", "self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.ups)): ch =", "self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels,", "nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.ups)): ch = self.channels[i+1]", "def __init__(self, h): super().__init__() self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes", "torchsummaryX import summary from torch.nn.utils import weight_norm, remove_weight_norm from utils import get_padding, get_conv1d_outlen,", "self.pads = [] for i,r in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get", "self.pads.append(pad) # get upsampling channels self.channels = [uic] ch = uic for i", "channels, kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels,", "= [] for i,r in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling", "torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = 
self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in =", "i, (u, k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList()", "1e-8 return mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2", "out = self.decoder(out) return out,mean,var def on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor],", "if previous_wave is None: if self.silence is None: self.set_silence() previous_wave = self.silence assert", "= True): dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if tensorboard: writer = SummaryWriter() writer.add_graph(self,", "expected input lengthes and output lengths init_len = h.n_fft self.L_ins = [init_len] self.L_outs", "Encoder(nn.Module): def __init__(self, h): super().__init__() self.h = h rks = h.resblock_kernel_sizes rds =", "1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels,", "h.hop_len) + 1 self.walking_resolution = h.walking_resolution self.out_len = self.decoder.out_len self.view_interval = 10 self.kl_lambda", "= self.dns[i](x) xs = None for j in range(self.num_kernels): if xs is None:", "self.conv_post(x) var = F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) ->", "self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x) x = self.conv_post(x) l", "= [uic] ch = uic for i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList()", "i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList() for i, (u,k) in enumerate(zip(urs,urks)): 
self.ups.append(weight_norm(", "== 3 if previous_wave.size(-1) < self.n_fft : pad_len = self.n_fft - previous_wave.size(-1) n,c,l", "dilation=(1, 3, 5)): super().__init__() assert len(dilation) == 3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels,", "F.leaky_relu(x) mean = self.conv_post(x) var = F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var def dual_flow(self,", "self.pads = [] for i,r in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get", "self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x =", "dummy) def remove_weight_norm(self): print(\"Removing weight norm...\") for l in self.dns: remove_weight_norm(l) for l", "self.L_outs.append(1) # get downsampling paddings self.pads = [] for i,r in enumerate(drs): pad", "[] for i,r in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling channels", "= nn.ModuleList() for i, (u, k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) ))", "__init__(self, h) -> None: super().__init__() self.h = h rks = h.resblock_kernel_sizes rds =", "self.resblocks = nn.ModuleList() for i in range(len(self.ups)): ch = self.channels[i+1] for j,(k,d) in", "self.current_epoch%self.view_interval !=0: return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None for act", "None def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None): if interval: self.view_interval= interval", "self.conv_post.apply(init_weights) def forward(self, 
x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in range(self.num_upsamples):", "KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad()", "(-1, ratent_dim, 1) previous_wave : (-1,ch, l) \"\"\" if previous_wave is None: if", "h.n_fft +h.hop_len self.num_kernels = len(rks) self.num_upsamples = len(urs) self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) #", "self.out_len)/2) x = x[:,:,start:start+self.out_len] #x = x.tanh() # grad explosion ? return x", "channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def forward(self, x): for c1,", "if xs is None: xs = self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x) x =", "= h.upsample_kernel_sizes uic = h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len self.num_kernels = len(rks) self.num_upsamples", "explosion ? 
return x def summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing", "return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None for act in self.actions.unsqueeze(1):", "summary(self, dummy,dummy) if tensorboard: writer = SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm()", "channels, kernel_size=3, dilation=(1, 3, 5)): super().__init__() assert len(dilation) == 3 self.convs1 = nn.ModuleList([", "sound out_,mean,var = self.forward(x1,x2) out = out_.tanh() # atanh grad explotsion mse =", "out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy =", "-> torch.Tensor: \"\"\" x1: (-1, 1, n_fft) x2: (-1, 1, n_fft) \"\"\" mean1,var1", "== 3 assert len(previous_wave.shape) == 3 if previous_wave.size(-1) < self.n_fft : pad_len =", "]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels,", "= h.ratent_dim self.walking_steps = int(h.breath_len / h.hop_len) + 1 self.walking_resolution = h.walking_resolution self.out_len", "- torch.log(var) -1 ).sum() / out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out)) marginal_likelihood=", "import matplotlib.pyplot as plt LRELU_SLOPE = 0.1 class ResBlock(nn.Module): def __init__(self, channels, kernel_size=3,", "get downsampling channels self.channels = [] for i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns =", "self.L_ins = [init_len] self.L_outs = [] for r in drs: lo = int(init_len/r)", "padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, 
channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1,", "1, dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def forward(self, x): for c1, c2 in", "[init_len] self.L_outs = [] for r in urs: lo = init_len * r", "nn.MSELoss() self.MAE = nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor:", "wave= self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space", "action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action : (-1, ratent_dim, 1) previous_wave : (-1,ch, l)", "out1,out2 = mean1,mean2 out = torch.cat([out1, out2], dim=1) #.tanh() # notanh return out", "h.kl_lambda # training settings self.MSE = nn.MSELoss() self.MAE = nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim,", "xs / self.num_kernels x = F.leaky_relu(x) mean = self.conv_post(x) var = F.softplus(self.conv_post_var(x)) +", "= mean1,mean2 out = torch.cat([out1, out2], dim=1) #.tanh() # notanh return out @staticmethod", "marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss = marginal_likelihood + KL * self.kl_lambda #loss =", "padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size,", "# grad explosion ? 
return x def summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def", "return x def summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing weight norm...')", "dim=1) #.tanh() # notanh return out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean +", "(sound/maxes) * gains return sound def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler =", "1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def forward(self, x):", "-> None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\" batch : (-1,", "summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self): print(\"Removing weight norm...\") for l", "upsampling paddings self.pads = [] for i,r in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad)", "for i,r in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling channels self.channels", "def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self,", "in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in", "else: xs += self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x) mean", "scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence = None def set_silence(self): self.silence", 
"l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h =", "if __name__ == '__main__': from utils import load_config config = load_config(\"hparams/origin.json\") model =", "= h.n_fft self.L_ins = [init_len] self.L_outs = [] for r in drs: lo", "h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs = h.upsample_rates urks = h.upsample_kernel_sizes", "torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out) return out,mean,var def on_fit_start(self) ->", "= wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig", "remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h = h", "random from torch.utils.tensorboard import SummaryWriter import matplotlib.pyplot as plt LRELU_SLOPE = 0.1 class", "-> torch.Tensor: x = self.conv_pre(x) for i in range(self.num_downsamples): x = F.leaky_relu(x, LRELU_SLOPE)", "for l in self.convs1: remove_weight_norm(l) for l in self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def", "i in range(len(self.ups)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post =", "self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ == '__main__': from utils import load_config config = load_config(\"hparams/origin.json\")", "self.kl_lambda = h.kl_lambda # training settings self.MSE = nn.MSELoss() self.MAE = nn.L1Loss() self.actions", "out,mean,var def 
on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\"", "self.num_downsamples = len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get expected input lengthes", "l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self, h) -> None:", "init_len = lo self.L_outs.append(1) # get downsampling paddings self.pads = [] for i,r", "class Encoder(nn.Module): def __init__(self, h): super().__init__() self.h = h rks = h.resblock_kernel_sizes rds", "as plt LRELU_SLOPE = 0.1 class ResBlock(nn.Module): def __init__(self, channels, kernel_size=3, dilation=(1, 3,", "= F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt)", "dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing weight norm...') for l in self.ups:", "h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps = int(h.breath_len / h.hop_len) + 1 self.walking_resolution =", "h) -> None: super().__init__() self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes", "log audio wave. 
\"\"\" if self.current_epoch%self.view_interval !=0: return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype)", "# notanh return out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var) def", "pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh", "= h.upsample_initial_kernel urs = h.upsample_rates urks = h.upsample_kernel_sizes uic = h.upsample_initial_channel self.out_len =", "# get expected input lengthes and output lengthes init_len = uik self.L_ins =", "wave = None for act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() #", "(-1,ch, l) \"\"\" if previous_wave is None: if self.silence is None: self.set_silence() previous_wave", "def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action : (-1, ratent_dim, 1) previous_wave :", "mean = self.conv_post(x) var = F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var def dual_flow(self, x1:torch.Tensor,", "= torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence = None", "= torch.cat([out1, out2], dim=1) #.tanh() # notanh return out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor):", "< self.n_fft : pad_len = self.n_fft - previous_wave.size(-1) n,c,l = previous_wave.shape pad =", "def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, 
interval:int=None): if interval: self.view_interval= interval def", "= h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs = h.upsample_rates urks =", "1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1,", "remove_weight_norm(l) class Encoder(nn.Module): def __init__(self, h): super().__init__() self.h = h rks = h.resblock_kernel_sizes", "if self.current_epoch%self.view_interval !=0: return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None for", "self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor)", "div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad() def on_epoch_end(self) -> None: \"\"\" walk through", "the ratent space and log audio wave. \"\"\" if self.current_epoch%self.view_interval !=0: return self.actions", "? 
return x def summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing weight", "torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence = None def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype)", "n,c,l = previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded", "in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList() for i, (u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i],", "= weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get expected input lengthes and output lengths init_len", "\"\"\" action : (-1, ratent_dim, 1) previous_wave : (-1,ch, l) \"\"\" if previous_wave", "= torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1) d_out", "def summary(self,tensorboard:bool = True): dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if tensorboard: writer =", "= weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor:", "[uic] ch = uic for i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList() for", "torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None): if interval: 
self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor:", "= self.conv_post(x) var = F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True)", "lo # get upsampling paddings self.pads = [] for i,r in enumerate(urs): pad", "dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2],", "+ 1 self.walking_resolution = h.walking_resolution self.out_len = self.decoder.out_len self.view_interval = 10 self.kl_lambda =", "out2 = self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2 out = torch.cat([out1, out2], dim=1) #.tanh()", "mae = self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2) + var - torch.log(var) -1 ).sum()", "__name__ == '__main__': from utils import load_config config = load_config(\"hparams/origin.json\") model = VoiceBand(config)", "else: out1,out2 = mean1,mean2 out = torch.cat([out1, out2], dim=1) #.tanh() # notanh return", "range(self.num_upsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x) xs = None for j", "xt + x return x def remove_weight_norm(self): for l in self.convs1: remove_weight_norm(l) for", "summary from torch.nn.utils import weight_norm, remove_weight_norm from utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down,", "torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing weight norm...') for l in self.ups: remove_weight_norm(l) for", "\"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()#", "self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = 
self.conv_pre(x) for i in range(self.num_downsamples):", "10 self.kl_lambda = h.kl_lambda # training settings self.MSE = nn.MSELoss() self.MAE = nn.L1Loss()", "paddings self.pads = [] for i,r in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) #", "walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None for act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave", "self.n_fft : pad_len = self.n_fft - previous_wave.size(-1) n,c,l = previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device)", "None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\" batch : (-1, ch,", "mean1,var1 = self.forward(x1) mean2,var2 = self.forward(x2) if with_random: out1 = self.random_sample(mean1,var1) out2 =", "n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out =", "self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get expected input lengthes and output lengths", "#marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss = marginal_likelihood", "kernel_size=3, dilation=(1, 3, 5)): super().__init__() assert len(dilation) == 3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels,", "= init_len * r self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo # get upsampling paddings", "enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x", "x = self.conv_post(x) l = 
x.size(-1) start = int((l - self.out_len)/2) x =", "set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None): if interval: self.view_interval= interval def predict_one_step(self,", "ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var =", "init_len * r self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo # get upsampling paddings self.pads", "LRELU_SLOPE) x = self.ups[i](x) xs = None for j in range(self.num_kernels): if xs", "-> None: super().__init__() self.h = h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device)", "= int(h.breath_len / h.hop_len) + 1 self.walking_resolution = h.walking_resolution self.out_len = self.decoder.out_len self.view_interval", "self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() # grad explosion ? wave = torch.cat([previous_wave,d_out],dim=-1) return wave", "torch.nn.utils import weight_norm, remove_weight_norm from utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from", "SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ == '__main__': from utils", "self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs", "-> None: \"\"\" walk through the ratent space and log audio wave. 
\"\"\"", "torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool = True): dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if tensorboard:", "x def remove_weight_norm(self): for l in self.convs1: remove_weight_norm(l) for l in self.convs2: remove_weight_norm(l)", "= x.size(-1) start = int((l - self.out_len)/2) x = x[:,:,start:start+self.out_len] #x = x.tanh()", "[] for r in drs: lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo", "None for j in range(self.num_kernels): if xs is None: xs = self.resblocks[i*self.num_kernels+j](x) else:", "nn.ModuleList() for i in range(len(self.ups)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d))", "self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def __init__(self, h): super().__init__() self.h = h rks =", "self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out) return out,mean,var", "k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i", "scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence = None def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self,", "encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh()", "= get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling channels self.channels = [uic] ch = uic", "l) \"\"\" if previous_wave is None: if self.silence is None: self.set_silence() previous_wave =", "typing import Tuple from torchsummaryX import summary import 
numpy as np import random", "ch, n_fft+hop_len) \"\"\" sound, = batch sound = sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound)", "= len(rks) self.num_upsamples = len(urs) self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected input", "settings self.MSE = nn.MSELoss() self.MAE = nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self,", "torch.cat([out1, out2], dim=1) #.tanh() # notanh return out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return", "-> torch.Tensor: \"\"\" batch : (-1, ch, n_fft+hop_len) \"\"\" sound, = batch sound", "if interval: self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action : (-1,", "get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing import Tuple from torchsummaryX import summary import", "= None for j in range(self.num_kernels): if xs is None: xs = self.resblocks[i*self.num_kernels+j](x)", "self.n_fft = h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps = int(h.breath_len / h.hop_len) + 1", "import torch.nn.functional as F import pytorch_lightning as pl from torchsummaryX import summary from", "= out_.tanh() # atanh grad explotsion mse = self.MSE(ans, out) mae = self.MAE(ans,out)", "= h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs = h.upsample_rates urks = h.upsample_kernel_sizes uic =", "for i, (u, k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks =", "self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.dns)): ch = self.channels[i+1] for", "weight norm...\") for l in self.dns: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() 
remove_weight_norm(self.conv_pre)", "urs = h.upsample_rates urks = h.upsample_kernel_sizes uic = h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len", "and log audio wave. \"\"\" if self.current_epoch%self.view_interval !=0: return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,", "i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for i, (u, k) in enumerate(zip(drs,", "kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ])", "Decoder(nn.Module): def __init__(self, h) -> None: super().__init__() self.h = h rks = h.resblock_kernel_sizes", "np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool = True): dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy)", "ratent space and log audio wave. 
\"\"\" if self.current_epoch%self.view_interval !=0: return self.actions =", "self.MSE(ans, out) mae = self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2) + var - torch.log(var)", "def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler]", "# tensorboard logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure()", "# atanh grad explotsion mse = self.MSE(ans, out) mae = self.MAE(ans,out) KL =", "h.ratent_dim self.walking_steps = int(h.breath_len / h.hop_len) + 1 self.walking_resolution = h.walking_resolution self.out_len =", "len(previous_wave.shape) == 3 if previous_wave.size(-1) < self.n_fft : pad_len = self.n_fft - previous_wave.size(-1)", "= self.n_fft - previous_wave.size(-1) n,c,l = previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1)", "torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1) d_out =", "return mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2 =", "self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2 out = torch.cat([out1, out2], dim=1) #.tanh() # notanh", "+ torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self): print(\"Removing weight", "self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected input lengthes and output lengthes init_len", "= self.decoder(out) 
return out,mean,var def on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx)", "sound, = batch sound = sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft],", "batch_idx) -> torch.Tensor: \"\"\" batch : (-1, ch, n_fft+hop_len) \"\"\" sound, = batch", "self.kl_lambda * KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return", "(-1, 1, n_fft) x2: (-1, 1, n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2 =", "in zip(self.convs1, self.convs2): xt = F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt) xt = F.leaky_relu(xt,", "norm...\") for l in self.dns: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post)", "self.num_kernels = len(rks) self.num_upsamples = len(urs) self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected", "loss = marginal_likelihood + KL * self.kl_lambda #loss = self.kl_lambda * KL +", "random.seed(seed) def summary(self,tensorboard:bool = True): dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if tensorboard: writer", "paddings self.pads = [] for i,r in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) #", "F import pytorch_lightning as pl from torchsummaryX import summary from torch.nn.utils import weight_norm,", "Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps = int(h.breath_len", "start = int((l - self.out_len)/2) x = x[:,:,start:start+self.out_len] #x = x.tanh() # grad", "F.leaky_relu(x) x = self.conv_post(x) l = x.size(-1) start = int((l - self.out_len)/2) x", "for c1, c2 in zip(self.convs1, self.convs2): xt = 
F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt)", "self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps = int(h.breath_len /", "remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self, h)", "* gains return sound def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim,", "+ 1e-8 return mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 = self.forward(x1)", "x = self.conv_pre(x) for i in range(self.num_downsamples): x = F.leaky_relu(x, LRELU_SLOPE) x =", "None: xs = self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels", "in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self, h) -> None: super().__init__()", "len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get expected input lengthes and output", "self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None for act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave =", "x def summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing weight norm...') for", "= torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if tensorboard: writer = SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self):", "= uic for i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList() for i, (u,k)", "= len(urs) self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected input lengthes and 
output", "return wave def reset_seed(self): seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool", "self.L_ins.append(lo) init_len = lo self.L_outs.append(1) # get downsampling paddings self.pads = [] for", "h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len self.num_kernels = len(rks) self.num_upsamples = len(urs) self.conv_pre =", "= torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in", "self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size,", "for i in range(self.num_downsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x) xs =", "= F.leaky_relu(x) mean = self.conv_post(x) var = F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var def", "= 0.5*torch.sum( torch.pow(mean,2) + var - torch.log(var) -1 ).sum() / out.size(0) #marginal_likelihood =", "print(\"Removing weight norm...\") for l in self.dns: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm()", "out = self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out) return out,mean,var def on_fit_start(self) -> None:", "= Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps = int(h.breath_len / h.hop_len)", "= self.forward(x1) mean2,var2 = self.forward(x2) if with_random: out1 = self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2)", "super().__init__() self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik = h.upsample_initial_kernel", "len(rks) self.num_upsamples = len(urs) self.conv_pre = 
weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected input lengthes", "\"\"\" if self.current_epoch%self.view_interval !=0: return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None", "= h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples = len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3))", "def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in range(self.num_downsamples): x", "= walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1: (-1, 1, n_fft)", "sound def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return", "for r in urs: lo = init_len * r self.L_ins.append(lo) self.L_outs.append(lo) init_len =", "self.channels = [] for i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for i,", "rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs = h.upsample_rates urks", "5)): super().__init__() assert len(dilation) == 3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1,", "self.pads.append(pad) # get downsampling channels self.channels = [] for i in range(len(drs)+1): self.channels.append(dci*(2**i))", "uic for i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList() for i, (u,k) in", "from utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing import Tuple from", "= [] for r in urs: lo = init_len * r self.L_ins.append(lo) self.L_outs.append(lo)", 
"self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad() def on_epoch_end(self) ->", "# get downsampling channels self.channels = [] for i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns", "gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence = None def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def", "1))) ]) self.convs2.apply(init_weights) def forward(self, x): for c1, c2 in zip(self.convs1, self.convs2): xt", "= self.MSE(ans, out) mae = self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2) + var -", "KL * self.kl_lambda #loss = self.kl_lambda * KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae)", "torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() # grad explosion ? 
wave =", "optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence =", "xt = c2(xt) x = xt + x return x def remove_weight_norm(self): for", "walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1: (-1, 1, n_fft) x2:", "x): for c1, c2 in zip(self.convs1, self.convs2): xt = F.leaky_relu(x, LRELU_SLOPE) xt =", "torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if tensorboard: writer = SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm()", "get upsampling paddings self.pads = [] for i,r in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r)", "def forward(self, x): for c1, c2 in zip(self.convs1, self.convs2): xt = F.leaky_relu(x, LRELU_SLOPE)", "= torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None): if interval: self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) ->", "as F import pytorch_lightning as pl from torchsummaryX import summary from torch.nn.utils import", "-> None: super().__init__() self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik", "tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure() ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig,", "= nn.ModuleList() for i in range(len(self.dns)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)):", "ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch) return def random_gain(self, sound:torch.Tensor) -> torch.Tensor: n,c,l = sound.shape", ")) self.resblocks = 
nn.ModuleList() for i in range(len(self.dns)): ch = self.channels[i+1] for j,(k,d)", "= self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out) return out,mean,var def on_fit_start(self) -> None: self.logger.log_hyperparams(self.h)", "[dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ == '__main__': from utils import load_config", "in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.ups)):", "batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\" batch : (-1, ch, n_fft+hop_len) \"\"\" sound, =", "tensorboard logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure() ax", "self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\" batch : (-1, ch, n_fft+hop_len)", "self.out_len = self.decoder.out_len self.view_interval = 10 self.kl_lambda = h.kl_lambda # training settings self.MSE", "assert len(dilation) == 3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size,", "self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs = h.downsample_rates drks", "= sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var =", "x[:,:,start:start+self.out_len] #x = x.tanh() # grad explosion ? 
return x def summary(self): dummy", "self.dns: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self,", "sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2) out = out_.tanh() # atanh grad explotsion mse", "return [optim],[scheduler] silence = None def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None):", "import torch import torch.nn as nn import torch.nn.functional as F import pytorch_lightning as", "torch.Tensor: x = self.conv_pre(x) for i in range(self.num_upsamples): x = F.leaky_relu(x, LRELU_SLOPE) x", "self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh out =", "== '__main__': from utils import load_config config = load_config(\"hparams/origin.json\") model = VoiceBand(config) model.summary()", "padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def forward(self,", "x = xs / self.num_kernels x = F.leaky_relu(x) x = self.conv_post(x) l =", "kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size,", "= self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh out", "on_epoch_end(self) -> None: \"\"\" walk through the ratent space and log audio wave.", "matplotlib.pyplot as plt LRELU_SLOPE = 0.1 class ResBlock(nn.Module): def __init__(self, channels, kernel_size=3, 
dilation=(1,", "weight norm...') for l in self.ups: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre)", "self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x) mean = self.conv_post(x) var", "return mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self):", "= F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1", "F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss = marginal_likelihood + KL * self.kl_lambda #loss = self.kl_lambda", "is None: self.set_silence() previous_wave = self.silence assert len(action.shape) == 3 assert len(previous_wave.shape) ==", "= xs / self.num_kernels x = F.leaky_relu(x) mean = self.conv_post(x) var = F.softplus(self.conv_post_var(x))", "on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\" batch :", "self.ups[i](x) xs = None for j in range(self.num_kernels): if xs is None: xs", "previous_wave = self.silence assert len(action.shape) == 3 assert len(previous_wave.shape) == 3 if previous_wave.size(-1)", "get_padding_up,walk_ratent_space from typing import Tuple from torchsummaryX import summary import numpy as np", "pad_len = self.n_fft - previous_wave.size(-1) n,c,l = previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave =", "self.silence assert len(action.shape) == 3 assert len(previous_wave.shape) == 3 if previous_wave.size(-1) < self.n_fft", "= torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing weight norm...') for l in self.ups: remove_weight_norm(l)", "weight_norm(nn.Conv1d(channels, channels, 
kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels,", "nn.ModuleList() for i, (u, k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks", "init_len = uik self.L_ins = [init_len] self.L_outs = [] for r in urs:", "__init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h = h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder", "for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor)", "sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains = torch.rand_like(maxes) sound = (sound/maxes) * gains return", "from typing import Tuple from torchsummaryX import summary import numpy as np import", "forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1: (-1, 1, n_fft) x2: (-1, 1, n_fft)", "x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1: (-1, 1, n_fft) x2: (-1, 1, n_fft) \"\"\"", "len(rks) self.num_downsamples = len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get expected input", "i,r in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling channels self.channels =", "for j in range(self.num_kernels): if xs is None: xs = self.resblocks[i*self.num_kernels+j](x) else: xs", "torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self): print(\"Removing weight norm...\") for l in self.dns: remove_weight_norm(l)", "for i in range(self.num_upsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x) xs =", "\"\"\" if 
previous_wave is None: if self.silence is None: self.set_silence() previous_wave = self.silence", "self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad() def on_epoch_end(self) -> None: \"\"\" walk", "var = F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor:", "uic = h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len self.num_kernels = len(rks) self.num_upsamples = len(urs)", "self.walking_steps = int(h.breath_len / h.hop_len) + 1 self.walking_resolution = h.walking_resolution self.out_len = self.decoder.out_len", "remove_weight_norm(self): for l in self.convs1: remove_weight_norm(l) for l in self.convs2: remove_weight_norm(l) class Encoder(nn.Module):", "h.upsample_rates urks = h.upsample_kernel_sizes uic = h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len self.num_kernels =", "summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing weight norm...') for l in", "\"\"\" x1: (-1, 1, n_fft) x2: (-1, 1, n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1)", "self.forward(x2) if with_random: out1 = self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2", "* r self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo # get upsampling paddings self.pads =", "= self.conv_pre(x) for i in range(self.num_upsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x)", "self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence = None def set_silence(self):", "= self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() #", "= 
self.conv_pre(x) for i in range(self.num_downsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x)", "len(dilation) == 3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))),", "= (sound/maxes) * gains return sound def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler", "x = F.leaky_relu(x) mean = self.conv_post(x) var = F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var", "= self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2) + var - torch.log(var) -1 ).sum() /", "= h.n_fft +h.hop_len self.num_kernels = len(rks) self.num_upsamples = len(urs) self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik))", "from torchsummaryX import summary from torch.nn.utils import weight_norm, remove_weight_norm from utils import get_padding,", "kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels,", "lo = init_len * r self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo # get upsampling", "audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure() ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch) return", "in self.dns: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def", "def __init__(self, h) -> None: super().__init__() self.h = h rks = h.resblock_kernel_sizes rds", "drks = h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples = len(drs) self.conv_pre", "summary import numpy as np import random from torch.utils.tensorboard import SummaryWriter import 
matplotlib.pyplot", "= self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x =", "= uik self.L_ins = [init_len] self.L_outs = [] for r in urs: lo", "def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ == '__main__': from utils import load_config config", "== 3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels,", "= lo self.L_outs.append(1) # get downsampling paddings self.pads = [] for i,r in", "self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self,", "-> torch.Tensor: x = self.conv_pre(x) for i in range(self.num_upsamples): x = F.leaky_relu(x, LRELU_SLOPE)", "notanh return out @staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self):", "xt = F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt) x = xt + x return", "= 0.1 class ResBlock(nn.Module): def __init__(self, channels, kernel_size=3, dilation=(1, 3, 5)): super().__init__() assert", "channels self.channels = [] for i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for", ")) self.resblocks = nn.ModuleList() for i in range(len(self.ups)): ch = self.channels[i+1] for j,(k,d)", "training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\" batch : (-1, ch, n_fft+hop_len) \"\"\" sound,", "i in range(self.num_downsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x) xs = None", "torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss = marginal_likelihood + KL * 
self.kl_lambda #loss", "self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool = True): dummy = torch.randn(1,1,self.n_fft) summary(self,", "def set_view_interval(self, interval:int=None): if interval: self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\"", "is None: xs = self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x) x = xs /", "batch : (-1, ch, n_fft+hop_len) \"\"\" sound, = batch sound = sound.type(self.dtype) if", "torch.Tensor: \"\"\" batch : (-1, ch, n_fft+hop_len) \"\"\" sound, = batch sound =", "None for act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging", ": (-1, ratent_dim, 1) previous_wave : (-1,ch, l) \"\"\" if previous_wave is None:", "seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool = True): dummy =", "self.decoder(out) return out,mean,var def on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx) ->", "self.L_outs = [] for r in drs: lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len", "len(urs) self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected input lengthes and output lengthes", "LRELU_SLOPE) x = self.dns[i](x) xs = None for j in range(self.num_kernels): if xs", "= lo # get upsampling paddings self.pads = [] for i,r in enumerate(urs):", "h.upsample_initial_kernel urs = h.upsample_rates urks = h.upsample_kernel_sizes uic = h.upsample_initial_channel self.out_len = h.n_fft", "i, (u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = 
nn.ModuleList() for i", "mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad() def on_epoch_end(self)", "= nn.ModuleList() for i in range(len(self.ups)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)):", "space and log audio wave. \"\"\" if self.current_epoch%self.view_interval !=0: return self.actions = walk_ratent_space(self.ratent_dim,", "self.resblocks = nn.ModuleList() for i in range(len(self.dns)): ch = self.channels[i+1] for j,(k,d) in", "self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x)", "= self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out) return", "= torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out) return out,mean,var def on_fit_start(self)", "1 self.walking_resolution = h.walking_resolution self.out_len = self.decoder.out_len self.view_interval = 10 self.kl_lambda = h.kl_lambda", "= None for act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard", "assert len(previous_wave.shape) == 3 if previous_wave.size(-1) < self.n_fft : pad_len = self.n_fft -", "padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 =", "self.num_upsamples = len(urs) self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected input lengthes and", "self.current_epoch) return def random_gain(self, 
sound:torch.Tensor) -> torch.Tensor: n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0]", "enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.dns)):", "1, n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out", "KL = 0.5*torch.sum( torch.pow(mean,2) + var - torch.log(var) -1 ).sum() / out.size(0) #marginal_likelihood", "= sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2) out = out_.tanh() # atanh grad", "def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h = h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device)", "ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights)", "self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size,", "= h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs = h.downsample_rates drks =", "torch.utils.tensorboard import SummaryWriter import matplotlib.pyplot as plt LRELU_SLOPE = 0.1 class ResBlock(nn.Module): def", "drs: lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo self.L_outs.append(1) # get downsampling", "mean2,var2 = self.forward(x2) if with_random: out1 = self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else: out1,out2", "def training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\" batch : (-1, ch, 
n_fft+hop_len) \"\"\"", "self.num_kernels x = F.leaky_relu(x) mean = self.conv_post(x) var = F.softplus(self.conv_post_var(x)) + 1e-8 return", "self.dns = nn.ModuleList() for i, (u, k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i])", "3, 5)): super().__init__() assert len(dilation) == 3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size,", "l = x.size(-1) start = int((l - self.out_len)/2) x = x[:,:,start:start+self.out_len] #x =", "mean1,var1 = self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var = torch.cat([mean1,mean2],dim=1),torch.cat([var1,var2],dim=1) out = self.encoder.random_sample(mean,var)#.tanh()# notanh", "padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1,", "mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self): print(\"Removing", "self.encoder.random_sample(mean,var)#.tanh()# notanh out = self.decoder(out) return out,mean,var def on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def", "in range(len(self.ups)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3))", "in drs: lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo self.L_outs.append(1) # get", "1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights)", "else: xs += self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x) x", 
"c2 in zip(self.convs1, self.convs2): xt = F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt) xt =", "maxes[maxes==0.0] = 1.0 gains = torch.rand_like(maxes) sound = (sound/maxes) * gains return sound", "sound = sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var", "= self.decoder.out_len self.view_interval = 10 self.kl_lambda = h.kl_lambda # training settings self.MSE =", "drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.dns)): ch", "0.1 class ResBlock(nn.Module): def __init__(self, channels, kernel_size=3, dilation=(1, 3, 5)): super().__init__() assert len(dilation)", "h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples = len(drs) self.conv_pre = weight_norm(nn.Conv1d(1,", "3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels,", "+ x return x def remove_weight_norm(self): for l in self.convs1: remove_weight_norm(l) for l", "#loss = self.kl_lambda * KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal", "self.walking_resolution = h.walking_resolution self.out_len = self.decoder.out_len self.view_interval = 10 self.kl_lambda = h.kl_lambda #", "torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay) scheduler.last_epoch=self.trainer.max_epochs return [optim],[scheduler] silence = None def", "uic,uik)) # get expected input lengthes and output lengthes init_len = uik self.L_ins", "lengths init_len = h.n_fft self.L_ins = [init_len] self.L_outs = [] for r in", "import torch.nn as nn import 
torch.nn.functional as F import pytorch_lightning as pl from", "self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self, h) -> None: super().__init__() self.h", "return out,mean,var def on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor:", "l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self, h) -> None: super().__init__() self.h =", "= h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs =", "* KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss", "channels self.channels = [uic] ch = uic for i in range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups", "summary(self,tensorboard:bool = True): dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if tensorboard: writer = SummaryWriter()", "d_out.tanh() # grad explosion ? 
wave = torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self): seed", "previous_wave.size(-1) < self.n_fft : pad_len = self.n_fft - previous_wave.size(-1) n,c,l = previous_wave.shape pad", "= [init_len] self.L_outs = [] for r in drs: lo = int(init_len/r) self.L_outs.append(lo)", "rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs = h.downsample_rates drks = h.downsample_kernel_sizes dci", "# get downsampling paddings self.pads = [] for i,r in enumerate(drs): pad =", "in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter = self.logger.experiment", "+ var - torch.log(var) -1 ).sum() / out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in", "previous_wave is None: if self.silence is None: self.set_silence() previous_wave = self.silence assert len(action.shape)", "]) self.convs2.apply(init_weights) def forward(self, x): for c1, c2 in zip(self.convs1, self.convs2): xt =", "= 1.0 gains = torch.rand_like(maxes) sound = (sound/maxes) * gains return sound def", "wave def reset_seed(self): seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed) def summary(self,tensorboard:bool =", "= self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1]))", "forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in range(self.num_downsamples): x =", "= weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for", "as nn import torch.nn.functional as F import pytorch_lightning as pl from torchsummaryX 
import", "for i in range(len(self.ups)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post", "= torch.rand_like(maxes) sound = (sound/maxes) * gains return sound def configure_optimizers(self): optim =", "xs += self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x) mean =", "wave = torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self): seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed)", "self.L_outs = [] for r in urs: lo = init_len * r self.L_ins.append(lo)", "= batch sound = sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:],", "l in self.ups: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule):", "int((l - self.out_len)/2) x = x[:,:,start:start+self.out_len] #x = x.tanh() # grad explosion ?", "nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1],", "remove_weight_norm(l) for l in self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def __init__(self, h): super().__init__() self.h", "= h.downsample_rates drks = h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples =", "wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate)", "is None: if self.silence is None: self.set_silence() previous_wave = self.silence assert len(action.shape) ==", "fig = plt.figure() ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, 
self.current_epoch) return def random_gain(self,", "xt = F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE) xt =", "torch.Tensor: \"\"\" action : (-1, ratent_dim, 1) previous_wave : (-1,ch, l) \"\"\" if", "- previous_wave.size(-1) n,c,l = previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in =", "if tensorboard: writer = SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__", "[optim],[scheduler] silence = None def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None): if", "(-1, ch, n_fft+hop_len) \"\"\" sound, = batch sound = sound.type(self.dtype) if self.h.random_gain: sound=", "3 assert len(previous_wave.shape) == 3 if previous_wave.size(-1) < self.n_fft : pad_len = self.n_fft", "-> torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2 = self.forward(x2) if with_random: out1 = self.random_sample(mean1,var1)", "self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for i, (u, k) in enumerate(zip(drs, drks)): self.dns.append(weight_norm( nn.Conv1d(self.channels[i],", "self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad() def on_epoch_end(self) -> None:", "for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') ->", "xs += self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x) x =", "remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self, 
h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h = h self.reset_seed()", "self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None for act in self.actions.unsqueeze(1): wave=", "out = out_.tanh() # atanh grad explotsion mse = self.MSE(ans, out) mae =", "previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out =", "lengthes and output lengths init_len = h.n_fft self.L_ins = [init_len] self.L_outs = []", "l in self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def __init__(self, h): super().__init__() self.h = h", "torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2 = self.forward(x2) if with_random: out1 = self.random_sample(mean1,var1) out2", "from torch.utils.tensorboard import SummaryWriter import matplotlib.pyplot as plt LRELU_SLOPE = 0.1 class ResBlock(nn.Module):", "as pl from torchsummaryX import summary from torch.nn.utils import weight_norm, remove_weight_norm from utils", "# get expected input lengthes and output lengths init_len = h.n_fft self.L_ins =", "1, n_fft) x2: (-1, 1, n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2)", "self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure() ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked", "norm...') for l in self.ups: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post)", "padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def forward(self, x): for c1, c2 in zip(self.convs1, self.convs2):", "dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if 
tensorboard: writer = SummaryWriter() writer.add_graph(self, [dummy,dummy]) def", "and output lengths init_len = h.n_fft self.L_ins = [init_len] self.L_outs = [] for", "= [] for r in drs: lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len =", "[] for i,r in enumerate(urs): pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling channels", "x = x[:,:,start:start+self.out_len] #x = x.tanh() # grad explosion ? return x def", "int(h.breath_len / h.hop_len) + 1 self.walking_resolution = h.walking_resolution self.out_len = self.decoder.out_len self.view_interval =", "out) mae = self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2) + var - torch.log(var) -1", "= Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps =", "x = F.leaky_relu(x) x = self.conv_post(x) l = x.size(-1) start = int((l -", ": (-1, ch, n_fft+hop_len) \"\"\" sound, = batch sound = sound.type(self.dtype) if self.h.random_gain:", "1) previous_wave : (-1,ch, l) \"\"\" if previous_wave is None: if self.silence is", "dec_in = torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() # grad explosion ?", "dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size, dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2", "def remove_weight_norm(self): print(\"Removing weight norm...\") for l in self.dns: remove_weight_norm(l) for l in", "remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ == '__main__': from utils import load_config config =", "fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch) return def random_gain(self, sound:torch.Tensor) 
-> torch.Tensor: n,c,l =", "self.ups = nn.ModuleList() for i, (u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks", "range(len(self.dns)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var", "/ h.hop_len) + 1 self.walking_resolution = h.walking_resolution self.out_len = self.decoder.out_len self.view_interval = 10", "silence = None def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None): if interval:", "x2: (-1, 1, n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var =", "nn.ModuleList() for i in range(len(self.dns)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d))", "h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs = h.upsample_rates urks = h.upsample_kernel_sizes uic = h.upsample_initial_channel", "enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling channels self.channels = [] for", "def remove_weight_norm(self): print('Removing weight norm...') for l in self.ups: remove_weight_norm(l) for l in", "audio wave. 
\"\"\" if self.current_epoch%self.view_interval !=0: return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave", "= h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim", "self.kl_lambda #loss = self.kl_lambda * KL + mse self.log(\"loss\",loss) self.log(\"mse\",mse) self.log(\"mae\",mae) self.log(\"KL div\",KL)", "len(action.shape) == 3 assert len(previous_wave.shape) == 3 if previous_wave.size(-1) < self.n_fft : pad_len", "torch import torch.nn as nn import torch.nn.functional as F import pytorch_lightning as pl", "space audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure() ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch)", "ratent_dim, 1) previous_wave : (-1,ch, l) \"\"\" if previous_wave is None: if self.silence", "h.resblock_dilation_sizes drs = h.downsample_rates drks = h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels = len(rks)", "self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss = marginal_likelihood + KL", "self.set_silence() previous_wave = self.silence assert len(action.shape) == 3 assert len(previous_wave.shape) == 3 if", "LRELU_SLOPE) xt = c2(xt) x = xt + x return x def remove_weight_norm(self):", "= c2(xt) x = xt + x return x def remove_weight_norm(self): for l", "self.num_kernels x = F.leaky_relu(x) x = self.conv_post(x) l = x.size(-1) start = int((l", "n_fft+hop_len) \"\"\" sound, = batch sound = sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans", "tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure() ax = 
fig.add_subplot()", "for l in self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def __init__(self, h): super().__init__() self.h =", "(u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in", "x.tanh() # grad explosion ? return x def summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy)", "self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() # grad", "x = xt + x return x def remove_weight_norm(self): for l in self.convs1:", "= F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x) xs = None for j in range(self.num_kernels):", "self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2) out = out_.tanh() #", "= sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains = torch.rand_like(maxes) sound = (sound/maxes)", "interval: self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action : (-1, ratent_dim,", "interval:int=None): if interval: self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action :", "in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__()", "self.n_fft - previous_wave.size(-1) n,c,l = previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in", 
"weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size,", "range(self.num_downsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x) xs = None for j", "for l in self.dns: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class", "h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes uik = h.upsample_initial_kernel urs = h.upsample_rates", "= F.leaky_relu(x) x = self.conv_post(x) l = x.size(-1) start = int((l - self.out_len)/2)", "self.MAE = nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\"", "LRELU_SLOPE) xt = c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt) x =", "self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None): if interval: self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None)", "in range(self.num_upsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x) xs = None for", "self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space audio\",wave,", "torchsummaryX import summary import numpy as np import random from torch.utils.tensorboard import SummaryWriter", "= self.conv_post(x) l = x.size(-1) start = int((l - self.out_len)/2) x = x[:,:,start:start+self.out_len]", "remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self, h) -> None: super().__init__() self.h = h rks", "self.ups: remove_weight_norm(l) for l in self.resblocks: 
l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self,", "h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h = h self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder =", "def random_gain(self, sound:torch.Tensor) -> torch.Tensor: n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0", "previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded = self.encoder.forward(enc_in)[0]#.tanh()# notanh dec_in = torch.cat([encoded,action],dim=1)", "self.decoder.remove_weight_norm() if __name__ == '__main__': from utils import load_config config = load_config(\"hparams/origin.json\") model", "self.forward(x1) mean2,var2 = self.forward(x2) if with_random: out1 = self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else:", "input lengthes and output lengthes init_len = uik self.L_ins = [init_len] self.L_outs =", "= self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() # grad explosion ? 
wave = torch.cat([previous_wave,d_out],dim=-1) return", "= torch.cat([previous_wave,d_out],dim=-1) return wave def reset_seed(self): seed = self.h.seed np.random.seed(seed) torch.manual_seed(seed) torch.cuda.manual_seed(seed) random.seed(seed)", "range(len(urs)): self.channels.append(int(ch/(2**i))) self.ups = nn.ModuleList() for i, (u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i])", "numpy as np import random from torch.utils.tensorboard import SummaryWriter import matplotlib.pyplot as plt", "None: if self.silence is None: self.set_silence() previous_wave = self.silence assert len(action.shape) == 3", "None: self.set_silence() previous_wave = self.silence assert len(action.shape) == 3 assert len(previous_wave.shape) == 3", "previous_wave.size(-1) n,c,l = previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device)", "import numpy as np import random from torch.utils.tensorboard import SummaryWriter import matplotlib.pyplot as", "remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self, h) -> None: super().__init__() self.h = h", "'__main__': from utils import load_config config = load_config(\"hparams/origin.json\") model = VoiceBand(config) model.summary() model.remove_weight_norm()", "h.downsample_rates drks = h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples = len(drs)", "/ out.size(0) #marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss", "x.size(-1) start = int((l - self.out_len)/2) x = x[:,:,start:start+self.out_len] #x = x.tanh() #", 
"weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i", "i in range(len(self.dns)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post =", "-> torch.Tensor: \"\"\" action : (-1, ratent_dim, 1) previous_wave : (-1,ch, l) \"\"\"", "def remove_weight_norm(self): for l in self.convs1: remove_weight_norm(l) for l in self.convs2: remove_weight_norm(l) class", "= h.n_fft self.ratent_dim = h.ratent_dim self.walking_steps = int(h.breath_len / h.hop_len) + 1 self.walking_resolution", "__init__(self, h): super().__init__() self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs", "class Decoder(nn.Module): def __init__(self, h) -> None: super().__init__() self.h = h rks =", "\"\"\" sound, = batch sound = sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans =", "import summary import numpy as np import random from torch.utils.tensorboard import SummaryWriter import", "ResBlock(nn.Module): def __init__(self, channels, kernel_size=3, dilation=(1, 3, 5)): super().__init__() assert len(dilation) == 3", "loss @torch.no_grad() def on_epoch_end(self) -> None: \"\"\" walk through the ratent space and", "super().__init__() assert len(dilation) == 3 self.convs1 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0],", "weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2], padding=get_padding(kernel_size,", "forward(self, x): for c1, c2 in zip(self.convs1, self.convs2): xt = F.leaky_relu(x, LRELU_SLOPE) xt", "uik = h.upsample_initial_kernel urs = h.upsample_rates urks = h.upsample_kernel_sizes uic = h.upsample_initial_channel 
self.out_len", "grad explosion ? return x def summary(self): dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self):", "remove_weight_norm from utils import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing import Tuple", "import Tuple from torchsummaryX import summary import numpy as np import random from", "return loss @torch.no_grad() def on_epoch_end(self) -> None: \"\"\" walk through the ratent space", "out = torch.cat([out1, out2], dim=1) #.tanh() # notanh return out @staticmethod def random_sample(mean:torch.Tensor,", "/ self.num_kernels x = F.leaky_relu(x) x = self.conv_post(x) l = x.size(-1) start =", "x = self.conv_pre(x) for i in range(self.num_upsamples): x = F.leaky_relu(x, LRELU_SLOPE) x =", "in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for i, (u, k) in enumerate(zip(drs, drks)):", "and output lengthes init_len = uik self.L_ins = [init_len] self.L_outs = [] for", "in range(self.num_downsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x) xs = None for", "in range(len(self.dns)): ch = self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1]))", "import get_padding, get_conv1d_outlen, init_weights, get_padding_down, get_padding_up,walk_ratent_space from typing import Tuple from torchsummaryX import", "x = F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x) xs = None for j in", "in self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def __init__(self, h): super().__init__() self.h = h rks", "for i,r in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling channels self.channels", "self.out_len = h.n_fft +h.hop_len self.num_kernels = len(rks) self.num_upsamples = len(urs) self.conv_pre = 
weight_norm(nn.ConvTranspose1d(h.ratent_dim*2,", "in self.convs1: remove_weight_norm(l) for l in self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def __init__(self, h):", "kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))) ]) self.convs2.apply(init_weights) def forward(self, x): for c1, c2", "def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2 = self.forward(x2) if", "r in drs: lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo self.L_outs.append(1) #", "if self.silence is None: self.set_silence() previous_wave = self.silence assert len(action.shape) == 3 assert", "= SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ == '__main__': from", "dilation[2]))) ]) self.convs1.apply(init_weights) self.convs2 = nn.ModuleList([ weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))),", "super().__init__() self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs = h.downsample_rates", "VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None: super().__init__() self.h = h self.reset_seed() self.encoder =", "self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad() def on_epoch_end(self) -> None: \"\"\" walk through the", "def summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self): print(\"Removing weight norm...\") for", "get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling channels self.channels = [uic] ch = uic for", "device=self.device,dtype=self.dtype) wave = None for act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave = 
wave.squeeze(0).T.detach().cpu().numpy()", "torch.nn as nn import torch.nn.functional as F import pytorch_lightning as pl from torchsummaryX", "in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\") / out.size(0) loss = marginal_likelihood + KL * self.kl_lambda", "self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) -> torch.Tensor: \"\"\" x1: (-1, 1,", "walk through the ratent space and log audio wave. \"\"\" if self.current_epoch%self.view_interval !=0:", "self.conv_pre(x) for i in range(self.num_upsamples): x = F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x) xs", "/ out.size(0) loss = marginal_likelihood + KL * self.kl_lambda #loss = self.kl_lambda *", "init_len = lo # get upsampling paddings self.pads = [] for i,r in", "-> torch.Tensor: n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains = torch.rand_like(maxes)", "= self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure() ax = fig.add_subplot() ax.plot(wave)", "d_out = d_out.tanh() # grad explosion ? 
wave = torch.cat([previous_wave,d_out],dim=-1) return wave def", "for i, (u,k) in enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for", "for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights)", "sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2) out = out_.tanh()", "= marginal_likelihood + KL * self.kl_lambda #loss = self.kl_lambda * KL + mse", "self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2) out =", "x = F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x) xs = None for j in", "random_gain(self, sound:torch.Tensor) -> torch.Tensor: n,c,l = sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains", "pl from torchsummaryX import summary from torch.nn.utils import weight_norm, remove_weight_norm from utils import", "n_fft) x2: (-1, 1, n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2 = self.encoder.forward(x2) mean,var", "torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy) def remove_weight_norm(self): print(\"Removing weight norm...\")", "import pytorch_lightning as pl from torchsummaryX import summary from torch.nn.utils import weight_norm, remove_weight_norm", "x1: (-1, 1, n_fft) x2: (-1, 1, n_fft) \"\"\" mean1,var1 = self.encoder.forward(x1) mean2,var2", "xs is None: xs = self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x) 
x = xs", "True): dummy = torch.randn(1,1,self.n_fft) summary(self, dummy,dummy) if tensorboard: writer = SummaryWriter() writer.add_graph(self, [dummy,dummy])", "ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch) return def random_gain(self, sound:torch.Tensor) -> torch.Tensor:", "interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action : (-1, ratent_dim, 1) previous_wave", "7,1,3)) # get expected input lengthes and output lengths init_len = h.n_fft self.L_ins", "self.current_epoch,self.h.frame_rate) fig = plt.figure() ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch) return def", "lo self.L_outs.append(1) # get downsampling paddings self.pads = [] for i,r in enumerate(drs):", "lengthes and output lengthes init_len = uik self.L_ins = [init_len] self.L_outs = []", "weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x", "self.channels[i+1] for j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.conv_post_var = weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights)", "dci, 7,1,3)) # get expected input lengthes and output lengths init_len = h.n_fft", "for i in range(len(drs)+1): self.channels.append(dci*(2**i)) self.dns = nn.ModuleList() for i, (u, k) in", "range(self.num_kernels): if xs is None: xs = self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x) x", "weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get expected input lengthes and output lengthes init_len 
= uik", "h.walking_resolution self.out_len = self.decoder.out_len self.view_interval = 10 self.kl_lambda = h.kl_lambda # training settings", "LRELU_SLOPE = 0.1 class ResBlock(nn.Module): def __init__(self, channels, kernel_size=3, dilation=(1, 3, 5)): super().__init__()", "set_view_interval(self, interval:int=None): if interval: self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action", "= plt.figure() ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch) return def random_gain(self, sound:torch.Tensor)", "= xt + x return x def remove_weight_norm(self): for l in self.convs1: remove_weight_norm(l)", "h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs = h.downsample_rates drks = h.downsample_kernel_sizes", "dummy,dummy) if tensorboard: writer = SummaryWriter() writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if", "x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in range(self.num_upsamples): x = F.leaky_relu(x,", "\"\"\" walk through the ratent space and log audio wave. 
\"\"\" if self.current_epoch%self.view_interval", "SummaryWriter import matplotlib.pyplot as plt LRELU_SLOPE = 0.1 class ResBlock(nn.Module): def __init__(self, channels,", "for r in drs: lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo self.L_outs.append(1)", "self.MSE = nn.MSELoss() self.MAE = nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor)", "def on_epoch_end(self) -> None: \"\"\" walk through the ratent space and log audio", "in range(self.num_kernels): if xs is None: xs = self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x)", "= F.leaky_relu(x, LRELU_SLOPE) x = self.ups[i](x) xs = None for j in range(self.num_kernels):", "= h.upsample_rates urks = h.upsample_kernel_sizes uic = h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len self.num_kernels", "= h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len self.num_kernels = len(rks) self.num_upsamples = len(urs) self.conv_pre", "random_sample(mean:torch.Tensor, var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy)", "self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in", "- self.out_len)/2) x = x[:,:,start:start+self.out_len] #x = x.tanh() # grad explosion ? 
return", "self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.ups)): ch = self.channels[i+1] for", "get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling channels self.channels = [] for i in range(len(drs)+1):", "plt.figure() ax = fig.add_subplot() ax.plot(wave) tb.add_figure(\"Walked wave\",fig, self.current_epoch) return def random_gain(self, sound:torch.Tensor) ->", "= [init_len] self.L_outs = [] for r in urs: lo = init_len *", "= previous_wave.shape pad = torch.zeros(n,c,pad_len,dtype=previous_wave.dtype,device=previous_wave.device) previous_wave = torch.cat([pad,previous_wave],dim=-1) enc_in = previous_wave[:,:,-self.n_fft:].to(self.dtype).to(self.device) encoded =", "var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy = torch.randn(1,1,self.h.n_fft) summary(self, dummy) def", "= self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2 out = torch.cat([out1, out2], dim=1) #.tanh() #", "urks = h.upsample_kernel_sizes uic = h.upsample_initial_channel self.out_len = h.n_fft +h.hop_len self.num_kernels = len(rks)", "= self.silence assert len(action.shape) == 3 assert len(previous_wave.shape) == 3 if previous_wave.size(-1) <", "F.softplus(self.conv_post_var(x)) + 1e-8 return mean,var def dual_flow(self, x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 =", "enumerate(zip(urs,urks)): self.ups.append(weight_norm( nn.ConvTranspose1d(self.channels[i], self.channels[i+1],k,u,self.pads[i]) )) self.resblocks = nn.ModuleList() for i in range(len(self.ups)): ch", "maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains = torch.rand_like(maxes) sound = (sound/maxes) * gains", "def on_fit_start(self) -> None: self.logger.log_hyperparams(self.h) def training_step(self, batch:Tuple[torch.Tensor], batch_idx) -> torch.Tensor: \"\"\" batch", "def summary(self): 
dummy = torch.randn(1,self.h.ratent_dim*2,1) summary(self,dummy) def remove_weight_norm(self): print('Removing weight norm...') for l", "self.reset_seed() self.encoder = Encoder(h).type(dtype).to(self.device) self.decoder = Decoder(h).type(dtype).to(self.device) self.n_fft = h.n_fft self.ratent_dim = h.ratent_dim", "= self.forward(x1,x2) out = out_.tanh() # atanh grad explotsion mse = self.MSE(ans, out)", "get expected input lengthes and output lengthes init_len = uik self.L_ins = [init_len]", "l in self.convs1: remove_weight_norm(l) for l in self.convs2: remove_weight_norm(l) class Encoder(nn.Module): def __init__(self,", "act in self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter =", "self.view_interval= interval def predict_one_step(self, action:torch.Tensor,previous_wave:torch.Tensor=None) -> torch.Tensor: \"\"\" action : (-1, ratent_dim, 1)", "x1:torch.Tensor, x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2 = self.forward(x2) if with_random: out1", "x = xs / self.num_kernels x = F.leaky_relu(x) mean = self.conv_post(x) var =", "= None def set_silence(self): self.silence = torch.zeros(1,self.h.sample_ch,self.n_fft,device=self.device,dtype=self.dtype) def set_view_interval(self, interval:int=None): if interval: self.view_interval=", "in urs: lo = init_len * r self.L_ins.append(lo) self.L_outs.append(lo) init_len = lo #", "wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig =", "out1 = self.random_sample(mean1,var1) out2 = self.random_sample(mean2,var2) else: out1,out2 = mean1,mean2 out = torch.cat([out1,", "x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2) out = out_.tanh() # atanh", "torch.log(var) -1 ).sum() / out.size(0) 
#marginal_likelihood = self.BCEwithLogits(torch.atanh(out),0.5*ans+1) #print(True in torch.isnan(out)) marginal_likelihood= F.binary_cross_entropy_with_logits(out,0.5*ans+1,reduction=\"sum\")", "nn import torch.nn.functional as F import pytorch_lightning as pl from torchsummaryX import summary", "kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))) ])", "weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get expected input lengthes and output lengths init_len =", "+h.hop_len self.num_kernels = len(rks) self.num_upsamples = len(urs) self.conv_pre = weight_norm(nn.ConvTranspose1d(h.ratent_dim*2, uic,uik)) # get", "# get upsampling channels self.channels = [uic] ch = uic for i in", "= h.downsample_kernel_sizes dci = h.downsample_initial_channel self.num_kernels = len(rks) self.num_downsamples = len(drs) self.conv_pre =", "= [] for i,r in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r) self.pads.append(pad) # get downsampling", "self.convs2): xt = F.leaky_relu(x, LRELU_SLOPE) xt = c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE) xt", "self.ratent_dim = h.ratent_dim self.walking_steps = int(h.breath_len / h.hop_len) + 1 self.walking_resolution = h.walking_resolution", "x return x def remove_weight_norm(self): for l in self.convs1: remove_weight_norm(l) for l in", "for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class Decoder(nn.Module): def __init__(self, h) ->", "h): super().__init__() self.h = h rks = h.resblock_kernel_sizes rds = h.resblock_dilation_sizes drs =", "channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=1, padding=get_padding(kernel_size, 1))),", "weight_norm(nn.Conv1d(self.channels[-1],h.ratent_dim,self.L_ins[-1])) self.dns.apply(init_weights) 
self.conv_post.apply(init_weights) self.conv_post_var.apply(init_weights) def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for", "self.L_ins = [init_len] self.L_outs = [] for r in urs: lo = init_len", "= xs / self.num_kernels x = F.leaky_relu(x) x = self.conv_post(x) l = x.size(-1)", "@staticmethod def random_sample(mean:torch.Tensor, var:torch.Tensor): return mean + torch.randn_like(mean)*torch.sqrt(var) def summary(self): dummy = torch.randn(1,1,self.h.n_fft)", "#x = x.tanh() # grad explosion ? return x def summary(self): dummy =", "h.n_fft self.L_ins = [init_len] self.L_outs = [] for r in drs: lo =", "torch.Tensor: \"\"\" x1: (-1, 1, n_fft) x2: (-1, 1, n_fft) \"\"\" mean1,var1 =", "print('Removing weight norm...') for l in self.ups: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm()", "sound = (sound/maxes) * gains return sound def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2])", "!=0: return self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution, device=self.device,dtype=self.dtype) wave = None for act in", "= nn.MSELoss() self.MAE = nn.L1Loss() self.actions = walk_ratent_space(self.ratent_dim, self.walking_steps,self.walking_resolution,device=device,dtype=dtype) def forward(self, x1:torch.Tensor,x2:torch.Tensor) ->", "lo = int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo self.L_outs.append(1) # get downsampling paddings", "l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class VoiceBand(pl.LightningModule): def __init__(self, h,dtype:torch.dtype=torch.float,device:torch.device='cpu') -> None:", "pad = get_padding_up(self.L_ins[i],self.L_outs[i],urks[i],r) self.pads.append(pad) # get upsampling channels self.channels = [uic] ch =", "writer.add_graph(self, [dummy,dummy]) def remove_weight_norm(self): 
self.encoder.remove_weight_norm() self.decoder.remove_weight_norm() if __name__ == '__main__': from utils import", "__init__(self, channels, kernel_size=3, dilation=(1, 3, 5)): super().__init__() assert len(dilation) == 3 self.convs1 =", "x2:torch.Tensor,with_random:bool=True) -> torch.Tensor: mean1,var1 = self.forward(x1) mean2,var2 = self.forward(x2) if with_random: out1 =", "get_padding_down, get_padding_up,walk_ratent_space from typing import Tuple from torchsummaryX import summary import numpy as", "F.leaky_relu(x, LRELU_SLOPE) x = self.dns[i](x) xs = None for j in range(self.num_kernels): if", "sound.type(self.dtype) if self.h.random_gain: sound= self.random_gain(sound) x1,x2,ans = sound[:,:,:self.h.n_fft], sound[:,:,-self.h.n_fft:], sound out_,mean,var = self.forward(x1,x2)", "mse = self.MSE(ans, out) mae = self.MAE(ans,out) KL = 0.5*torch.sum( torch.pow(mean,2) + var", "sound.shape maxes= sound.view(n,c*l).abs().max(dim=1,keepdim=True).values.unsqueeze(-1) maxes[maxes==0.0] = 1.0 gains = torch.rand_like(maxes) sound = (sound/maxes) *", "dilation=dilation[0], padding=get_padding(kernel_size, dilation[0]))), weight_norm(nn.Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1], padding=get_padding(kernel_size, dilation[1]))), weight_norm(nn.Conv1d(channels, channels, kernel_size,", "notanh dec_in = torch.cat([encoded,action],dim=1) d_out = self.decoder.forward(dec_in)[:,:,self.n_fft:].type_as(previous_wave) d_out = d_out.tanh() # grad explosion", "self.actions.unsqueeze(1): wave= self.predict_one_step(act,wave) wave = wave.squeeze(0).T.detach().cpu().numpy() # tensorboard logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent", "@torch.no_grad() def on_epoch_end(self) -> None: \"\"\" walk through the ratent space and log", "remove_weight_norm(self): print(\"Removing weight norm...\") for l in self.dns: remove_weight_norm(l) for l in self.resblocks:", "def forward(self, x:torch.Tensor) -> torch.Tensor: x = self.conv_pre(x) for i in 
range(self.num_upsamples): x", "return x def remove_weight_norm(self): for l in self.convs1: remove_weight_norm(l) for l in self.convs2:", "likelihood\",marginal_likelihood) return loss @torch.no_grad() def on_epoch_end(self) -> None: \"\"\" walk through the ratent", "j,(k,d) in enumerate(zip(rks,rds)): self.resblocks.append(ResBlock(ch,k,d)) self.conv_post = weight_norm(nn.Conv1d(self.channels[-1],1,7,1,3)) self.ups.apply(init_weights) self.conv_post.apply(init_weights) def forward(self, x:torch.Tensor) ->", "self.log(\"mae\",mae) self.log(\"KL div\",KL) self.log(\"Marginal likelihood\",marginal_likelihood) return loss @torch.no_grad() def on_epoch_end(self) -> None: \"\"\"", "xt = c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt) x = xt", "import random from torch.utils.tensorboard import SummaryWriter import matplotlib.pyplot as plt LRELU_SLOPE = 0.1", "= int(init_len/r) self.L_outs.append(lo) self.L_ins.append(lo) init_len = lo self.L_outs.append(1) # get downsampling paddings self.pads", "xs = self.resblocks[i*self.num_kernels+j](x) else: xs += self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x", "gains return sound def configure_optimizers(self): optim = torch.optim.AdamW(self.parameters(), self.h.lr,[self.h.adam_b1,self.h.adam_b2]) scheduler = torch.optim.lr_scheduler.ExponentialLR(optim, gamma=self.h.lr_decay)", "for l in self.ups: remove_weight_norm(l) for l in self.resblocks: l.remove_weight_norm() remove_weight_norm(self.conv_pre) remove_weight_norm(self.conv_post) class", "marginal_likelihood + KL * self.kl_lambda #loss = self.kl_lambda * KL + mse self.log(\"loss\",loss)", "= len(rks) self.num_downsamples = len(drs) self.conv_pre = weight_norm(nn.Conv1d(1, dci, 7,1,3)) # get expected", "1.0 gains = torch.rand_like(maxes) sound = (sound/maxes) * gains return sound def configure_optimizers(self):", "None: \"\"\" walk through the ratent space and log audio wave. 
\"\"\" if", "c1(xt) xt = F.leaky_relu(xt, LRELU_SLOPE) xt = c2(xt) x = xt + x", "get downsampling paddings self.pads = [] for i,r in enumerate(drs): pad = get_padding_down(self.L_ins[i],self.L_outs[i],drks[i],r)", "logging tb:SummaryWriter = self.logger.experiment tb.add_audio(\"Ratent space audio\",wave, self.current_epoch,self.h.frame_rate) fig = plt.figure() ax =", "+= self.resblocks[i*self.num_kernels+j](x) x = xs / self.num_kernels x = F.leaky_relu(x) mean = self.conv_post(x)" ]
[ "perceptron.\"\"\" # could be useful later self.features = list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO", "Student side autograding was added by <NAME>, <NAME>, and <NAME> (<EMAIL>). \"\"\" import", "autograders were primarily created by <NAME> (<EMAIL>) and <NAME> (<EMAIL>). Student side autograding", "train(self, training_data, training_labels, validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\" # could be useful later", "super().__init__(legal_labels, max_iterations) self.weights = util.Counter() def classify(self, data): \"\"\"Classify the data points. Data", "consequences of plagiarism and acknowledge that the assessor of this assignment may, for", "in zip(training_data, training_labels): # *** YOUR CODE HERE *** # Gets the guess", "assessing this assignment: - Reproduce this assignment and provide a copy to another", "The Pacman AI projects were developed at UC Berkeley. The core projects and", "clear attribution to UC Berkeley, including a link to http://ai.berkeley.edu. Attribution Information: The", "CSI-480, Fall 2018 The following code was adapted by <NAME> (<EMAIL>) from the", "by <NAME>, <NAME>, and <NAME> (<EMAIL>). \"\"\" import util from perceptron import PerceptronClassifier", "training_labels): # *** YOUR CODE HERE *** # Gets the guess action, then", "were developed at UC Berkeley. 
The core projects and autograders were primarily created", "Date: Nov 30, 2018 11:59 PM Certification of Authenticity: I certify that this", "legal_moves in data: vectors = util.Counter() for l in legal_moves: vectors[l] = self.weights", "2018 11:59 PM Certification of Authenticity: I certify that this is entirely my", "a copy of this assignment on its database for - the purpose of", "= util.Counter() for l in legal_moves: vectors[l] = self.weights * datum[l] guesses.append(vectors.arg_max()) return", "- the purpose of future plagiarism checking) Champlain College CSI-480, Fall 2018 The", "free to use or extend these projects for educational purposes provided that (1)", "# DO NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR # THE", "Learning Due Date: Nov 30, 2018 11:59 PM Certification of Authenticity: I certify", "side autograding was added by <NAME>, <NAME>, and <NAME> (<EMAIL>). \"\"\" import util", "that the assessor of this assignment may, for the purpose of assessing this", "NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR # THE AUTOGRADER WILL", "work, except where I have given fully-documented references to the work of others.", "and attribution below). ---------------------- Licensing Information: You are free to use or extend", "apprenticeeship learning in pacman.\"\"\" def __init__(self, legal_labels, max_iterations): \"\"\"Initialize the perceptron. Args: legal_labels:", "data points. Data contains a list of (datum, legal moves) Datum is a", "given fully-documented references to the work of others. 
I understand the definition and", "training_data, training_labels, validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\" # could be useful later self.features", "useful later self.features = list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO OUT YOUR WEIGHTS BEFORE", "weights guess = self.classify([(datum, legal_moves)])[0] if guess != label: self.weights += datum[label] self.weights", "label in zip(training_data, training_labels): # *** YOUR CODE HERE *** # Gets the", "\"\"\"Initialize the perceptron. Args: legal_labels: list of legal_labels max_iterations: the max number of", "a list of (datum, legal moves) Datum is a Counter representing the features", "perceptron import PerceptronClassifier PRINT = True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship learning", "then updates the weights guess = self.classify([(datum, legal_moves)])[0] if guess != label: self.weights", "\"\"\" super().__init__(legal_labels, max_iterations) self.weights = util.Counter() def classify(self, data): \"\"\"Classify the data points.", "BEFORE STARTING TRAINING, OR # THE AUTOGRADER WILL LIKELY DEDUCT POINTS. for iteration", "11:59 PM Certification of Authenticity: I certify that this is entirely my own", "zip(training_data, training_labels): # *** YOUR CODE HERE *** # Gets the guess action,", "the purpose of assessing this assignment: - Reproduce this assignment and provide a", "iteration in range(self.max_iterations): print(\"Starting iteration \", iteration, \"...\") for (datum, legal_moves), label in", "another member of academic - staff; and/or Communicate a copy of this assignment", "of Authenticity: I certify that this is entirely my own work, except where", "list of (datum, legal moves) Datum is a Counter representing the features of", "for educational purposes provided that (1) you do not distribute or publish solutions,", "except where I have given fully-documented references to the work of others. 
I", "database for - the purpose of future plagiarism checking) Champlain College CSI-480, Fall", "def train(self, training_data, training_labels, validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\" # could be useful", "provide a copy to another member of academic - staff; and/or Communicate a", "of assessing this assignment: - Reproduce this assignment and provide a copy to", "PerceptronClassifier for apprenticeeship learning in pacman.\"\"\" def __init__(self, legal_labels, max_iterations): \"\"\"Initialize the perceptron.", "core projects and autograders were primarily created by <NAME> (<EMAIL>) and <NAME> (<EMAIL>).", "is a Counter representing the features of each GameState. legal_moves is a list", "member of academic - staff; and/or Communicate a copy of this assignment to", "legal_moves is a list of legal moves for that GameState. \"\"\" guesses =", "Datum is a Counter representing the features of each GameState. legal_moves is a", "(<EMAIL>). \"\"\" import util from perceptron import PerceptronClassifier PRINT = True class PerceptronClassifierPacman(PerceptronClassifier):", "self.weights = util.Counter() def classify(self, data): \"\"\"Classify the data points. Data contains a", "provide clear attribution to UC Berkeley, including a link to http://ai.berkeley.edu. Attribution Information:", "You are free to use or extend these projects for educational purposes provided", "for that GameState. \"\"\" guesses = [] for datum, legal_moves in data: vectors", "WEIGHTS BEFORE STARTING TRAINING, OR # THE AUTOGRADER WILL LIKELY DEDUCT POINTS. for", "Class: CSI-480-01 Assignment: PA 5 -- Supervised Learning Due Date: Nov 30, 2018", "for apprenticeeship learning in pacman.\"\"\" def __init__(self, legal_labels, max_iterations): \"\"\"Initialize the perceptron. Args:", "classify(self, data): \"\"\"Classify the data points. Data contains a list of (datum, legal", "primarily created by <NAME> (<EMAIL>) and <NAME> (<EMAIL>). 
Student side autograding was added", "WILL LIKELY DEDUCT POINTS. for iteration in range(self.max_iterations): print(\"Starting iteration \", iteration, \"...\")", "for l in legal_moves: vectors[l] = self.weights * datum[l] guesses.append(vectors.arg_max()) return guesses def", "assessor of this assignment may, for the purpose of assessing this assignment: -", "# could be useful later self.features = list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO OUT", "= list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR", "Attribution Information: The Pacman AI projects were developed at UC Berkeley. The core", "features of each GameState. legal_moves is a list of legal moves for that", "Berkeley, including a link to http://ai.berkeley.edu. Attribution Information: The Pacman AI projects were", "educational purposes provided that (1) you do not distribute or publish solutions, (2)", "the data points. Data contains a list of (datum, legal moves) Datum is", "Data contains a list of (datum, legal moves) Datum is a Counter representing", "guesses = [] for datum, legal_moves in data: vectors = util.Counter() for l", "of future plagiarism checking) Champlain College CSI-480, Fall 2018 The following code was", "assignment to a plagiarism checking - service (which may then retain a copy", "work of others. I understand the definition and consequences of plagiarism and acknowledge", "\"...\") for (datum, legal_moves), label in zip(training_data, training_labels): # *** YOUR CODE HERE", "# THE AUTOGRADER WILL LIKELY DEDUCT POINTS. for iteration in range(self.max_iterations): print(\"Starting iteration", "GameState. 
\"\"\" guesses = [] for datum, legal_moves in data: vectors = util.Counter()", "future plagiarism checking) Champlain College CSI-480, Fall 2018 The following code was adapted", "train for \"\"\" super().__init__(legal_labels, max_iterations) self.weights = util.Counter() def classify(self, data): \"\"\"Classify the", "added by <NAME>, <NAME>, and <NAME> (<EMAIL>). \"\"\" import util from perceptron import", "(which may then retain a copy of this assignment on its database for", "to http://ai.berkeley.edu. Attribution Information: The Pacman AI projects were developed at UC Berkeley.", "assignment: - Reproduce this assignment and provide a copy to another member of", "Berkeley Pacman Projects (see license and attribution below). ---------------------- Licensing Information: You are", "of plagiarism and acknowledge that the assessor of this assignment may, for the", "PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship learning in pacman.\"\"\" def __init__(self, legal_labels, max_iterations): \"\"\"Initialize", "below). ---------------------- Licensing Information: You are free to use or extend these projects", "*** YOUR CODE HERE *** # Gets the guess action, then updates the", "developed at UC Berkeley. The core projects and autograders were primarily created by", "Licensing Information: You are free to use or extend these projects for educational", "STARTING TRAINING, OR # THE AUTOGRADER WILL LIKELY DEDUCT POINTS. for iteration in", "Authenticity: I certify that this is entirely my own work, except where I", "DEDUCT POINTS. 
for iteration in range(self.max_iterations): print(\"Starting iteration \", iteration, \"...\") for (datum,", "its database for - the purpose of future plagiarism checking) Champlain College CSI-480,", "the perceptron.\"\"\" # could be useful later self.features = list(training_data[0][0]['Stop'].keys()) # DO NOT", "and acknowledge that the assessor of this assignment may, for the purpose of", "adapted by <NAME> (<EMAIL>) from the UC Berkeley Pacman Projects (see license and", "def __init__(self, legal_labels, max_iterations): \"\"\"Initialize the perceptron. Args: legal_labels: list of legal_labels max_iterations:", "of this assignment may, for the purpose of assessing this assignment: - Reproduce", "PA 5 -- Supervised Learning Due Date: Nov 30, 2018 11:59 PM Certification", "guesses def train(self, training_data, training_labels, validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\" # could be", "= util.Counter() def classify(self, data): \"\"\"Classify the data points. Data contains a list", "entirely my own work, except where I have given fully-documented references to the", "staff; and/or Communicate a copy of this assignment to a plagiarism checking -", "OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR # THE AUTOGRADER WILL LIKELY DEDUCT", "autograding was added by <NAME>, <NAME>, and <NAME> (<EMAIL>). \"\"\" import util from", "class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship learning in pacman.\"\"\" def __init__(self, legal_labels, max_iterations):", "is a list of legal moves for that GameState. \"\"\" guesses = []", "(datum, legal_moves), label in zip(training_data, training_labels): # *** YOUR CODE HERE *** #", "of this assignment on its database for - the purpose of future plagiarism", "# *** YOUR CODE HERE *** # Gets the guess action, then updates", "POINTS. 
for iteration in range(self.max_iterations): print(\"Starting iteration \", iteration, \"...\") for (datum, legal_moves),", "Supervised Learning Due Date: Nov 30, 2018 11:59 PM Certification of Authenticity: I", "in pacman. Author: <NAME>, <NAME>, and <NAME> Class: CSI-480-01 Assignment: PA 5 --", "for (datum, legal_moves), label in zip(training_data, training_labels): # *** YOUR CODE HERE ***", "max_iterations) self.weights = util.Counter() def classify(self, data): \"\"\"Classify the data points. Data contains", "HERE *** # Gets the guess action, then updates the weights guess =", "\"\"\" guesses = [] for datum, legal_moves in data: vectors = util.Counter() for", "list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR #", "solutions, (2) you retain this notice, and (3) you provide clear attribution to", "own work, except where I have given fully-documented references to the work of", "guess = self.classify([(datum, legal_moves)])[0] if guess != label: self.weights += datum[label] self.weights -=", "for \"\"\" super().__init__(legal_labels, max_iterations) self.weights = util.Counter() def classify(self, data): \"\"\"Classify the data", "legal_moves), label in zip(training_data, training_labels): # *** YOUR CODE HERE *** # Gets", "ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR # THE AUTOGRADER WILL LIKELY", "CSI-480-01 Assignment: PA 5 -- Supervised Learning Due Date: Nov 30, 2018 11:59", "import util from perceptron import PerceptronClassifier PRINT = True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier", "print(\"Starting iteration \", iteration, \"...\") for (datum, legal_moves), label in zip(training_data, training_labels): #", "\"\"\"Perceptron implementation for apprenticeship learning in pacman. 
Author: <NAME>, <NAME>, and <NAME> Class:", "guess action, then updates the weights guess = self.classify([(datum, legal_moves)])[0] if guess !=", "PerceptronClassifier PRINT = True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship learning in pacman.\"\"\"", "2018 The following code was adapted by <NAME> (<EMAIL>) from the UC Berkeley", "__init__(self, legal_labels, max_iterations): \"\"\"Initialize the perceptron. Args: legal_labels: list of legal_labels max_iterations: the", "I understand the definition and consequences of plagiarism and acknowledge that the assessor", "PRINT = True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship learning in pacman.\"\"\" def", "this assignment: - Reproduce this assignment and provide a copy to another member", "later self.features = list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING", "* datum[l] guesses.append(vectors.arg_max()) return guesses def train(self, training_data, training_labels, validation_data, validation_labels): \"\"\"Train the", "in pacman.\"\"\" def __init__(self, legal_labels, max_iterations): \"\"\"Initialize the perceptron. Args: legal_labels: list of", "learning in pacman.\"\"\" def __init__(self, legal_labels, max_iterations): \"\"\"Initialize the perceptron. Args: legal_labels: list", "retain this notice, and (3) you provide clear attribution to UC Berkeley, including", "UC Berkeley. The core projects and autograders were primarily created by <NAME> (<EMAIL>)", "\", iteration, \"...\") for (datum, legal_moves), label in zip(training_data, training_labels): # *** YOUR", "l in legal_moves: vectors[l] = self.weights * datum[l] guesses.append(vectors.arg_max()) return guesses def train(self,", "moves for that GameState. 
\"\"\" guesses = [] for datum, legal_moves in data:", "academic - staff; and/or Communicate a copy of this assignment to a plagiarism", "and (3) you provide clear attribution to UC Berkeley, including a link to", "Berkeley. The core projects and autograders were primarily created by <NAME> (<EMAIL>) and", "Counter representing the features of each GameState. legal_moves is a list of legal", "-- Supervised Learning Due Date: Nov 30, 2018 11:59 PM Certification of Authenticity:", "legal_labels, max_iterations): \"\"\"Initialize the perceptron. Args: legal_labels: list of legal_labels max_iterations: the max", "publish solutions, (2) you retain this notice, and (3) you provide clear attribution", "that GameState. \"\"\" guesses = [] for datum, legal_moves in data: vectors =", "I have given fully-documented references to the work of others. I understand the", "Fall 2018 The following code was adapted by <NAME> (<EMAIL>) from the UC", "\"\"\"Classify the data points. Data contains a list of (datum, legal moves) Datum", "max_iterations: the max number of iterations to train for \"\"\" super().__init__(legal_labels, max_iterations) self.weights", "or publish solutions, (2) you retain this notice, and (3) you provide clear", "list of legal moves for that GameState. \"\"\" guesses = [] for datum,", "= self.weights * datum[l] guesses.append(vectors.arg_max()) return guesses def train(self, training_data, training_labels, validation_data, validation_labels):", "on its database for - the purpose of future plagiarism checking) Champlain College", "legal moves for that GameState. 
\"\"\" guesses = [] for datum, legal_moves in", "was adapted by <NAME> (<EMAIL>) from the UC Berkeley Pacman Projects (see license", "list of legal_labels max_iterations: the max number of iterations to train for \"\"\"", "copy of this assignment on its database for - the purpose of future", "action, then updates the weights guess = self.classify([(datum, legal_moves)])[0] if guess != label:", "may then retain a copy of this assignment on its database for -", "retain a copy of this assignment on its database for - the purpose", "http://ai.berkeley.edu. Attribution Information: The Pacman AI projects were developed at UC Berkeley. The", "this assignment to a plagiarism checking - service (which may then retain a", "(datum, legal moves) Datum is a Counter representing the features of each GameState.", "number of iterations to train for \"\"\" super().__init__(legal_labels, max_iterations) self.weights = util.Counter() def", "The following code was adapted by <NAME> (<EMAIL>) from the UC Berkeley Pacman", "you provide clear attribution to UC Berkeley, including a link to http://ai.berkeley.edu. Attribution", "projects were developed at UC Berkeley. The core projects and autograders were primarily", "purpose of assessing this assignment: - Reproduce this assignment and provide a copy", "30, 2018 11:59 PM Certification of Authenticity: I certify that this is entirely", "link to http://ai.berkeley.edu. Attribution Information: The Pacman AI projects were developed at UC", "representing the features of each GameState. legal_moves is a list of legal moves", "UC Berkeley Pacman Projects (see license and attribution below). ---------------------- Licensing Information: You", "LIKELY DEDUCT POINTS. 
for iteration in range(self.max_iterations): print(\"Starting iteration \", iteration, \"...\") for", "legal_moves: vectors[l] = self.weights * datum[l] guesses.append(vectors.arg_max()) return guesses def train(self, training_data, training_labels,", "self.features = list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING,", "Reproduce this assignment and provide a copy to another member of academic -", "(<EMAIL>) and <NAME> (<EMAIL>). Student side autograding was added by <NAME>, <NAME>, and", "projects and autograders were primarily created by <NAME> (<EMAIL>) and <NAME> (<EMAIL>). Student", "attribution below). ---------------------- Licensing Information: You are free to use or extend these", "Author: <NAME>, <NAME>, and <NAME> Class: CSI-480-01 Assignment: PA 5 -- Supervised Learning", "plagiarism and acknowledge that the assessor of this assignment may, for the purpose", "Projects (see license and attribution below). ---------------------- Licensing Information: You are free to", "<NAME> (<EMAIL>) and <NAME> (<EMAIL>). Student side autograding was added by <NAME>, <NAME>,", "understand the definition and consequences of plagiarism and acknowledge that the assessor of", "max_iterations): \"\"\"Initialize the perceptron. Args: legal_labels: list of legal_labels max_iterations: the max number", "to UC Berkeley, including a link to http://ai.berkeley.edu. Attribution Information: The Pacman AI", "the assessor of this assignment may, for the purpose of assessing this assignment:", "import PerceptronClassifier PRINT = True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship learning in", "range(self.max_iterations): print(\"Starting iteration \", iteration, \"...\") for (datum, legal_moves), label in zip(training_data, training_labels):", "this assignment on its database for - the purpose of future plagiarism checking)", "a link to http://ai.berkeley.edu. 
Attribution Information: The Pacman AI projects were developed at", "this notice, and (3) you provide clear attribution to UC Berkeley, including a", "- service (which may then retain a copy of this assignment on its", "Due Date: Nov 30, 2018 11:59 PM Certification of Authenticity: I certify that", "CODE HERE *** # Gets the guess action, then updates the weights guess", "util.Counter() def classify(self, data): \"\"\"Classify the data points. Data contains a list of", "util.Counter() for l in legal_moves: vectors[l] = self.weights * datum[l] guesses.append(vectors.arg_max()) return guesses", "and consequences of plagiarism and acknowledge that the assessor of this assignment may,", "a list of legal moves for that GameState. \"\"\" guesses = [] for", "and provide a copy to another member of academic - staff; and/or Communicate", "code was adapted by <NAME> (<EMAIL>) from the UC Berkeley Pacman Projects (see", "<NAME> (<EMAIL>) from the UC Berkeley Pacman Projects (see license and attribution below).", "a copy of this assignment to a plagiarism checking - service (which may", "for apprenticeship learning in pacman. Author: <NAME>, <NAME>, and <NAME> Class: CSI-480-01 Assignment:", "distribute or publish solutions, (2) you retain this notice, and (3) you provide", "Pacman Projects (see license and attribution below). ---------------------- Licensing Information: You are free", "checking) Champlain College CSI-480, Fall 2018 The following code was adapted by <NAME>", "pacman. Author: <NAME>, <NAME>, and <NAME> Class: CSI-480-01 Assignment: PA 5 -- Supervised", "<NAME>, and <NAME> (<EMAIL>). \"\"\" import util from perceptron import PerceptronClassifier PRINT =", "that this is entirely my own work, except where I have given fully-documented", "Information: The Pacman AI projects were developed at UC Berkeley. 
The core projects", "not distribute or publish solutions, (2) you retain this notice, and (3) you", "The core projects and autograders were primarily created by <NAME> (<EMAIL>) and <NAME>", "of legal_labels max_iterations: the max number of iterations to train for \"\"\" super().__init__(legal_labels,", "vectors = util.Counter() for l in legal_moves: vectors[l] = self.weights * datum[l] guesses.append(vectors.arg_max())", "have given fully-documented references to the work of others. I understand the definition", "to train for \"\"\" super().__init__(legal_labels, max_iterations) self.weights = util.Counter() def classify(self, data): \"\"\"Classify", "plagiarism checking - service (which may then retain a copy of this assignment", "points. Data contains a list of (datum, legal moves) Datum is a Counter", "I certify that this is entirely my own work, except where I have", "Certification of Authenticity: I certify that this is entirely my own work, except", "assignment and provide a copy to another member of academic - staff; and/or", "of others. I understand the definition and consequences of plagiarism and acknowledge that", "def classify(self, data): \"\"\"Classify the data points. Data contains a list of (datum,", "for iteration in range(self.max_iterations): print(\"Starting iteration \", iteration, \"...\") for (datum, legal_moves), label", "apprenticeship learning in pacman. Author: <NAME>, <NAME>, and <NAME> Class: CSI-480-01 Assignment: PA", "legal_labels: list of legal_labels max_iterations: the max number of iterations to train for", "of legal moves for that GameState. \"\"\" guesses = [] for datum, legal_moves", "OR # THE AUTOGRADER WILL LIKELY DEDUCT POINTS. for iteration in range(self.max_iterations): print(\"Starting", "UC Berkeley, including a link to http://ai.berkeley.edu. Attribution Information: The Pacman AI projects", "GameState. legal_moves is a list of legal moves for that GameState. 
\"\"\" guesses", "a plagiarism checking - service (which may then retain a copy of this", "(see license and attribution below). ---------------------- Licensing Information: You are free to use", "5 -- Supervised Learning Due Date: Nov 30, 2018 11:59 PM Certification of", "to use or extend these projects for educational purposes provided that (1) you", "license and attribution below). ---------------------- Licensing Information: You are free to use or", "\"\"\"Train the perceptron.\"\"\" # could be useful later self.features = list(training_data[0][0]['Stop'].keys()) # DO", "certify that this is entirely my own work, except where I have given", "util from perceptron import PerceptronClassifier PRINT = True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for", "updates the weights guess = self.classify([(datum, legal_moves)])[0] if guess != label: self.weights +=", "the purpose of future plagiarism checking) Champlain College CSI-480, Fall 2018 The following", "checking - service (which may then retain a copy of this assignment on", "YOUR CODE HERE *** # Gets the guess action, then updates the weights", "service (which may then retain a copy of this assignment on its database", "datum, legal_moves in data: vectors = util.Counter() for l in legal_moves: vectors[l] =", "data): \"\"\"Classify the data points. Data contains a list of (datum, legal moves)", "TRAINING, OR # THE AUTOGRADER WILL LIKELY DEDUCT POINTS. for iteration in range(self.max_iterations):", "of academic - staff; and/or Communicate a copy of this assignment to a", "this assignment may, for the purpose of assessing this assignment: - Reproduce this", "were primarily created by <NAME> (<EMAIL>) and <NAME> (<EMAIL>). Student side autograding was", "in data: vectors = util.Counter() for l in legal_moves: vectors[l] = self.weights *", "fully-documented references to the work of others. I understand the definition and consequences", "AUTOGRADER WILL LIKELY DEDUCT POINTS. 
for iteration in range(self.max_iterations): print(\"Starting iteration \", iteration,", "assignment may, for the purpose of assessing this assignment: - Reproduce this assignment", "including a link to http://ai.berkeley.edu. Attribution Information: The Pacman AI projects were developed", "<NAME> (<EMAIL>). Student side autograding was added by <NAME>, <NAME>, and <NAME> (<EMAIL>).", "perceptron. Args: legal_labels: list of legal_labels max_iterations: the max number of iterations to", "by <NAME> (<EMAIL>) and <NAME> (<EMAIL>). Student side autograding was added by <NAME>,", "iteration \", iteration, \"...\") for (datum, legal_moves), label in zip(training_data, training_labels): # ***", "you do not distribute or publish solutions, (2) you retain this notice, and", "of iterations to train for \"\"\" super().__init__(legal_labels, max_iterations) self.weights = util.Counter() def classify(self,", "notice, and (3) you provide clear attribution to UC Berkeley, including a link", "implementation for apprenticeship learning in pacman. Author: <NAME>, <NAME>, and <NAME> Class: CSI-480-01", "my own work, except where I have given fully-documented references to the work", "YOUR WEIGHTS BEFORE STARTING TRAINING, OR # THE AUTOGRADER WILL LIKELY DEDUCT POINTS.", "from the UC Berkeley Pacman Projects (see license and attribution below). ---------------------- Licensing", "the perceptron. Args: legal_labels: list of legal_labels max_iterations: the max number of iterations", "this assignment and provide a copy to another member of academic - staff;", "Assignment: PA 5 -- Supervised Learning Due Date: Nov 30, 2018 11:59 PM", "provided that (1) you do not distribute or publish solutions, (2) you retain", "THE AUTOGRADER WILL LIKELY DEDUCT POINTS. 
for iteration in range(self.max_iterations): print(\"Starting iteration \",", "---------------------- Licensing Information: You are free to use or extend these projects for", "= [] for datum, legal_moves in data: vectors = util.Counter() for l in", "vectors[l] = self.weights * datum[l] guesses.append(vectors.arg_max()) return guesses def train(self, training_data, training_labels, validation_data,", "for datum, legal_moves in data: vectors = util.Counter() for l in legal_moves: vectors[l]", "AI projects were developed at UC Berkeley. The core projects and autograders were", "use or extend these projects for educational purposes provided that (1) you do", "<NAME> (<EMAIL>). \"\"\" import util from perceptron import PerceptronClassifier PRINT = True class", "each GameState. legal_moves is a list of legal moves for that GameState. \"\"\"", "- Reproduce this assignment and provide a copy to another member of academic", "or extend these projects for educational purposes provided that (1) you do not", "<NAME>, <NAME>, and <NAME> Class: CSI-480-01 Assignment: PA 5 -- Supervised Learning Due", "# Gets the guess action, then updates the weights guess = self.classify([(datum, legal_moves)])[0]", "(3) you provide clear attribution to UC Berkeley, including a link to http://ai.berkeley.edu.", "of each GameState. legal_moves is a list of legal moves for that GameState.", "and autograders were primarily created by <NAME> (<EMAIL>) and <NAME> (<EMAIL>). 
Student side", "= self.classify([(datum, legal_moves)])[0] if guess != label: self.weights += datum[label] self.weights -= datum[guess]", "to another member of academic - staff; and/or Communicate a copy of this", "PM Certification of Authenticity: I certify that this is entirely my own work,", "and/or Communicate a copy of this assignment to a plagiarism checking - service", "Communicate a copy of this assignment to a plagiarism checking - service (which", "the weights guess = self.classify([(datum, legal_moves)])[0] if guess != label: self.weights += datum[label]", "these projects for educational purposes provided that (1) you do not distribute or", "pacman.\"\"\" def __init__(self, legal_labels, max_iterations): \"\"\"Initialize the perceptron. Args: legal_labels: list of legal_labels", "are free to use or extend these projects for educational purposes provided that", "copy to another member of academic - staff; and/or Communicate a copy of", "for the purpose of assessing this assignment: - Reproduce this assignment and provide", "and <NAME> (<EMAIL>). Student side autograding was added by <NAME>, <NAME>, and <NAME>", "purposes provided that (1) you do not distribute or publish solutions, (2) you", "then retain a copy of this assignment on its database for - the", "max number of iterations to train for \"\"\" super().__init__(legal_labels, max_iterations) self.weights = util.Counter()", "was added by <NAME>, <NAME>, and <NAME> (<EMAIL>). 
\"\"\" import util from perceptron", "copy of this assignment to a plagiarism checking - service (which may then", "training_labels, validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\" # could be useful later self.features =", "plagiarism checking) Champlain College CSI-480, Fall 2018 The following code was adapted by", "to a plagiarism checking - service (which may then retain a copy of", "Information: You are free to use or extend these projects for educational purposes", "(<EMAIL>) from the UC Berkeley Pacman Projects (see license and attribution below). ----------------------", "a Counter representing the features of each GameState. legal_moves is a list of", "return guesses def train(self, training_data, training_labels, validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\" # could", "may, for the purpose of assessing this assignment: - Reproduce this assignment and", "purpose of future plagiarism checking) Champlain College CSI-480, Fall 2018 The following code", "legal_labels max_iterations: the max number of iterations to train for \"\"\" super().__init__(legal_labels, max_iterations)", "\"\"\"A PerceptronClassifier for apprenticeeship learning in pacman.\"\"\" def __init__(self, legal_labels, max_iterations): \"\"\"Initialize the", "validation_labels): \"\"\"Train the perceptron.\"\"\" # could be useful later self.features = list(training_data[0][0]['Stop'].keys()) #", "of this assignment to a plagiarism checking - service (which may then retain", "(1) you do not distribute or publish solutions, (2) you retain this notice,", "(2) you retain this notice, and (3) you provide clear attribution to UC", "acknowledge that the assessor of this assignment may, for the purpose of assessing", "legal moves) Datum is a Counter representing the features of each GameState. 
legal_moves", "data: vectors = util.Counter() for l in legal_moves: vectors[l] = self.weights * datum[l]", "Args: legal_labels: list of legal_labels max_iterations: the max number of iterations to train", "is entirely my own work, except where I have given fully-documented references to", "that (1) you do not distribute or publish solutions, (2) you retain this", "the UC Berkeley Pacman Projects (see license and attribution below). ---------------------- Licensing Information:", "could be useful later self.features = list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO OUT YOUR", "this is entirely my own work, except where I have given fully-documented references", "assignment on its database for - the purpose of future plagiarism checking) Champlain", "following code was adapted by <NAME> (<EMAIL>) from the UC Berkeley Pacman Projects", "at UC Berkeley. The core projects and autograders were primarily created by <NAME>", "Champlain College CSI-480, Fall 2018 The following code was adapted by <NAME> (<EMAIL>)", "moves) Datum is a Counter representing the features of each GameState. legal_moves is", "DO NOT ZERO OUT YOUR WEIGHTS BEFORE STARTING TRAINING, OR # THE AUTOGRADER", "attribution to UC Berkeley, including a link to http://ai.berkeley.edu. Attribution Information: The Pacman", "definition and consequences of plagiarism and acknowledge that the assessor of this assignment", "and <NAME> (<EMAIL>). \"\"\" import util from perceptron import PerceptronClassifier PRINT = True", "by <NAME> (<EMAIL>) from the UC Berkeley Pacman Projects (see license and attribution", "(<EMAIL>). Student side autograding was added by <NAME>, <NAME>, and <NAME> (<EMAIL>). \"\"\"", "the work of others. 
I understand the definition and consequences of plagiarism and", "of (datum, legal moves) Datum is a Counter representing the features of each", "be useful later self.features = list(training_data[0][0]['Stop'].keys()) # DO NOT ZERO OUT YOUR WEIGHTS", "self.weights * datum[l] guesses.append(vectors.arg_max()) return guesses def train(self, training_data, training_labels, validation_data, validation_labels): \"\"\"Train", "projects for educational purposes provided that (1) you do not distribute or publish", "<NAME> Class: CSI-480-01 Assignment: PA 5 -- Supervised Learning Due Date: Nov 30,", "a copy to another member of academic - staff; and/or Communicate a copy", "in legal_moves: vectors[l] = self.weights * datum[l] guesses.append(vectors.arg_max()) return guesses def train(self, training_data,", "validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\" # could be useful later self.features = list(training_data[0][0]['Stop'].keys())", "extend these projects for educational purposes provided that (1) you do not distribute", "Pacman AI projects were developed at UC Berkeley. The core projects and autograders", "= True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship learning in pacman.\"\"\" def __init__(self,", "learning in pacman. Author: <NAME>, <NAME>, and <NAME> Class: CSI-480-01 Assignment: PA 5", "*** # Gets the guess action, then updates the weights guess = self.classify([(datum,", "created by <NAME> (<EMAIL>) and <NAME> (<EMAIL>). 
Student side autograding was added by", "College CSI-480, Fall 2018 The following code was adapted by <NAME> (<EMAIL>) from", "in range(self.max_iterations): print(\"Starting iteration \", iteration, \"...\") for (datum, legal_moves), label in zip(training_data,", "iterations to train for \"\"\" super().__init__(legal_labels, max_iterations) self.weights = util.Counter() def classify(self, data):", "Gets the guess action, then updates the weights guess = self.classify([(datum, legal_moves)])[0] if", "where I have given fully-documented references to the work of others. I understand", "[] for datum, legal_moves in data: vectors = util.Counter() for l in legal_moves:", "<NAME>, <NAME>, and <NAME> (<EMAIL>). \"\"\" import util from perceptron import PerceptronClassifier PRINT", "do not distribute or publish solutions, (2) you retain this notice, and (3)", "from perceptron import PerceptronClassifier PRINT = True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship", "- staff; and/or Communicate a copy of this assignment to a plagiarism checking", "to the work of others. I understand the definition and consequences of plagiarism", "references to the work of others. 
I understand the definition and consequences of", "iteration, \"...\") for (datum, legal_moves), label in zip(training_data, training_labels): # *** YOUR CODE", "the guess action, then updates the weights guess = self.classify([(datum, legal_moves)])[0] if guess", "guesses.append(vectors.arg_max()) return guesses def train(self, training_data, training_labels, validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\" #", "you retain this notice, and (3) you provide clear attribution to UC Berkeley,", "\"\"\" import util from perceptron import PerceptronClassifier PRINT = True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A", "True class PerceptronClassifierPacman(PerceptronClassifier): \"\"\"A PerceptronClassifier for apprenticeeship learning in pacman.\"\"\" def __init__(self, legal_labels,", "Nov 30, 2018 11:59 PM Certification of Authenticity: I certify that this is", "the features of each GameState. legal_moves is a list of legal moves for", "the definition and consequences of plagiarism and acknowledge that the assessor of this", "datum[l] guesses.append(vectors.arg_max()) return guesses def train(self, training_data, training_labels, validation_data, validation_labels): \"\"\"Train the perceptron.\"\"\"", "others. I understand the definition and consequences of plagiarism and acknowledge that the", "contains a list of (datum, legal moves) Datum is a Counter representing the", "for - the purpose of future plagiarism checking) Champlain College CSI-480, Fall 2018", "the max number of iterations to train for \"\"\" super().__init__(legal_labels, max_iterations) self.weights =", "<NAME>, and <NAME> Class: CSI-480-01 Assignment: PA 5 -- Supervised Learning Due Date:", "and <NAME> Class: CSI-480-01 Assignment: PA 5 -- Supervised Learning Due Date: Nov" ]
[ "(28 chars max) :param max_queue_depth: The target port queue depth for this target", "- operation is rdb_modify False - operation is rdb_create :param array_id: Primary key", "The queue depth assigned to array LUNs from this array. :param model: The", "'None' ], False ], }, { 'config-summary': [ StorageArrayConfigSummary, True ], } )", "# 6 properties from arrayfailovertype import Arrayfailovertype # 0 properties from arrayerrortype import", "storage_array_port import StorageArrayPort # 9 properties from connectiontype import Connectiontype # 0 properties", "port (64 chars). :param array_name: The name of the array profile to update.", "'None' ], False ], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False", "for. (28 chars) :param ownership_type: Option that allows the user to select which", "will be returned. (28 chars) \"\"\" return self.request( \"storage-array-list-info\", { 'array_name': [ array_name,", "port to remove. :param wwnn: The WWNN of the array port to remove.", "\"\"\" Remove one port from an array profile record :param wwpn: The WWPN", "port table to the D-Blade :param wwpn: The WWPN of the array port", "and their associated arrays :param array_name: When supplied, only port records for the", "in array profile RDB to the D-Blade :param is_modify: A boolean value which", "], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_id': [", "The model of the array. (16 chars max) :param options: Array profile specific", "array_id, 'array-id', [ int, 'None' ], False ], }, { } ) def", "} ) def storage_array_ports_list_info(self, array_name=None): \"\"\" generate a list of online array ports", "'array-stat-info': [ StorageArrayStatsInfo, True ], } ) def storage_array_profile_sync(self): \"\"\" Purge a node's", "if it's an rdb_modify operation. 
True - operation is rdb_modify False - operation", "int, 'None' ], False ], }, { } ) def storage_array_stats_list_info(self): \"\"\" Used", "World wide node name of array's target port (64 chars). :param array_name: The", "wwnn, 'wwnn', [ basestring, 'None' ], False ], 'is_modify': [ is_modify, 'is-modify', [", "update. :param vendor: The name of the array's manufacturer. (8 chars) :param network_address:", "wwnn, array_name, max_queue_depth=None): \"\"\" Update an array port with new or changed information.", "from storage_array_profile import StorageArrayProfile # 16 properties from storage_array_port_stats_info import StorageArrayPortStatsInfo # 19", "to update. (28 chars max) :param new_name: The new name to assign to", "of the array port whose attributes changed :param wwnn: The WWNN of the", "list of array LUNs associated with the named array. :param array_name: The name", "'None' ], False ], 'array_id': [ array_id, 'array-id', [ int, 'None' ], False", "properties from connectiontype import Connectiontype # 0 properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo #", "chars) :param prefix: A unique 5 character user defined code used to refer", "the D-Blade :param is_modify: A boolean value which indicates if it's an rdb_modify", "[ array_name, 'array-name', [ basestring, 'None' ], False ], }, { 'array-profiles': [", "(28 chars max) :param max_queue_depth: The target port queue depth for all target", "new or changed information. :param array_name: The name of the array profile to", "DiskDetailInfo, True ], } ) def storage_array_list_info(self, array_name=None): \"\"\" Retrieves a list of", "} ) def storage_array_list_info(self, array_name=None): \"\"\" Retrieves a list of all array profiles", "[ network_address, 'network-address', [ basestring, 'None' ], False ], 'firmware': [ firmware, 'firmware',", "arrays. 
\"\"\" return self.request( \"storage-array-stats-list-info\", { }, { 'array-stat-info': [ StorageArrayStatsInfo, True ],", "\"\"\" return self.request( \"storage-array-port-stats-list-info\", { }, { 'port-stat-info': [ StorageArrayPortStatsInfo, True ], }", "[ prefix, 'prefix', [ basestring, 'None' ], False ], 'new_array_name': [ new_array_name, 'new-array-name',", "to the D-Blade :param wwpn: The WWPN of the array port whose attributes", "an array profile :param array_name: The name of the array profile to update.", "[ basestring, 'None' ], False ], 'options': [ options, 'options', [ basestring, 'None'", "a list of online array ports and their associated arrays :param array_name: When", "node. :param ownership_type: Option that allows the user to select which array LUNs", "} ) def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate a list of array LUNs", "{ 'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False ], 'network_address': [", "Generate a list of array LUNs associated with the named array. :param array_name:", "properties from storage_array_port_stats_info import StorageArrayPortStatsInfo # 19 properties from storage_array_port import StorageArrayPort #", "associated arrays :param array_name: When supplied, only port records for the named array", "The name of the array profile to update. (28 chars max) :param max_queue_depth:", "import StorageArrayProfile # 16 properties from storage_array_port_stats_info import StorageArrayPortStatsInfo # 19 properties from", "basestring, 'None' ], False ], 'new_array_name': [ new_array_name, 'new-array-name', [ basestring, 'None' ],", "for the array record. \"\"\" return self.request( \"storage-array-port-change-notification\", { 'wwpn': [ wwpn, 'wwpn',", "(system defined) for the array record. 
\"\"\" return self.request( \"storage-array-port-change-notification\", { 'wwpn': [", "], False ], 'new_array_name': [ new_array_name, 'new-array-name', [ basestring, 'None' ], False ],", "chars) :param ownership_type: Option that allows the user to select which array LUNs", "array. :param model: The model of the array. (16 chars max) :param options:", "], 'prefix': [ prefix, 'prefix', [ basestring, 'None' ], False ], 'new_array_name': [", "'all', all array LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-get-config-summary\", {", "'prefix', [ basestring, 'None' ], False ], 'new_array_name': [ new_array_name, 'new-array-name', [ basestring,", "'ownership_type': [ ownership_type, 'ownership-type', [ basestring, 'None' ], False ], }, { 'array-luns':", "False ], 'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False ], 'is_upgrade_pending':", "no spaces The new name to assign to this array profile. :param model:", "def storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id): \"\"\" Signal the changes made in array", "array_name=None): \"\"\" generate a list of online array ports and their associated arrays", "'None' ], False ], 'firmware': [ firmware, 'firmware', [ basestring, 'None' ], False", "supplied 4 character code used to refer to this array and used in", "vendor, 'vendor', [ basestring, 'None' ], False ], 'network_address': [ network_address, 'network-address', [", "are displayed. Default: 'all'. 
\"\"\" return self.request( \"storage-array-luns-list-info\", { 'array_name': [ array_name, 'array-name',", "'new-array-name', [ basestring, 'None' ], False ], 'model': [ model, 'model', [ basestring,", "from lunownershipfiltertype import Lunownershipfiltertype # 0 properties from storage_array_profile import StorageArrayProfile # 16", "(28 chars max) \"\"\" return self.request( \"storage-array-rename\", { 'array_name': [ array_name, 'array-name', [", "[ basestring, 'None' ], False ], }, { } ) def storage_array_profile_change_notification(self, is_modify,", "The new name to assign to this array profile. :param model: The model", "depth for this target port. \"\"\" return self.request( \"storage-array-port-modify\", { 'wwpn': [ wwpn,", "WWPN of the array port whose attributes changed :param wwnn: The WWNN of", "}, { 'array-profile': [ StorageArrayProfile, False ], } ) def storage_array_get_config_summary(self, node=None, ownership_type=None):", "queue depth assigned to array LUNs from this array. :param model: The model", "], 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'max_queue_depth': [", "[ array_name, 'array-name', [ basestring, 'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type',", "to the controller. :param array_name: When specified, only the named array profile record", "3 properties from storage_array_error_info import StorageArrayErrorInfo # 3 properties class StorageArrayConnection(NaConnection): def storage_array_modify(self,", "this array and used in naming the array's LUNs. :param lun_queue_depth: The queue", "import StorageArrayPort # 9 properties from connectiontype import Connectiontype # 0 properties from", "int, 'None' ], False ], 'model': [ model, 'model', [ basestring, 'None' ],", "\"\"\" Generates a high level summary of array LUN pathing (connectivity) information. :param", "manufacturer. 
(8 chars) :param network_address: The I/O address of the array's SNMP management", "array and used in naming the array's LUNs. :param lun_queue_depth: The queue depth", "StorageArrayStatsErrorInfo # 3 properties from storage_array_error_info import StorageArrayErrorInfo # 3 properties class StorageArrayConnection(NaConnection):", "changed :param wwnn: The WWNN of the array port whose attributes changed :param", "max) :param is_upgrade_pending: Used to indicate that the specified array will under go", "the RDB. \"\"\" return self.request( \"storage-array-profile-sync\", { }, { } ) def storage_array_port_stats_list_info(self):", "profile specific options. (comma separated list of name/value pairs) (127 chars max) \"\"\"", "about backend arrays. \"\"\" return self.request( \"storage-array-stats-list-info\", { }, { 'array-stat-info': [ StorageArrayStatsInfo,", "Primary key (system defined) for the array record. \"\"\" return self.request( \"storage-array-port-change-notification\", {", "array LUN pathing information for a specified node. :param ownership_type: Option that allows", "this array. :param vendor: The name of the array's manufacturer. (8 chars max)", "unique 5 character user defined code used to refer to this array. :param", "LUN pathing information for a specified node. :param ownership_type: Option that allows the", "self.request( \"storage-array-get-config-summary\", { 'node': [ node, 'node', [ basestring, 'None' ], False ],", "[ basestring, 'None' ], False ], }, { 'array-profiles': [ StorageArrayProfile, True ],", "Primary key (system defined) for the array record. \"\"\" return self.request( \"storage-array-port-remove\", {", "properties from storage_array_stats_info import StorageArrayStatsInfo # 6 properties from arrayfailovertype import Arrayfailovertype #", "changed information. :param array_name: The name of the array profile to update. 
(28", "\"\"\" generate a list of online array ports and their associated arrays :param", "'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], }, { }", "vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None, model=None, options=None): \"\"\" Update an array profile with", "specified array will under go an upgrade in the near future. :param prefix:", "array. :param vendor: The name of the array's manufacturer. (8 chars max) :param", "named array profile record will be returned. (28 chars) \"\"\" return self.request( \"storage-array-list-info\",", "False ], 'model': [ model, 'model', [ basestring, 'None' ], False ], 'array_name':", "prefix, 'prefix', [ basestring, 'None' ], False ], 'new_array_name': [ new_array_name, 'new-array-name', [", "LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-luns-list-info\", { 'array_name': [ array_name,", "{ 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'ownership_type': [", "array's manufacturer. (8 chars) :param network_address: The I/O address of the array's SNMP", "13 properties from storage_array_stats_info import StorageArrayStatsInfo # 6 properties from arrayfailovertype import Arrayfailovertype", "of name/value pairs) (127 chars max) \"\"\" return self.request( \"storage-array-modify\", { 'max_queue_depth': [", "properties from storage_array_port import StorageArrayPort # 9 properties from connectiontype import Connectiontype #", ":param firmware: The firmware revision of the array being entered. (64 chars) :param", "Update an array profile with new or changed information. Arguments passed in will", "name of array's target port (64 chars). :param wwnn: World wide node name", "in dotted-decimal format (for example, \"192.168.11.12\"). 
:param firmware: The firmware revision of the", "], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_name': [", "When supplied, only port records for the named array are returned. (28 chars)", "record. \"\"\" return self.request( \"storage-array-port-remove\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None'", "displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-luns-list-info\", { 'array_name': [ array_name, 'array-name', [", "name/value pairs) (128 chars) \"\"\" return self.request( \"storage-array-update\", { 'vendor': [ vendor, 'vendor',", "dynamic information about backend arrays. \"\"\" return self.request( \"storage-array-stats-list-info\", { }, { 'array-stat-info':", "array LUN pathing (connectivity) information. :param node: Obtain array LUN pathing information for", "no spaces The name of the array profile to update. :param vendor: The", "model=None, options=None): \"\"\" Update an array profile with new or changed information. Arguments", "[ basestring, 'None' ], False ], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None'", "array's target port (64 chars). :param wwnn: World wide node name of array's", "one port from an array profile record :param wwpn: The WWPN of the", "The target port queue depth for this target port. \"\"\" return self.request( \"storage-array-port-modify\",", "chars max) :param max_queue_depth: The target port queue depth for this target port.", "def storage_array_ports_list_info(self, array_name=None): \"\"\" generate a list of online array ports and their", "the array profile to list array LUN information for. (28 chars) :param ownership_type:", "list of name/value pairs) (127 chars max) \"\"\" return self.request( \"storage-array-modify\", { 'max_queue_depth':", "array_name, 'array-name', [ basestring, 'None' ], False ], }, { 'array-ports': [ StorageArrayPort,", "'None' ], False ], 'new_array_name': [ new_array_name, 'new-array-name', [ basestring, 'None' ], False", "for all target ports on this array. 
:param vendor: The name of the", ":param max_queue_depth: The target port queue depth for all target ports on this", "firmware: The firmware revision of the array being entered. (64 chars) :param prefix:", "array profile record will be returned. (28 chars) \"\"\" return self.request( \"storage-array-list-info\", {", "list of name/value pairs) (128 chars) \"\"\" return self.request( \"storage-array-update\", { 'vendor': [", "array port whose attributes changed :param is_modify: A boolean value which indicates if", "def storage_array_rename(self, array_name, new_name): \"\"\" Rename an array profile :param array_name: The name", "], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], }, {", ":param model: The model number of the array. (16 chars) :param options: Array", "[ DiskDetailInfo, True ], } ) def storage_array_list_info(self, array_name=None): \"\"\" Retrieves a list", "wwnn: The WWNN of the array port to remove. :param array_id: Primary key", "synchronizing it with the RDB. \"\"\" return self.request( \"storage-array-profile-sync\", { }, { }", "\"\"\" return self.request( \"storage-array-get-config-summary\", { 'node': [ node, 'node', [ basestring, 'None' ],", "'None' ], False ], 'options': [ options, 'options', [ basestring, 'None' ], False", "'model', [ basestring, 'None' ], False ], 'array_name': [ array_name, 'array-name', [ basestring,", "import StorageArrayPortStats # 13 properties from storage_array_stats_info import StorageArrayStatsInfo # 6 properties from", "storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None): \"\"\" Update an array port with new or", "# 0 properties from lunownershipfiltertype import Lunownershipfiltertype # 0 properties from storage_array_profile import", "an array port with new or changed information. 
:param wwpn: World wide port", "separated list of name/value pairs) (128 chars) \"\"\" return self.request( \"storage-array-update\", { 'vendor':", ":param array_name: The name of the array profile to list array LUN information", "The I/O address of the array's SNMP management service in dotted-decimal format (for", "array_name, 'array-name', [ basestring, 'None' ], False ], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [", "The name of the array profile to update. :param vendor: The name of", "existing values. :param array_name: 28 character string, no spaces The name of the", "], False ], 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ],", "} ) def storage_array_rename(self, array_name, new_name): \"\"\" Rename an array profile :param array_name:", "array_name, ownership_type=None): \"\"\" Generate a list of array LUNs associated with the named", "def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\" Update an", "target port queue depth for all target ports on this array. :param vendor:", ":param max_queue_depth: The target port queue depth for this target port. \"\"\" return", ":param options: Array profile specific options. (comma separated list of name/value pairs) (128", "wide port name of array's target port (64 chars). :param wwnn: World wide", "'prefix': [ prefix, 'prefix', [ basestring, 'None' ], False ], 'new_array_name': [ new_array_name,", "max) \"\"\" return self.request( \"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None'", "False ], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], },", "update. (28 chars max) :param max_queue_depth: The target port queue depth for all", "assign to this array profile. 
:param model: The model number of the array.", "from storage_array_config_summary import StorageArrayConfigSummary # 8 properties from storage_array_port_stats import StorageArrayPortStats # 13", "], False ], 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False ],", "ownership-type is set to 'assigned' only assigned array LUNs are displayed. If ownership-type", "StorageArrayPort, True ], } ) def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate a list", "{ 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], 'vendor': [", "} ) def storage_array_profile_sync(self): \"\"\" Purge a node's array profile database, thereby synchronizing", "to this array and used in naming the array's LUNs. :param lun_queue_depth: The", "(28 chars) \"\"\" return self.request( \"storage-array-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring,", "profile to update. (28 chars max) :param max_queue_depth: The target port queue depth", "max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], }, { } ) def", "], False ], }, { } ) def storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id):", "\"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], 'vendor':", "to assign to this array profile. (28 chars max) \"\"\" return self.request( \"storage-array-rename\",", ") def storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None): \"\"\" Update an array port with", ":param wwpn: World wide port name of array's target port (64 chars). :param", "array_name, 'array-name', [ basestring, 'None' ], False ], 'new_name': [ new_name, 'new-name', [", "} ) def storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\" Remove one port from an", "network_address=None, firmware=None, prefix=None, new_array_name=None, model=None, options=None): \"\"\" Update an array profile with new", "(connectivity) information. 
:param node: Obtain array LUN pathing information for a specified node.", "this target port. \"\"\" return self.request( \"storage-array-port-modify\", { 'wwpn': [ wwpn, 'wwpn', [", "'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], }, { 'array-ports':", "from storage_array_port_stats_info import StorageArrayPortStatsInfo # 19 properties from storage_array_port import StorageArrayPort # 9", "information. Arguments passed in will be used to update the profile. Arguments not", "service in dotted-decimal format (for example, \"192.168.11.12\"). :param firmware: The firmware revision of", "array_name=None): \"\"\" Retrieves a list of all array profiles known to the controller.", "8 properties from storage_array_port_stats import StorageArrayPortStats # 13 properties from storage_array_stats_info import StorageArrayStatsInfo", "will keep their existing values. :param array_name: 28 character string, no spaces The", "pathing information for a specified node. :param ownership_type: Option that allows the user", "import StorageArrayStatsErrorInfo # 3 properties from storage_array_error_info import StorageArrayErrorInfo # 3 properties class", "port name of array's target port (64 chars). :param wwnn: World wide node", "profile :param array_name: The name of the array profile to update. (28 chars", ":param vendor: The name of the array's manufacturer. (8 chars) :param network_address: The", "lunownershipfiltertype import Lunownershipfiltertype # 0 properties from storage_array_profile import StorageArrayProfile # 16 properties", "'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'options': [ options,", "set to 'unassigned' only unassigned array LUNs are displayed. If ownership-type is set", "wide node name of array's target port (64 chars). 
:param array_name: The name", "\"\"\" return self.request( \"storage-array-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ],", "character code used to refer to this array and used in naming the", "in array port table to the D-Blade :param wwpn: The WWPN of the", "operation is rdb_create :param array_id: Primary key (system defined) for the array record.", "used to refer to this array and used in naming the array's LUNs.", "StorageArrayStatsInfo, True ], } ) def storage_array_profile_sync(self): \"\"\" Purge a node's array profile", "for ownership-type are 'assigned', 'unassigned' and 'all'. If ownership-type is set to 'assigned'", "information. :param node: Obtain array LUN pathing information for a specified node. :param", "information. :param array_name: The name of the array profile to update. (28 chars", "StorageArrayPortStatsInfo, True ], } ) def storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None,", "def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates a high level summary of array LUN", "storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\" Remove one port from an array profile record", ":param array_name: The name of the array profile to update. (28 chars max)", "with the RDB. \"\"\" return self.request( \"storage-array-profile-sync\", { }, { } ) def", "[ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_name': [ array_name, 'array-name',", "max) :param new_name: The new name to assign to this array profile. (28", "that the specified array will under go an upgrade in the near future.", "specific options. (comma separated list of name/value pairs) (128 chars) \"\"\" return self.request(", "False ], }, { } ) def storage_array_rename(self, array_name, new_name): \"\"\" Rename an", "'unassigned' and 'all'. If ownership-type is set to 'assigned' only assigned array LUNs", "of the array being entered. 
(64 chars) :param prefix: A unique 5 character", "return self.request( \"storage-array-port-change-notification\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False", "properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3 properties from storage_array_error_info import StorageArrayErrorInfo #", "'firmware', [ basestring, 'None' ], False ], 'prefix': [ prefix, 'prefix', [ basestring,", "the array profile to update. :param vendor: The name of the array's manufacturer.", "import Arrayfailovertype # 0 properties from arrayerrortype import Arrayerrortype # 0 properties from", ") def storage_array_ports_list_info(self, array_name=None): \"\"\" generate a list of online array ports and", "'vendor', [ basestring, 'None' ], False ], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [ bool,", "port whose attributes changed :param is_modify: A boolean value which indicates if it's", "specified node. :param ownership_type: Option that allows the user to select which array", "with new or changed information. :param array_name: The name of the array profile", "(28 chars) \"\"\" return self.request( \"storage-array-ports-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring,", "], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'is_modify': [", "LUNs associated with the named array. :param array_name: The name of the array", "max_queue_depth: The target port queue depth for this target port. \"\"\" return self.request(", "wwpn, wwnn, is_modify, array_id): \"\"\" Signal the changes made in array port table", "the named array profile record will be returned. (28 chars) \"\"\" return self.request(", "\"storage-array-update\", { 'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False ], 'network_address':", "defined) for the array record. 
\"\"\" return self.request( \"storage-array-port-change-notification\", { 'wwpn': [ wwpn,", "[ options, 'options', [ basestring, 'None' ], False ], }, { } )", "only unassigned array LUNs are displayed. If ownership-type is set to 'all', all", "new name to assign to this array profile. (28 chars max) \"\"\" return", "\"\"\" Update an array profile with new or changed information. :param array_name: The", "basestring, 'None' ], False ], 'array_id': [ array_id, 'array-id', [ int, 'None' ],", "upgrade in the near future. :param prefix: A unique user supplied 4 character", "return self.request( \"storage-array-port-remove\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False", "changed information. Arguments passed in will be used to update the profile. Arguments", "(comma separated list of name/value pairs) (128 chars) \"\"\" return self.request( \"storage-array-update\", {", "array. (16 chars max) :param options: Array profile specific options. (comma separated list", "profile database, thereby synchronizing it with the RDB. \"\"\" return self.request( \"storage-array-profile-sync\", {", "self.request( \"storage-array-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ],", "is rdb_create :param array_id: Primary key (system defined) for the array record. 
\"\"\"", "[ array_name, 'array-name', [ basestring, 'None' ], False ], }, { 'array-ports': [", "'new-name', [ basestring, 'None' ], False ], }, { } ) def storage_array_port_modify(self,", "'network_address': [ network_address, 'network-address', [ basestring, 'None' ], False ], 'firmware': [ firmware,", "], False ], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [ int, 'None' ], False ],", "}, { 'array-stat-info': [ StorageArrayStatsInfo, True ], } ) def storage_array_profile_sync(self): \"\"\" Purge", "} ) def storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None, model=None, options=None): \"\"\"", "Remove one port from an array profile record :param wwpn: The WWPN of", "basestring, 'None' ], False ], 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ],", "max) :param max_queue_depth: The target port queue depth for this target port. \"\"\"", "storage_array_port_stats import StorageArrayPortStats # 13 properties from storage_array_stats_info import StorageArrayStatsInfo # 6 properties", "the array being entered. (64 chars) :param prefix: A unique 5 character user", "are 'assigned', 'unassigned' and 'all'. If ownership-type is set to 'assigned' only assigned", ") def storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None, model=None, options=None): \"\"\" Update", "False ], } ) def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates a high level", "a specified node. :param ownership_type: Option that allows the user to select which", "'None' ], False ], }, { } ) def storage_array_stats_list_info(self): \"\"\" Used to", "\"\"\" Rename an array profile :param array_name: The name of the array profile", "character user defined code used to refer to this array. :param new_array_name: 28", "new or changed information. 
:param wwpn: World wide port name of array's target", "'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], }, { 'array-profiles':", "'None' ], False ], 'model': [ model, 'model', [ basestring, 'None' ], False", "False ], 'prefix': [ prefix, 'prefix', [ basestring, 'None' ], False ], 'lun_queue_depth':", "'array-name', [ basestring, 'None' ], False ], 'options': [ options, 'options', [ basestring,", "(for example, \"192.168.11.12\"). :param firmware: The firmware revision of the array being entered.", "properties from arrayerrortype import Arrayerrortype # 0 properties from lunownershipfiltertype import Lunownershipfiltertype #", "4 character code used to refer to this array and used in naming", "return self.request( \"storage-array-update\", { 'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False", "'options': [ options, 'options', [ basestring, 'None' ], False ], }, { }", "netapp.connection import NaConnection from storage_array_config_summary import StorageArrayConfigSummary # 8 properties from storage_array_port_stats import", "basestring, 'None' ], False ], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None' ],", "or changed information. :param wwpn: World wide port name of array's target port", "options, 'options', [ basestring, 'None' ], False ], }, { 'array-profile': [ StorageArrayProfile,", "node, 'node', [ basestring, 'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type', [", "the controller. :param array_name: When specified, only the named array profile record will", "[ firmware, 'firmware', [ basestring, 'None' ], False ], 'prefix': [ prefix, 'prefix',", "address of the array's SNMP management service in dotted-decimal format (for example, \"192.168.11.12\").", "'None' ], False ], }, { } ) def storage_array_port_remove(self, wwpn, wwnn, array_id):", "array profile RDB to the D-Blade :param is_modify: A boolean value which indicates", "chars). 
:param array_name: The name of the array profile to update. (28 chars", "get dynamic information about backend arrays. \"\"\" return self.request( \"storage-array-stats-list-info\", { }, {", "port. \"\"\" return self.request( \"storage-array-port-modify\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None'", "the array's manufacturer. (8 chars) :param network_address: The I/O address of the array's", "node: Obtain array LUN pathing information for a specified node. :param ownership_type: Option", "to 'all', all array LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-get-config-summary\",", "storage_array_error_info import StorageArrayErrorInfo # 3 properties class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None,", "basestring, 'None' ], False ], }, { 'array-profile': [ StorageArrayProfile, False ], }", "\"\"\" Signal the changes made in array profile RDB to the D-Blade :param", "to 'unassigned' only unassigned array LUNs are displayed. If ownership-type is set to", "chars max) :param new_name: The new name to assign to this array profile.", "is rdb_modify False - operation is rdb_create :param array_id: Primary key (system defined)", "'assigned' only assigned array LUNs are displayed. If ownership-type is set to 'unassigned'", "], False ], 'ownership_type': [ ownership_type, 'ownership-type', [ basestring, 'None' ], False ],", "return self.request( \"storage-array-port-stats-list-info\", { }, { 'port-stat-info': [ StorageArrayPortStatsInfo, True ], } )", "of array LUN pathing (connectivity) information. :param node: Obtain array LUN pathing information", "number of the array. (16 chars) :param options: Array profile specific options. (comma", "entered. (64 chars) :param prefix: A unique 5 character user defined code used", "used to refer to this array. :param new_array_name: 28 character string, no spaces", "information for a specified node. 
:param ownership_type: Option that allows the user to", "The WWPN of the array port to remove. :param wwnn: The WWNN of", "port (64 chars). :param wwnn: World wide node name of array's target port", "'options', [ basestring, 'None' ], False ], }, { } ) def storage_array_profile_change_notification(self,", "{ } ) def storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None): \"\"\" Update an array", "of array LUNs associated with the named array. :param array_name: The name of", "basestring, 'None' ], False ], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ],", "], False ], 'network_address': [ network_address, 'network-address', [ basestring, 'None' ], False ],", "], False ], }, { } ) def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal", "'None' ], False ], 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False", "array_id: Primary key (system defined) for the array record. \"\"\" return self.request( \"storage-array-port-remove\",", "for a specified node. :param ownership_type: Option that allows the user to select", "return self.request( \"storage-array-stats-list-info\", { }, { 'array-stat-info': [ StorageArrayStatsInfo, True ], } )", "basestring, 'None' ], False ], 'array_name': [ array_name, 'array-name', [ basestring, 'None' ],", "# 0 properties from arrayerrortype import Arrayerrortype # 0 properties from lunownershipfiltertype import", "'array-luns': [ DiskDetailInfo, True ], } ) def storage_array_list_info(self, array_name=None): \"\"\" Retrieves a", "profile record :param wwpn: The WWPN of the array port to remove. :param", "\"\"\" Generate a list of array LUNs associated with the named array. :param", "be returned. (28 chars) \"\"\" return self.request( \"storage-array-list-info\", { 'array_name': [ array_name, 'array-name',", "profile. 
(28 chars max) \"\"\" return self.request( \"storage-array-rename\", { 'array_name': [ array_name, 'array-name',", "], False ], }, { } ) def storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\"", "spaces The new name to assign to this array profile. :param model: The", "\"\"\" Update an array port with new or changed information. :param wwpn: World", "}, { 'config-summary': [ StorageArrayConfigSummary, True ], } ) def storage_array_ports_list_info(self, array_name=None): \"\"\"", "\"storage-array-stats-list-info\", { }, { 'array-stat-info': [ StorageArrayStatsInfo, True ], } ) def storage_array_profile_sync(self):", "is_modify, array_id): \"\"\" Signal the changes made in array port table to the", "generate a list of online array ports and their associated arrays :param array_name:", "allows the user to select which array LUNs are displayed. Valid values for", "storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates a high level summary of array LUN pathing", "If ownership-type is set to 'all', all array LUNs are displayed. Default: 'all'.", "False ], }, { 'config-summary': [ StorageArrayConfigSummary, True ], } ) def storage_array_ports_list_info(self,", "(8 chars max) :param is_upgrade_pending: Used to indicate that the specified array will", "'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'ownership_type': [ ownership_type,", "def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate a list of array LUNs associated with", "[ array_name, 'array-name', [ basestring, 'None' ], False ], 'options': [ options, 'options',", "level summary of array LUN pathing (connectivity) information. :param node: Obtain array LUN", "for the named array are returned. 
(28 chars) \"\"\" return self.request( \"storage-array-ports-list-info\", {", "storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate a list of array LUNs associated with the", "user defined code used to refer to this array. :param new_array_name: 28 character", "], False ], 'firmware': [ firmware, 'firmware', [ basestring, 'None' ], False ],", "assigned to array LUNs from this array. :param model: The model of the", "an upgrade in the near future. :param prefix: A unique user supplied 4", "max) :param max_queue_depth: The target port queue depth for all target ports on", "Lunownershipfiltertype # 0 properties from storage_array_profile import StorageArrayProfile # 16 properties from storage_array_port_stats_info", "depth for all target ports on this array. :param vendor: The name of", "array port to remove. :param wwnn: The WWNN of the array port to", "], False ], 'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False ],", "basestring, 'None' ], False ], }, { } ) def storage_array_profile_change_notification(self, is_modify, array_id):", "ports on this array. :param vendor: The name of the array's manufacturer. (8", "[ StorageArrayStatsInfo, True ], } ) def storage_array_profile_sync(self): \"\"\" Purge a node's array", ") def storage_array_profile_sync(self): \"\"\" Purge a node's array profile database, thereby synchronizing it", "to 'assigned' only assigned array LUNs are displayed. If ownership-type is set to", "], False ], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None' ], False ],", "], False ], }, { } ) def storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None):", "that allows the user to select which array LUNs are displayed. Valid values", "named array are returned. 
(28 chars) \"\"\" return self.request( \"storage-array-ports-list-info\", { 'array_name': [", "ownership_type=None): \"\"\" Generates a high level summary of array LUN pathing (connectivity) information.", "[ prefix, 'prefix', [ basestring, 'None' ], False ], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth',", "the specified array will under go an upgrade in the near future. :param", "SNMP management service in dotted-decimal format (for example, \"192.168.11.12\"). :param firmware: The firmware", ") def storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id): \"\"\" Signal the changes made in", "[ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], }, { } )", "{ } ) def storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\" Remove one port from", "storage_array_stats_info import StorageArrayStatsInfo # 6 properties from arrayfailovertype import Arrayfailovertype # 0 properties", "'lun-queue-depth', [ int, 'None' ], False ], 'model': [ model, 'model', [ basestring,", "from storage_array_port import StorageArrayPort # 9 properties from connectiontype import Connectiontype # 0", "\"\"\" return self.request( \"storage-array-port-change-notification\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ],", "options=None): \"\"\" Update an array profile with new or changed information. Arguments passed", "The WWNN of the array port to remove. 
:param array_id: Primary key (system", "their associated arrays :param array_name: When supplied, only port records for the named", "], 'network_address': [ network_address, 'network-address', [ basestring, 'None' ], False ], 'firmware': [", "attributes changed :param is_modify: A boolean value which indicates if it's an rdb_modify", "], }, { } ) def storage_array_rename(self, array_name, new_name): \"\"\" Rename an array", "[ basestring, 'None' ], False ], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [ int, 'None'", "[ basestring, 'None' ], False ], 'array_id': [ array_id, 'array-id', [ int, 'None'", "of the array. (16 chars max) :param options: Array profile specific options. (comma", "int, 'None' ], False ], 'vendor': [ vendor, 'vendor', [ basestring, 'None' ],", "# 3 properties from storage_array_error_info import StorageArrayErrorInfo # 3 properties class StorageArrayConnection(NaConnection): def", "int, 'None' ], False ], }, { } ) def storage_array_port_change_notification(self, wwpn, wwnn,", ":param wwnn: The WWNN of the array port to remove. :param array_id: Primary", "list of all array profiles known to the controller. :param array_name: When specified,", "not passed will keep their existing values. :param array_name: 28 character string, no", "name of the array's manufacturer. (8 chars max) :param is_upgrade_pending: Used to indicate", "will be used to update the profile. Arguments not passed will keep their", "lun_queue_depth=None, model=None, options=None): \"\"\" Update an array profile with new or changed information.", ":param model: The model of the array. (16 chars max) :param options: Array", "max) :param options: Array profile specific options. 
(comma separated list of name/value pairs)", "port from an array profile record :param wwpn: The WWPN of the array", "'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None' ], False ], 'prefix': [ prefix,", "'None' ], False ], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [ int, 'None' ], False", "[ StorageArrayProfile, False ], } ) def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates a", "of array's target port (64 chars). :param wwnn: World wide node name of", "self.request( \"storage-array-port-change-notification\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False ],", "'port-stat-info': [ StorageArrayPortStatsInfo, True ], } ) def storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None,", "{ } ) def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal the changes made in", "'None' ], False ], 'network_address': [ network_address, 'network-address', [ basestring, 'None' ], False", "depth assigned to array LUNs from this array. :param model: The model of", "the array. (16 chars max) :param options: Array profile specific options. (comma separated", "True ], } ) def storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None, model=None,", "connectiontype import Connectiontype # 0 properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3 properties", "'assigned', 'unassigned' and 'all'. If ownership-type is set to 'assigned' only assigned array", "is_modify, array_id): \"\"\" Signal the changes made in array profile RDB to the", "character string, no spaces The name of the array profile to update. 
:param", "StorageArrayConfigSummary # 8 properties from storage_array_port_stats import StorageArrayPortStats # 13 properties from storage_array_stats_info", "return self.request( \"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False", "'None' ], False ], 'prefix': [ prefix, 'prefix', [ basestring, 'None' ], False", "}, { } ) def storage_array_port_stats_list_info(self): \"\"\" return stats for array ports \"\"\"", "(16 chars max) :param options: Array profile specific options. (comma separated list of", "(28 chars max) :param new_name: The new name to assign to this array", "[ array_name, 'array-name', [ basestring, 'None' ], False ], 'new_name': [ new_name, 'new-name',", "StorageArrayProfile, False ], } ) def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates a high", "only assigned array LUNs are displayed. If ownership-type is set to 'unassigned' only", "update the profile. Arguments not passed will keep their existing values. :param array_name:", "'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'is_modify': [ is_modify,", "basestring, 'None' ], False ], 'new_name': [ new_name, 'new-name', [ basestring, 'None' ],", "of all array profiles known to the controller. :param array_name: When specified, only", "ports and their associated arrays :param array_name: When supplied, only port records for", ":param vendor: The name of the array's manufacturer. (8 chars max) :param is_upgrade_pending:", "'ownership-type', [ basestring, 'None' ], False ], }, { 'array-luns': [ DiskDetailInfo, True", "'None' ], False ], }, { 'array-profile': [ StorageArrayProfile, False ], } )", "chars max) :param options: Array profile specific options. (comma separated list of name/value", "World wide port name of array's target port (64 chars). 
:param wwnn: World", "character string, no spaces The new name to assign to this array profile.", "{ } ) def storage_array_stats_list_info(self): \"\"\" Used to get dynamic information about backend", "of the array port to remove. :param wwnn: The WWNN of the array", "port queue depth for this target port. \"\"\" return self.request( \"storage-array-port-modify\", { 'wwpn':", "'wwnn', [ basestring, 'None' ], False ], 'array_name': [ array_name, 'array-name', [ basestring,", "The name of the array profile to list array LUN information for. (28", "Used to get dynamic information about backend arrays. \"\"\" return self.request( \"storage-array-stats-list-info\", {", "[ basestring, 'None' ], False ], }, { 'array-profile': [ StorageArrayProfile, False ],", "target ports on this array. :param vendor: The name of the array's manufacturer.", "to remove. :param wwnn: The WWNN of the array port to remove. :param", "set to 'assigned' only assigned array LUNs are displayed. If ownership-type is set", "are displayed. If ownership-type is set to 'unassigned' only unassigned array LUNs are", "{ 'array-ports': [ StorageArrayPort, True ], } ) def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\"", "array. :param new_array_name: 28 character string, no spaces The new name to assign", "basestring, 'None' ], False ], 'network_address': [ network_address, 'network-address', [ basestring, 'None' ],", "target port queue depth for this target port. \"\"\" return self.request( \"storage-array-port-modify\", {", "array profile :param array_name: The name of the array profile to update. (28", "array_name, max_queue_depth=None): \"\"\" Update an array port with new or changed information. :param", "name of the array profile to update. :param vendor: The name of the", "made in array profile RDB to the D-Blade :param is_modify: A boolean value", "model number of the array. (16 chars) :param options: Array profile specific options.", "this array. 
:param model: The model of the array. (16 chars max) :param", "], 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False ], 'array_id': [", "prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\" Update an array profile with new or changed", "select which array LUNs are displayed. Valid values for ownership-type are 'assigned', 'unassigned'", ") def storage_array_list_info(self, array_name=None): \"\"\" Retrieves a list of all array profiles known", "wwpn, wwnn, array_name, max_queue_depth=None): \"\"\" Update an array port with new or changed", "False ], 'new_array_name': [ new_array_name, 'new-array-name', [ basestring, 'None' ], False ], 'model':", "array_name: When specified, only the named array profile record will be returned. (28", "array profile with new or changed information. Arguments passed in will be used", "wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_name': [ array_name, 'array-name', [", "to update. (28 chars max) :param max_queue_depth: The target port queue depth for", "'array-id', [ int, 'None' ], False ], }, { } ) def storage_array_stats_list_info(self):", "class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\"", "array_id): \"\"\" Signal the changes made in array profile RDB to the D-Blade", "network_address, 'network-address', [ basestring, 'None' ], False ], 'firmware': [ firmware, 'firmware', [", "], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None' ], False ], 'prefix': [", "(127 chars max) \"\"\" return self.request( \"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [", "ownership-type is set to 'all', all array LUNs are displayed. Default: 'all'. 
\"\"\"", "\"storage-array-profile-change-notification\", { 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False ], 'array_id':", "{ 'config-summary': [ StorageArrayConfigSummary, True ], } ) def storage_array_ports_list_info(self, array_name=None): \"\"\" generate", "array_name, 'array-name', [ basestring, 'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type', [", "only port records for the named array are returned. (28 chars) \"\"\" return", "A unique 5 character user defined code used to refer to this array.", "online array ports and their associated arrays :param array_name: When supplied, only port", "and 'all'. If ownership-type is set to 'assigned' only assigned array LUNs are", "basestring, 'None' ], False ], 'firmware': [ firmware, 'firmware', [ basestring, 'None' ],", "{ 'array-stat-info': [ StorageArrayStatsInfo, True ], } ) def storage_array_profile_sync(self): \"\"\" Purge a", "0 properties from lunownershipfiltertype import Lunownershipfiltertype # 0 properties from storage_array_profile import StorageArrayProfile", ":param is_upgrade_pending: Used to indicate that the specified array will under go an", "\"\"\" return self.request( \"storage-array-rename\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ],", "], } ) def storage_array_ports_list_info(self, array_name=None): \"\"\" generate a list of online array", "D-Blade :param is_modify: A boolean value which indicates if it's an rdb_modify operation.", "\"storage-array-profile-sync\", { }, { } ) def storage_array_port_stats_list_info(self): \"\"\" return stats for array", "chars) \"\"\" return self.request( \"storage-array-ports-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None'", "database, thereby synchronizing it with the RDB. \"\"\" return self.request( \"storage-array-profile-sync\", { },", "to 'all', all array LUNs are displayed. Default: 'all'. 
\"\"\" return self.request( \"storage-array-luns-list-info\",", "target port (64 chars). :param array_name: The name of the array profile to", "prefix, 'prefix', [ basestring, 'None' ], False ], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [", "'all', all array LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-luns-list-info\", {", "chars max) :param max_queue_depth: The target port queue depth for all target ports", "\"\"\" return self.request( \"storage-array-profile-change-notification\", { 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ],", "array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\" Update an array profile", "array_name: The name of the array profile to list array LUN information for.", "}, { 'port-stat-info': [ StorageArrayPortStatsInfo, True ], } ) def storage_array_update(self, array_name, vendor=None,", "True ], } ) def storage_array_list_info(self, array_name=None): \"\"\" Retrieves a list of all", "'None' ], False ], }, { 'array-profiles': [ StorageArrayProfile, True ], } )", "wwpn, 'wwpn', [ basestring, 'None' ], False ], 'wwnn': [ wwnn, 'wwnn', [", "defined code used to refer to this array. :param new_array_name: 28 character string,", "to list array LUN information for. (28 chars) :param ownership_type: Option that allows", "the array's LUNs. :param lun_queue_depth: The queue depth assigned to array LUNs from", "[ is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None' ], False ], 'prefix': [ prefix, 'prefix',", "\"\"\" return self.request( \"storage-array-luns-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ],", "are displayed. Valid values for ownership-type are 'assigned', 'unassigned' and 'all'. 
If ownership-type", ":param array_name: 28 character string, no spaces The name of the array profile", "'array-name', [ basestring, 'None' ], False ], 'new_name': [ new_name, 'new-name', [ basestring,", "], }, { } ) def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal the changes", "prefix: A unique 5 character user defined code used to refer to this", "Signal the changes made in array port table to the D-Blade :param wwpn:", "StorageArrayProfile # 16 properties from storage_array_port_stats_info import StorageArrayPortStatsInfo # 19 properties from storage_array_port", "{ 'port-stat-info': [ StorageArrayPortStatsInfo, True ], } ) def storage_array_update(self, array_name, vendor=None, network_address=None,", "displayed. If ownership-type is set to 'unassigned' only unassigned array LUNs are displayed.", "], False ], 'options': [ options, 'options', [ basestring, 'None' ], False ],", "ownership_type, 'ownership-type', [ basestring, 'None' ], False ], }, { 'config-summary': [ StorageArrayConfigSummary,", "wwpn: The WWPN of the array port whose attributes changed :param wwnn: The", "[ basestring, 'None' ], False ], 'new_name': [ new_name, 'new-name', [ basestring, 'None'", ":param array_name: When specified, only the named array profile record will be returned.", "array profiles known to the controller. :param array_name: When specified, only the named", "{ 'node': [ node, 'node', [ basestring, 'None' ], False ], 'ownership_type': [", "array will under go an upgrade in the near future. :param prefix: A", "The firmware revision of the array being entered. 
(64 chars) :param prefix: A", "changes made in array port table to the D-Blade :param wwpn: The WWPN", "'None' ], False ], }, { } ) def storage_array_port_modify(self, wwpn, wwnn, array_name,", "{ } ) def storage_array_port_stats_list_info(self): \"\"\" return stats for array ports \"\"\" return", "True - operation is rdb_modify False - operation is rdb_create :param array_id: Primary", "# 8 properties from storage_array_port_stats import StorageArrayPortStats # 13 properties from storage_array_stats_info import", "When specified, only the named array profile record will be returned. (28 chars)", "'max-queue-depth', [ int, 'None' ], False ], 'vendor': [ vendor, 'vendor', [ basestring,", "of online array ports and their associated arrays :param array_name: When supplied, only", "'None' ], False ], 'new_name': [ new_name, 'new-name', [ basestring, 'None' ], False", "Arrayfailovertype # 0 properties from arrayerrortype import Arrayerrortype # 0 properties from lunownershipfiltertype", "{ 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'new_name': [", "whose attributes changed :param is_modify: A boolean value which indicates if it's an", "refer to this array and used in naming the array's LUNs. :param lun_queue_depth:", "dotted-decimal format (for example, \"192.168.11.12\"). 
:param firmware: The firmware revision of the array", "False ], 'firmware': [ firmware, 'firmware', [ basestring, 'None' ], False ], 'prefix':", "False ], }, { } ) def storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None): \"\"\"", "array_name, new_name): \"\"\" Rename an array profile :param array_name: The name of the", "], 'ownership_type': [ ownership_type, 'ownership-type', [ basestring, 'None' ], False ], }, {", "to indicate that the specified array will under go an upgrade in the", "max_queue_depth: The target port queue depth for all target ports on this array.", ":param prefix: A unique user supplied 4 character code used to refer to", "'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [ int, 'None' ], False ], 'model': [ model,", "LUNs. :param lun_queue_depth: The queue depth assigned to array LUNs from this array.", "], }, { 'array-ports': [ StorageArrayPort, True ], } ) def storage_array_luns_list_info(self, array_name,", "to refer to this array. :param new_array_name: 28 character string, no spaces The", "(128 chars) \"\"\" return self.request( \"storage-array-update\", { 'vendor': [ vendor, 'vendor', [ basestring,", "[ basestring, 'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type', [ basestring, 'None'", "wwpn: World wide port name of array's target port (64 chars). :param wwnn:", "lun_queue_depth, 'lun-queue-depth', [ int, 'None' ], False ], 'model': [ model, 'model', [", "port with new or changed information. :param wwpn: World wide port name of", "Default: 'all'. \"\"\" return self.request( \"storage-array-get-config-summary\", { 'node': [ node, 'node', [ basestring,", "False ], 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'max_queue_depth':", "operation is rdb_modify False - operation is rdb_create :param array_id: Primary key (system", "the named array are returned. 
(28 chars) \"\"\" return self.request( \"storage-array-ports-list-info\", { 'array_name':", "backend arrays. \"\"\" return self.request( \"storage-array-stats-list-info\", { }, { 'array-stat-info': [ StorageArrayStatsInfo, True", "port records for the named array are returned. (28 chars) \"\"\" return self.request(", "to get dynamic information about backend arrays. \"\"\" return self.request( \"storage-array-stats-list-info\", { },", "16 properties from storage_array_port_stats_info import StorageArrayPortStatsInfo # 19 properties from storage_array_port import StorageArrayPort", "wwpn, wwnn, array_id): \"\"\" Remove one port from an array profile record :param", "Generates a high level summary of array LUN pathing (connectivity) information. :param node:", "it with the RDB. \"\"\" return self.request( \"storage-array-profile-sync\", { }, { } )", "'array-id', [ int, 'None' ], False ], }, { } ) def storage_array_port_change_notification(self,", "options. (comma separated list of name/value pairs) (127 chars max) \"\"\" return self.request(", "array_name, vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None, model=None, options=None): \"\"\" Update an array profile", "the profile. Arguments not passed will keep their existing values. :param array_name: 28", "}, { 'array-luns': [ DiskDetailInfo, True ], } ) def storage_array_list_info(self, array_name=None): \"\"\"", "Connectiontype # 0 properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3 properties from storage_array_error_info", "array_id): \"\"\" Remove one port from an array profile record :param wwpn: The", "], }, { } ) def storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None): \"\"\" Update", "chars). :param wwnn: World wide node name of array's target port (64 chars).", "passed will keep their existing values. 
:param array_name: 28 character string, no spaces", "\"\"\" Signal the changes made in array port table to the D-Blade :param", "thereby synchronizing it with the RDB. \"\"\" return self.request( \"storage-array-profile-sync\", { }, {", "Option that allows the user to select which array LUNs are displayed. Valid", "array LUNs are displayed. If ownership-type is set to 'unassigned' only unassigned array", "defined) for the array record. \"\"\" return self.request( \"storage-array-profile-change-notification\", { 'is_modify': [ is_modify,", "for the array record. \"\"\" return self.request( \"storage-array-port-remove\", { 'wwpn': [ wwpn, 'wwpn',", "chars) \"\"\" return self.request( \"storage-array-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None'", "'wwnn', [ basestring, 'None' ], False ], 'is_modify': [ is_modify, 'is-modify', [ bool,", "a high level summary of array LUN pathing (connectivity) information. :param node: Obtain", "model of the array. (16 chars max) :param options: Array profile specific options.", "[ basestring, 'None' ], False ], }, { } ) def storage_array_port_modify(self, wwpn,", "array's LUNs. :param lun_queue_depth: The queue depth assigned to array LUNs from this", "prefix=None, new_array_name=None, model=None, options=None): \"\"\" Update an array profile with new or changed", "array port whose attributes changed :param wwnn: The WWNN of the array port", "'array_id': [ array_id, 'array-id', [ int, 'None' ], False ], }, { }", "], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [ int, 'None' ], False ], 'model': [", "profile. :param model: The model number of the array. (16 chars) :param options:", "[ options, 'options', [ basestring, 'None' ], False ], }, { 'array-profile': [", "basestring, 'None' ], False ], }, { 'config-summary': [ StorageArrayConfigSummary, True ], }", "A boolean value which indicates if it's an rdb_modify operation. 
True - operation", "storage_array_port_stats_list_info(self): \"\"\" return stats for array ports \"\"\" return self.request( \"storage-array-port-stats-list-info\", { },", "self.request( \"storage-array-port-stats-list-info\", { }, { 'port-stat-info': [ StorageArrayPortStatsInfo, True ], } ) def", "options, 'options', [ basestring, 'None' ], False ], }, { } ) def", "basestring, 'None' ], False ], }, { 'array-ports': [ StorageArrayPort, True ], }", "array port with new or changed information. :param wwpn: World wide port name", ") def storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\" Remove one port from an array", "Array profile specific options. (comma separated list of name/value pairs) (128 chars) \"\"\"", "port whose attributes changed :param wwnn: The WWNN of the array port whose", "'None' ], False ], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None' ], False", "an array profile with new or changed information. Arguments passed in will be", "# 0 properties from storage_array_profile import StorageArrayProfile # 16 properties from storage_array_port_stats_info import", "NaConnection from storage_array_config_summary import StorageArrayConfigSummary # 8 properties from storage_array_port_stats import StorageArrayPortStats #", "returned. (28 chars) \"\"\" return self.request( \"storage-array-list-info\", { 'array_name': [ array_name, 'array-name', [", "from storage_array_error_info import StorageArrayErrorInfo # 3 properties class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None,", "associated with the named array. :param array_name: The name of the array profile", "], False ], }, { 'array-luns': [ DiskDetailInfo, True ], } ) def", "return self.request( \"storage-array-port-modify\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False", "name of the array profile to list array LUN information for. 
(28 chars)", "{ 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False ], 'wwnn': [", "firmware revision of the array being entered. (64 chars) :param prefix: A unique", ":param ownership_type: Option that allows the user to select which array LUNs are", "self.request( \"storage-array-update\", { 'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False ],", "ownership_type: Option that allows the user to select which array LUNs are displayed.", "], False ], }, { 'array-ports': [ StorageArrayPort, True ], } ) def", "record. \"\"\" return self.request( \"storage-array-port-change-notification\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None'", "new_array_name: 28 character string, no spaces The new name to assign to this", "arrayerrortype import Arrayerrortype # 0 properties from lunownershipfiltertype import Lunownershipfiltertype # 0 properties", "{ 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], }, {", "self.request( \"storage-array-luns-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ],", "], 'prefix': [ prefix, 'prefix', [ basestring, 'None' ], False ], 'lun_queue_depth': [", "'new_array_name': [ new_array_name, 'new-array-name', [ basestring, 'None' ], False ], 'model': [ model,", ") def storage_array_port_stats_list_info(self): \"\"\" return stats for array ports \"\"\" return self.request( \"storage-array-port-stats-list-info\",", "'array-name', [ basestring, 'None' ], False ], }, { 'array-ports': [ StorageArrayPort, True", "from this array. :param model: The model of the array. (16 chars max)", "or changed information. 
Arguments passed in will be used to update the profile.", "StorageArrayPort # 9 properties from connectiontype import Connectiontype # 0 properties from storage_array_stats_error_info", "\"storage-array-port-change-notification\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False ], 'wwnn':", "from netapp.connection import NaConnection from storage_array_config_summary import StorageArrayConfigSummary # 8 properties from storage_array_port_stats", "self.request( \"storage-array-stats-list-info\", { }, { 'array-stat-info': [ StorageArrayStatsInfo, True ], } ) def", "], } ) def storage_array_list_info(self, array_name=None): \"\"\" Retrieves a list of all array", "is set to 'all', all array LUNs are displayed. Default: 'all'. \"\"\" return", "\"\"\" return self.request( \"storage-array-port-remove\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ],", "'options': [ options, 'options', [ basestring, 'None' ], False ], }, { 'array-profile':", "is set to 'assigned' only assigned array LUNs are displayed. If ownership-type is", "unassigned array LUNs are displayed. If ownership-type is set to 'all', all array", "If ownership-type is set to 'assigned' only assigned array LUNs are displayed. If", "'wwpn', [ basestring, 'None' ], False ], 'wwnn': [ wwnn, 'wwnn', [ basestring,", "array profile. :param model: The model number of the array. (16 chars) :param", "], }, { 'array-profile': [ StorageArrayProfile, False ], } ) def storage_array_get_config_summary(self, node=None,", "LUNs are displayed. Valid values for ownership-type are 'assigned', 'unassigned' and 'all'. If", "is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None' ], False ], 'prefix': [ prefix, 'prefix', [", "\"\"\" Retrieves a list of all array profiles known to the controller. :param", "False ], }, { 'array-ports': [ StorageArrayPort, True ], } ) def storage_array_luns_list_info(self,", "def storage_array_stats_list_info(self): \"\"\" Used to get dynamic information about backend arrays. 
\"\"\" return", "}, { } ) def storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None): \"\"\" Update an", "for this target port. \"\"\" return self.request( \"storage-array-port-modify\", { 'wwpn': [ wwpn, 'wwpn',", "storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal the changes made in array profile RDB to", "WWNN of the array port whose attributes changed :param is_modify: A boolean value", "'None' ], False ], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False", "'config-summary': [ StorageArrayConfigSummary, True ], } ) def storage_array_ports_list_info(self, array_name=None): \"\"\" generate a", "False ], }, { } ) def storage_array_stats_list_info(self): \"\"\" Used to get dynamic", "'array-profile': [ StorageArrayProfile, False ], } ) def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates", "[ basestring, 'None' ], False ], }, { 'array-luns': [ DiskDetailInfo, True ],", "\"\"\" return stats for array ports \"\"\" return self.request( \"storage-array-port-stats-list-info\", { }, {", "options: Array profile specific options. (comma separated list of name/value pairs) (128 chars)", "user to select which array LUNs are displayed. Valid values for ownership-type are", "'array-ports': [ StorageArrayPort, True ], } ) def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate", "array_name: When supplied, only port records for the named array are returned. (28", "basestring, 'None' ], False ], }, { } ) def storage_array_port_modify(self, wwpn, wwnn,", "ownership_type=None): \"\"\" Generate a list of array LUNs associated with the named array.", "StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\" Update", "record :param wwpn: The WWPN of the array port to remove. 
:param wwnn:", "StorageArrayPortStats # 13 properties from storage_array_stats_info import StorageArrayStatsInfo # 6 properties from arrayfailovertype", "LUN information for. (28 chars) :param ownership_type: Option that allows the user to", "[ int, 'None' ], False ], }, { } ) def storage_array_rename(self, array_name,", "Array profile specific options. (comma separated list of name/value pairs) (127 chars max)", "self.request( \"storage-array-port-remove\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False ],", "vendor, 'vendor', [ basestring, 'None' ], False ], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [", "of the array. (16 chars) :param options: Array profile specific options. (comma separated", "LUNs are displayed. If ownership-type is set to 'all', all array LUNs are", "LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-get-config-summary\", { 'node': [ node,", "return self.request( \"storage-array-ports-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False", "], False ], 'prefix': [ prefix, 'prefix', [ basestring, 'None' ], False ],", "False ], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'is_modify':", "[ is_modify, 'is-modify', [ bool, 'None' ], False ], 'array_id': [ array_id, 'array-id',", "StorageArrayStatsInfo # 6 properties from arrayfailovertype import Arrayfailovertype # 0 properties from arrayerrortype", "set to 'all', all array LUNs are displayed. Default: 'all'. \"\"\" return self.request(", "[ basestring, 'None' ], False ], }, { 'array-ports': [ StorageArrayPort, True ],", "'None' ], False ], }, { } ) def storage_array_port_change_notification(self, wwpn, wwnn, is_modify,", "in will be used to update the profile. Arguments not passed will keep", "rdb_modify operation. 
True - operation is rdb_modify False - operation is rdb_create :param", ":param array_name: When supplied, only port records for the named array are returned.", "Signal the changes made in array profile RDB to the D-Blade :param is_modify:", "changed information. :param wwpn: World wide port name of array's target port (64", "'array-name', [ basestring, 'None' ], False ], }, { 'array-profiles': [ StorageArrayProfile, True", "The target port queue depth for all target ports on this array. :param", ":param wwpn: The WWPN of the array port whose attributes changed :param wwnn:", "storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\" Update an array", "storage_array_rename(self, array_name, new_name): \"\"\" Rename an array profile :param array_name: The name of", "new_array_name, 'new-array-name', [ basestring, 'None' ], False ], 'model': [ model, 'model', [", "[ ownership_type, 'ownership-type', [ basestring, 'None' ], False ], }, { 'array-luns': [", "\"storage-array-port-stats-list-info\", { }, { 'port-stat-info': [ StorageArrayPortStatsInfo, True ], } ) def storage_array_update(self,", "[ basestring, 'None' ], False ], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None'", "False ], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_name':", "is_modify: A boolean value which indicates if it's an rdb_modify operation. True -", "array record. \"\"\" return self.request( \"storage-array-port-change-notification\", { 'wwpn': [ wwpn, 'wwpn', [ basestring,", "basestring, 'None' ], False ], }, { 'array-profiles': [ StorageArrayProfile, True ], }", "changed :param is_modify: A boolean value which indicates if it's an rdb_modify operation.", "from arrayerrortype import Arrayerrortype # 0 properties from lunownershipfiltertype import Lunownershipfiltertype # 0", "the array. (16 chars) :param options: Array profile specific options. 
(comma separated list", "list array LUN information for. (28 chars) :param ownership_type: Option that allows the", "the user to select which array LUNs are displayed. Valid values for ownership-type", "array profile to update. (28 chars max) :param new_name: The new name to", "prefix: A unique user supplied 4 character code used to refer to this", "28 character string, no spaces The name of the array profile to update.", "The WWNN of the array port whose attributes changed :param is_modify: A boolean", "key (system defined) for the array record. \"\"\" return self.request( \"storage-array-profile-change-notification\", { 'is_modify':", "'options', [ basestring, 'None' ], False ], }, { 'array-profile': [ StorageArrayProfile, False", "3 properties class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None,", "'node', [ basestring, 'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type', [ basestring,", "True ], } ) def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate a list of", "options. (comma separated list of name/value pairs) (128 chars) \"\"\" return self.request( \"storage-array-update\",", "properties from storage_array_port_stats import StorageArrayPortStats # 13 properties from storage_array_stats_info import StorageArrayStatsInfo #", "array_name: The name of the array profile to update. (28 chars max) :param", "'None' ], False ], }, { 'array-luns': [ DiskDetailInfo, True ], } )", "Arguments not passed will keep their existing values. :param array_name: 28 character string,", "name of the array profile to update. (28 chars max) :param max_queue_depth: The", "assign to this array profile. (28 chars max) \"\"\" return self.request( \"storage-array-rename\", {", "displayed. Valid values for ownership-type are 'assigned', 'unassigned' and 'all'. 
If ownership-type is", "array profile with new or changed information. :param array_name: The name of the", "new or changed information. Arguments passed in will be used to update the", "[ basestring, 'None' ], False ], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None'", "import NaConnection from storage_array_config_summary import StorageArrayConfigSummary # 8 properties from storage_array_port_stats import StorageArrayPortStats", "the array profile to update. (28 chars max) :param new_name: The new name", "ownership-type are 'assigned', 'unassigned' and 'all'. If ownership-type is set to 'assigned' only", "The WWPN of the array port whose attributes changed :param wwnn: The WWNN", "array profile to list array LUN information for. (28 chars) :param ownership_type: Option", "separated list of name/value pairs) (127 chars max) \"\"\" return self.request( \"storage-array-modify\", {", "- operation is rdb_create :param array_id: Primary key (system defined) for the array", "firmware, 'firmware', [ basestring, 'None' ], False ], 'prefix': [ prefix, 'prefix', [", "for array ports \"\"\" return self.request( \"storage-array-port-stats-list-info\", { }, { 'port-stat-info': [ StorageArrayPortStatsInfo,", "self.request( \"storage-array-profile-change-notification\", { 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False ],", "profile with new or changed information. :param array_name: The name of the array", "operation. True - operation is rdb_modify False - operation is rdb_create :param array_id:", "WWPN of the array port to remove. :param wwnn: The WWNN of the", "], False ], }, { } ) def storage_array_rename(self, array_name, new_name): \"\"\" Rename", "'all'. \"\"\" return self.request( \"storage-array-luns-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None'", "properties from arrayfailovertype import Arrayfailovertype # 0 properties from arrayerrortype import Arrayerrortype #", "port to remove. 
:param array_id: Primary key (system defined) for the array record.", "} ) def storage_array_port_stats_list_info(self): \"\"\" return stats for array ports \"\"\" return self.request(", "(28 chars) :param ownership_type: Option that allows the user to select which array", "'new_name': [ new_name, 'new-name', [ basestring, 'None' ], False ], }, { }", ":param wwpn: The WWPN of the array port to remove. :param wwnn: The", "WWNN of the array port to remove. :param array_id: Primary key (system defined)", "profile RDB to the D-Blade :param is_modify: A boolean value which indicates if", "name of array's target port (64 chars). :param array_name: The name of the", "name of the array's manufacturer. (8 chars) :param network_address: The I/O address of", "all target ports on this array. :param vendor: The name of the array's", "}, { } ) def storage_array_stats_list_info(self): \"\"\" Used to get dynamic information about", "\"\"\" return self.request( \"storage-array-update\", { 'vendor': [ vendor, 'vendor', [ basestring, 'None' ],", "unique user supplied 4 character code used to refer to this array and", "the array record. \"\"\" return self.request( \"storage-array-profile-change-notification\", { 'is_modify': [ is_modify, 'is-modify', [", "which array LUNs are displayed. Valid values for ownership-type are 'assigned', 'unassigned' and", "future. :param prefix: A unique user supplied 4 character code used to refer", "Used to indicate that the specified array will under go an upgrade in", "value which indicates if it's an rdb_modify operation. True - operation is rdb_modify", "values for ownership-type are 'assigned', 'unassigned' and 'all'. 
If ownership-type is set to", "basestring, 'None' ], False ], 'options': [ options, 'options', [ basestring, 'None' ],", "the changes made in array port table to the D-Blade :param wwpn: The", "changes made in array profile RDB to the D-Blade :param is_modify: A boolean", "'None' ], False ], }, { } ) def storage_array_rename(self, array_name, new_name): \"\"\"", "array ports and their associated arrays :param array_name: When supplied, only port records", "False ], }, { 'array-profile': [ StorageArrayProfile, False ], } ) def storage_array_get_config_summary(self,", "[ vendor, 'vendor', [ basestring, 'None' ], False ], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending',", "import StorageArrayStatsInfo # 6 properties from arrayfailovertype import Arrayfailovertype # 0 properties from", "array LUNs from this array. :param model: The model of the array. (16", "Arguments passed in will be used to update the profile. Arguments not passed", "[ basestring, 'None' ], False ], 'network_address': [ network_address, 'network-address', [ basestring, 'None'", "the array port whose attributes changed :param is_modify: A boolean value which indicates", "the named array. :param array_name: The name of the array profile to list", "lun_queue_depth: The queue depth assigned to array LUNs from this array. :param model:", "is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\" Update an array profile with new or", "False ], }, { 'array-luns': [ DiskDetailInfo, True ], } ) def storage_array_list_info(self,", "I/O address of the array's SNMP management service in dotted-decimal format (for example,", "'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type', [ basestring, 'None' ], False", "# 0 properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3 properties from storage_array_error_info import", "naming the array's LUNs. 
:param lun_queue_depth: The queue depth assigned to array LUNs", "'None' ], False ], }, { } ) def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\"", "array LUNs are displayed. If ownership-type is set to 'all', all array LUNs", "int, 'None' ], False ], }, { } ) def storage_array_port_remove(self, wwpn, wwnn,", "target port (64 chars). :param wwnn: World wide node name of array's target", "basestring, 'None' ], False ], }, { 'array-luns': [ DiskDetailInfo, True ], }", "return self.request( \"storage-array-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False", "string, no spaces The name of the array profile to update. :param vendor:", "], } ) def storage_array_profile_sync(self): \"\"\" Purge a node's array profile database, thereby", "False ], 'array_id': [ array_id, 'array-id', [ int, 'None' ], False ], },", "an array profile record :param wwpn: The WWPN of the array port to", "properties class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None):", "used to update the profile. Arguments not passed will keep their existing values.", "from an array profile record :param wwpn: The WWPN of the array port", "The new name to assign to this array profile. (28 chars max) \"\"\"", "def storage_array_list_info(self, array_name=None): \"\"\" Retrieves a list of all array profiles known to", ":param is_modify: A boolean value which indicates if it's an rdb_modify operation. 
True", "of the array port whose attributes changed :param is_modify: A boolean value which", "return stats for array ports \"\"\" return self.request( \"storage-array-port-stats-list-info\", { }, { 'port-stat-info':", "ownership_type, 'ownership-type', [ basestring, 'None' ], False ], }, { 'array-luns': [ DiskDetailInfo,", "], False ], 'new_name': [ new_name, 'new-name', [ basestring, 'None' ], False ],", "network_address: The I/O address of the array's SNMP management service in dotted-decimal format", "values. :param array_name: 28 character string, no spaces The name of the array", "{ } ) def storage_array_rename(self, array_name, new_name): \"\"\" Rename an array profile :param", "to update the profile. Arguments not passed will keep their existing values. :param", "from storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3 properties from storage_array_error_info import StorageArrayErrorInfo # 3", "LUNs are displayed. If ownership-type is set to 'unassigned' only unassigned array LUNs", "{ }, { 'array-stat-info': [ StorageArrayStatsInfo, True ], } ) def storage_array_profile_sync(self): \"\"\"", "], }, { } ) def storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\" Remove one", "{ 'array-luns': [ DiskDetailInfo, True ], } ) def storage_array_list_info(self, array_name=None): \"\"\" Retrieves", "6 properties from arrayfailovertype import Arrayfailovertype # 0 properties from arrayerrortype import Arrayerrortype", "], } ) def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates a high level summary", "keep their existing values. :param array_name: 28 character string, no spaces The name", "\"\"\" Used to get dynamic information about backend arrays. \"\"\" return self.request( \"storage-array-stats-list-info\",", "to this array profile. :param model: The model number of the array. (16", "Update an array port with new or changed information. 
:param wwpn: World wide", "'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_id': [ array_id,", "False ], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_id':", "near future. :param prefix: A unique user supplied 4 character code used to", "to array LUNs from this array. :param model: The model of the array.", "arrays :param array_name: When supplied, only port records for the named array are", "'None' ], False ], 'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False", "} ) def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal the changes made in array", "code used to refer to this array. :param new_array_name: 28 character string, no", "], 'firmware': [ firmware, 'firmware', [ basestring, 'None' ], False ], 'prefix': [", "\"\"\" return self.request( \"storage-array-profile-sync\", { }, { } ) def storage_array_port_stats_list_info(self): \"\"\" return", "[ basestring, 'None' ], False ], 'new_array_name': [ new_array_name, 'new-array-name', [ basestring, 'None'", ") def storage_array_stats_list_info(self): \"\"\" Used to get dynamic information about backend arrays. \"\"\"", "StorageArrayPortStatsInfo # 19 properties from storage_array_port import StorageArrayPort # 9 properties from connectiontype", "storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3 properties from storage_array_error_info import StorageArrayErrorInfo # 3 properties", "defined) for the array record. \"\"\" return self.request( \"storage-array-port-remove\", { 'wwpn': [ wwpn,", "named array. :param array_name: The name of the array profile to list array", "array's SNMP management service in dotted-decimal format (for example, \"192.168.11.12\"). :param firmware: The", "the array record. 
\"\"\" return self.request( \"storage-array-port-change-notification\", { 'wwpn': [ wwpn, 'wwpn', [", "[ basestring, 'None' ], False ], 'array_name': [ array_name, 'array-name', [ basestring, 'None'", "node name of array's target port (64 chars). :param array_name: The name of", "}, { } ) def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal the changes made", "], 'new_name': [ new_name, 'new-name', [ basestring, 'None' ], False ], }, {", "[ StorageArrayPortStatsInfo, True ], } ) def storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None, prefix=None,", "being entered. (64 chars) :param prefix: A unique 5 character user defined code", "array LUN information for. (28 chars) :param ownership_type: Option that allows the user", "Arrayerrortype # 0 properties from lunownershipfiltertype import Lunownershipfiltertype # 0 properties from storage_array_profile", "def storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\" Remove one port from an array profile", "of the array's SNMP management service in dotted-decimal format (for example, \"192.168.11.12\"). :param", "], False ], 'array_id': [ array_id, 'array-id', [ int, 'None' ], False ],", "summary of array LUN pathing (connectivity) information. :param node: Obtain array LUN pathing", "code used to refer to this array and used in naming the array's", "'wwnn', [ basestring, 'None' ], False ], 'array_id': [ array_id, 'array-id', [ int,", "name to assign to this array profile. :param model: The model number of", ":param network_address: The I/O address of the array's SNMP management service in dotted-decimal", "revision of the array being entered. 
(64 chars) :param prefix: A unique 5", "False ], }, { } ) def storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id): \"\"\"", "], 'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False ], 'is_upgrade_pending': [", "\"storage-array-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], },", "False ], 'options': [ options, 'options', [ basestring, 'None' ], False ], },", "firmware=None, prefix=None, new_array_name=None, model=None, options=None): \"\"\" Update an array profile with new or", "of the array profile to update. :param vendor: The name of the array's", "basestring, 'None' ], False ], 'model': [ model, 'model', [ basestring, 'None' ],", "\"\"\" Purge a node's array profile database, thereby synchronizing it with the RDB.", "'ownership-type', [ basestring, 'None' ], False ], }, { 'config-summary': [ StorageArrayConfigSummary, True", "from arrayfailovertype import Arrayfailovertype # 0 properties from arrayerrortype import Arrayerrortype # 0", "wwnn: The WWNN of the array port whose attributes changed :param is_modify: A", "False ], 'prefix': [ prefix, 'prefix', [ basestring, 'None' ], False ], 'new_array_name':", "supplied, only port records for the named array are returned. (28 chars) \"\"\"", "'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False ], 'wwnn': [ wwnn,", "new_name, 'new-name', [ basestring, 'None' ], False ], }, { } ) def", "array port table to the D-Blade :param wwpn: The WWPN of the array", "def storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None, model=None, options=None): \"\"\" Update an", "import StorageArrayConfigSummary # 8 properties from storage_array_port_stats import StorageArrayPortStats # 13 properties from", "[ basestring, 'None' ], False ], }, { 'config-summary': [ StorageArrayConfigSummary, True ],", "(system defined) for the array record. 
\"\"\" return self.request( \"storage-array-port-remove\", { 'wwpn': [", "\"storage-array-rename\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'new_name':", "int, 'None' ], False ], }, { } ) def storage_array_rename(self, array_name, new_name):", "rdb_create :param array_id: Primary key (system defined) for the array record. \"\"\" return", "{ 'array-profile': [ StorageArrayProfile, False ], } ) def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\"", "[ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'is_modify': [ is_modify, 'is-modify',", "target port. \"\"\" return self.request( \"storage-array-port-modify\", { 'wwpn': [ wwpn, 'wwpn', [ basestring,", "(system defined) for the array record. \"\"\" return self.request( \"storage-array-profile-change-notification\", { 'is_modify': [", "} ) def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates a high level summary of", "array being entered. (64 chars) :param prefix: A unique 5 character user defined", ":param new_name: The new name to assign to this array profile. (28 chars", "'is-modify', [ bool, 'None' ], False ], 'array_id': [ array_id, 'array-id', [ int,", "[ StorageArrayConfigSummary, True ], } ) def storage_array_ports_list_info(self, array_name=None): \"\"\" generate a list", "], 'model': [ model, 'model', [ basestring, 'None' ], False ], 'array_name': [", "a list of array LUNs associated with the named array. :param array_name: The", "'node': [ node, 'node', [ basestring, 'None' ], False ], 'ownership_type': [ ownership_type,", "[ bool, 'None' ], False ], 'prefix': [ prefix, 'prefix', [ basestring, 'None'", "indicates if it's an rdb_modify operation. True - operation is rdb_modify False -", "from storage_array_stats_info import StorageArrayStatsInfo # 6 properties from arrayfailovertype import Arrayfailovertype # 0", "], False ], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ],", "to remove. 
:param array_id: Primary key (system defined) for the array record. \"\"\"", "[ vendor, 'vendor', [ basestring, 'None' ], False ], 'network_address': [ network_address, 'network-address',", "model: The model number of the array. (16 chars) :param options: Array profile", "vendor: The name of the array's manufacturer. (8 chars) :param network_address: The I/O", "is_upgrade_pending: Used to indicate that the specified array will under go an upgrade", "storage_array_port_stats_info import StorageArrayPortStatsInfo # 19 properties from storage_array_port import StorageArrayPort # 9 properties", "'network-address', [ basestring, 'None' ], False ], 'firmware': [ firmware, 'firmware', [ basestring,", "'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_name': [ array_name,", "'max-queue-depth', [ int, 'None' ], False ], }, { } ) def storage_array_port_remove(self,", "Default: 'all'. \"\"\" return self.request( \"storage-array-luns-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring,", "profile with new or changed information. Arguments passed in will be used to", ") def storage_array_get_config_summary(self, node=None, ownership_type=None): \"\"\" Generates a high level summary of array", "}, { } ) def storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id): \"\"\" Signal the", "The model number of the array. (16 chars) :param options: Array profile specific", "self.request( \"storage-array-port-modify\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False ],", "storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id): \"\"\" Signal the changes made in array port", "], 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'options': [", "to this array. :param new_array_name: 28 character string, no spaces The new name", "array profile record :param wwpn: The WWPN of the array port to remove.", "array. (16 chars) :param options: Array profile specific options. 
(comma separated list of", "of the array profile to update. (28 chars max) :param max_queue_depth: The target", "0 properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3 properties from storage_array_error_info import StorageArrayErrorInfo", "storage_array_stats_list_info(self): \"\"\" Used to get dynamic information about backend arrays. \"\"\" return self.request(", "], False ], }, { 'config-summary': [ StorageArrayConfigSummary, True ], } ) def", "queue depth for this target port. \"\"\" return self.request( \"storage-array-port-modify\", { 'wwpn': [", ") def storage_array_rename(self, array_name, new_name): \"\"\" Rename an array profile :param array_name: The", "bool, 'None' ], False ], 'prefix': [ prefix, 'prefix', [ basestring, 'None' ],", "update. (28 chars max) :param max_queue_depth: The target port queue depth for this", "array LUNs associated with the named array. :param array_name: The name of the", "False ], 'new_name': [ new_name, 'new-name', [ basestring, 'None' ], False ], },", "on this array. :param vendor: The name of the array's manufacturer. (8 chars", "vendor: The name of the array's manufacturer. (8 chars max) :param is_upgrade_pending: Used", "will under go an upgrade in the near future. :param prefix: A unique", "information. :param wwpn: World wide port name of array's target port (64 chars).", "}, { } ) def storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\" Remove one port", "max_queue_depth=None): \"\"\" Update an array port with new or changed information. :param wwpn:", "], False ], 'model': [ model, 'model', [ basestring, 'None' ], False ],", "be used to update the profile. Arguments not passed will keep their existing", ":param wwnn: World wide node name of array's target port (64 chars). :param", "return self.request( \"storage-array-luns-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False", "used in naming the array's LUNs. 
:param lun_queue_depth: The queue depth assigned to", "the near future. :param prefix: A unique user supplied 4 character code used", "wwnn, is_modify, array_id): \"\"\" Signal the changes made in array port table to", "model: The model of the array. (16 chars max) :param options: Array profile", "array_id: Primary key (system defined) for the array record. \"\"\" return self.request( \"storage-array-port-change-notification\",", "[ int, 'None' ], False ], 'vendor': [ vendor, 'vendor', [ basestring, 'None'", "[ int, 'None' ], False ], }, { } ) def storage_array_port_remove(self, wwpn,", "table to the D-Blade :param wwpn: The WWPN of the array port whose", "'is-upgrade-pending', [ bool, 'None' ], False ], 'prefix': [ prefix, 'prefix', [ basestring,", "(8 chars) :param network_address: The I/O address of the array's SNMP management service", "are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-get-config-summary\", { 'node': [ node, 'node',", "information for. (28 chars) :param ownership_type: Option that allows the user to select", "[ node, 'node', [ basestring, 'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type',", "} ) def storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None): \"\"\" Update an array port", "'None' ], False ], 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False", ":param options: Array profile specific options. (comma separated list of name/value pairs) (127", "string, no spaces The new name to assign to this array profile. 
:param", "[ wwpn, 'wwpn', [ basestring, 'None' ], False ], 'wwnn': [ wwnn, 'wwnn',", "[ basestring, 'None' ], False ], 'firmware': [ firmware, 'firmware', [ basestring, 'None'", "\"\"\" return self.request( \"storage-array-ports-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ],", "properties from storage_array_profile import StorageArrayProfile # 16 properties from storage_array_port_stats_info import StorageArrayPortStatsInfo #", "key (system defined) for the array record. \"\"\" return self.request( \"storage-array-port-remove\", { 'wwpn':", "for the array record. \"\"\" return self.request( \"storage-array-profile-change-notification\", { 'is_modify': [ is_modify, 'is-modify',", "boolean value which indicates if it's an rdb_modify operation. True - operation is", ") def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate a list of array LUNs associated", "StorageArrayErrorInfo # 3 properties class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None,", "return self.request( \"storage-array-get-config-summary\", { 'node': [ node, 'node', [ basestring, 'None' ], False", "array_name, 'array-name', [ basestring, 'None' ], False ], }, { 'array-profiles': [ StorageArrayProfile,", "array's manufacturer. (8 chars max) :param is_upgrade_pending: Used to indicate that the specified", "'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False ], 'is_upgrade_pending': [ is_upgrade_pending,", "a node's array profile database, thereby synchronizing it with the RDB. \"\"\" return", "False ], 'is_upgrade_pending': [ is_upgrade_pending, 'is-upgrade-pending', [ bool, 'None' ], False ], 'prefix':", "example, \"192.168.11.12\"). :param firmware: The firmware revision of the array being entered. (64", "high level summary of array LUN pathing (connectivity) information. 
:param node: Obtain array", "], False ], }, { } ) def storage_array_stats_list_info(self): \"\"\" Used to get", "of the array's manufacturer. (8 chars) :param network_address: The I/O address of the", "[ model, 'model', [ basestring, 'None' ], False ], 'array_name': [ array_name, 'array-name',", "import Connectiontype # 0 properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3 properties from", "displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-get-config-summary\", { 'node': [ node, 'node', [", "wwnn: World wide node name of array's target port (64 chars). :param array_name:", "stats for array ports \"\"\" return self.request( \"storage-array-port-stats-list-info\", { }, { 'port-stat-info': [", "'prefix': [ prefix, 'prefix', [ basestring, 'None' ], False ], 'lun_queue_depth': [ lun_queue_depth,", "def storage_array_port_modify(self, wwpn, wwnn, array_name, max_queue_depth=None): \"\"\" Update an array port with new", "[ new_array_name, 'new-array-name', [ basestring, 'None' ], False ], 'model': [ model, 'model',", "'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], 'vendor': [ vendor,", "'all'. \"\"\" return self.request( \"storage-array-get-config-summary\", { 'node': [ node, 'node', [ basestring, 'None'", "array's target port (64 chars). :param array_name: The name of the array profile", "with the named array. 
:param array_name: The name of the array profile to", "[ int, 'None' ], False ], 'model': [ model, 'model', [ basestring, 'None'", "self.request( \"storage-array-rename\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ],", "def storage_array_profile_sync(self): \"\"\" Purge a node's array profile database, thereby synchronizing it with", "[ int, 'None' ], False ], }, { } ) def storage_array_stats_list_info(self): \"\"\"", "pairs) (128 chars) \"\"\" return self.request( \"storage-array-update\", { 'vendor': [ vendor, 'vendor', [", "False ], 'network_address': [ network_address, 'network-address', [ basestring, 'None' ], False ], 'firmware':", "a list of all array profiles known to the controller. :param array_name: When", "LUN pathing (connectivity) information. :param node: Obtain array LUN pathing information for a", "], 'options': [ options, 'options', [ basestring, 'None' ], False ], }, {", "'array-id', [ int, 'None' ], False ], }, { } ) def storage_array_rename(self,", "array record. \"\"\" return self.request( \"storage-array-port-remove\", { 'wwpn': [ wwpn, 'wwpn', [ basestring,", "[ basestring, 'None' ], False ], 'is_modify': [ is_modify, 'is-modify', [ bool, 'None'", "9 properties from connectiontype import Connectiontype # 0 properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo", "made in array port table to the D-Blade :param wwpn: The WWPN of", "returned. (28 chars) \"\"\" return self.request( \"storage-array-ports-list-info\", { 'array_name': [ array_name, 'array-name', [", "[ basestring, 'None' ], False ], 'model': [ model, 'model', [ basestring, 'None'", "wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_id': [ array_id, 'array-id', [", "array ports \"\"\" return self.request( \"storage-array-port-stats-list-info\", { }, { 'port-stat-info': [ StorageArrayPortStatsInfo, True", "format (for example, \"192.168.11.12\"). 
:param firmware: The firmware revision of the array being", "self.request( \"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ],", "array. :param array_name: The name of the array profile to list array LUN", "The name of the array profile to update. (28 chars max) :param new_name:", "self.request( \"storage-array-ports-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ],", "this array profile. (28 chars max) \"\"\" return self.request( \"storage-array-rename\", { 'array_name': [", ":param new_array_name: 28 character string, no spaces The new name to assign to", "], 'new_array_name': [ new_array_name, 'new-array-name', [ basestring, 'None' ], False ], 'model': [", "storage_array_ports_list_info(self, array_name=None): \"\"\" generate a list of online array ports and their associated", "storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None, model=None, options=None): \"\"\" Update an array", "all array LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-luns-list-info\", { 'array_name':", ":param lun_queue_depth: The queue depth assigned to array LUNs from this array. :param", "in the near future. :param prefix: A unique user supplied 4 character code", "'prefix', [ basestring, 'None' ], False ], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [ int,", "array profile database, thereby synchronizing it with the RDB. \"\"\" return self.request( \"storage-array-profile-sync\",", "'model': [ model, 'model', [ basestring, 'None' ], False ], 'array_name': [ array_name,", "basestring, 'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type', [ basestring, 'None' ],", "information about backend arrays. 
\"\"\" return self.request( \"storage-array-stats-list-info\", { }, { 'array-stat-info': [", "attributes changed :param wwnn: The WWNN of the array port whose attributes changed", "[ new_name, 'new-name', [ basestring, 'None' ], False ], }, { } )", "max) \"\"\" return self.request( \"storage-array-rename\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None'", "is set to 'unassigned' only unassigned array LUNs are displayed. If ownership-type is", "'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'new_name': [ new_name,", "array are returned. (28 chars) \"\"\" return self.request( \"storage-array-ports-list-info\", { 'array_name': [ array_name,", "array profile to update. (28 chars max) :param max_queue_depth: The target port queue", "], } ) def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate a list of array", "the array profile to update. (28 chars max) :param max_queue_depth: The target port", "to this array profile. (28 chars max) \"\"\" return self.request( \"storage-array-rename\", { 'array_name':", "\"storage-array-get-config-summary\", { 'node': [ node, 'node', [ basestring, 'None' ], False ], 'ownership_type':", "profile to list array LUN information for. (28 chars) :param ownership_type: Option that", "record. \"\"\" return self.request( \"storage-array-profile-change-notification\", { 'is_modify': [ is_modify, 'is-modify', [ bool, 'None'", "the array's SNMP management service in dotted-decimal format (for example, \"192.168.11.12\"). :param firmware:", ":param array_id: Primary key (system defined) for the array record. 
\"\"\" return self.request(", "], False ], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ],", "def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal the changes made in array profile RDB", "new_name): \"\"\" Rename an array profile :param array_name: The name of the array", "return self.request( \"storage-array-rename\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False", "], 'array_id': [ array_id, 'array-id', [ int, 'None' ], False ], }, {", "], }, { 'array-luns': [ DiskDetailInfo, True ], } ) def storage_array_list_info(self, array_name=None):", "storage_array_config_summary import StorageArrayConfigSummary # 8 properties from storage_array_port_stats import StorageArrayPortStats # 13 properties", "model, 'model', [ basestring, 'None' ], False ], 'array_name': [ array_name, 'array-name', [", "Valid values for ownership-type are 'assigned', 'unassigned' and 'all'. If ownership-type is set", "'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'max_queue_depth': [ max_queue_depth,", "'array-name', [ basestring, 'None' ], False ], 'ownership_type': [ ownership_type, 'ownership-type', [ basestring,", "spaces The name of the array profile to update. :param vendor: The name", "and used in naming the array's LUNs. :param lun_queue_depth: The queue depth assigned", "displayed. If ownership-type is set to 'all', all array LUNs are displayed. Default:", "pairs) (127 chars max) \"\"\" return self.request( \"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth, 'max-queue-depth',", "the array port whose attributes changed :param wwnn: The WWNN of the array", "this array profile. :param model: The model number of the array. (16 chars)", "known to the controller. 
:param array_name: When specified, only the named array profile", "rdb_modify False - operation is rdb_create :param array_id: Primary key (system defined) for", "chars max) \"\"\" return self.request( \"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int,", "refer to this array. :param new_array_name: 28 character string, no spaces The new", "the array port to remove. :param wwnn: The WWNN of the array port", "array record. \"\"\" return self.request( \"storage-array-profile-change-notification\", { 'is_modify': [ is_modify, 'is-modify', [ bool,", "\"storage-array-ports-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], },", "port queue depth for all target ports on this array. :param vendor: The", "28 character string, no spaces The new name to assign to this array", "RDB to the D-Blade :param is_modify: A boolean value which indicates if it's", "options=None): \"\"\" Update an array profile with new or changed information. :param array_name:", "from storage_array_port_stats import StorageArrayPortStats # 13 properties from storage_array_stats_info import StorageArrayStatsInfo # 6", "# 3 properties class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None,", "'None' ], False ], }, { 'array-ports': [ StorageArrayPort, True ], } )", "management service in dotted-decimal format (for example, \"192.168.11.12\"). 
:param firmware: The firmware revision", "user supplied 4 character code used to refer to this array and used", "properties from storage_array_error_info import StorageArrayErrorInfo # 3 properties class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name,", "False ], 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False ], 'array_id':", "vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\" Update an array profile with new", "this array. :param new_array_name: 28 character string, no spaces The new name to", "list of online array ports and their associated arrays :param array_name: When supplied,", ":param node: Obtain array LUN pathing information for a specified node. :param ownership_type:", "], False ], }, { 'array-profile': [ StorageArrayProfile, False ], } ) def", "}, { 'array-ports': [ StorageArrayPort, True ], } ) def storage_array_luns_list_info(self, array_name, ownership_type=None):", "False ], }, { } ) def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal the", "of array's target port (64 chars). :param array_name: The name of the array", "# 13 properties from storage_array_stats_info import StorageArrayStatsInfo # 6 properties from arrayfailovertype import", "chars max) \"\"\" return self.request( \"storage-array-rename\", { 'array_name': [ array_name, 'array-name', [ basestring,", "their existing values. :param array_name: 28 character string, no spaces The name of", "True ], } ) def storage_array_ports_list_info(self, array_name=None): \"\"\" generate a list of online", "def storage_array_port_stats_list_info(self): \"\"\" return stats for array ports \"\"\" return self.request( \"storage-array-port-stats-list-info\", {", "0 properties from arrayerrortype import Arrayerrortype # 0 properties from lunownershipfiltertype import Lunownershipfiltertype", "\"\"\" Update an array profile with new or changed information. 
Arguments passed in", "specified, only the named array profile record will be returned. (28 chars) \"\"\"", "remove. :param wwnn: The WWNN of the array port to remove. :param array_id:", "Update an array profile with new or changed information. :param array_name: The name", "5 character user defined code used to refer to this array. :param new_array_name:", "assigned array LUNs are displayed. If ownership-type is set to 'unassigned' only unassigned", "to the D-Blade :param is_modify: A boolean value which indicates if it's an", "name of the array profile to update. (28 chars max) :param new_name: The", "max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], 'vendor': [ vendor, 'vendor', [", "'vendor', [ basestring, 'None' ], False ], 'network_address': [ network_address, 'network-address', [ basestring,", "basestring, 'None' ], False ], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [ int, 'None' ],", "[ StorageArrayPort, True ], } ) def storage_array_luns_list_info(self, array_name, ownership_type=None): \"\"\" Generate a", "[ bool, 'None' ], False ], 'array_id': [ array_id, 'array-id', [ int, 'None'", "ownership-type is set to 'unassigned' only unassigned array LUNs are displayed. If ownership-type", "basestring, 'None' ], False ], 'prefix': [ prefix, 'prefix', [ basestring, 'None' ],", "array_id): \"\"\" Signal the changes made in array port table to the D-Blade", "manufacturer. (8 chars max) :param is_upgrade_pending: Used to indicate that the specified array", "\"\"\" return self.request( \"storage-array-port-modify\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ],", ":param prefix: A unique 5 character user defined code used to refer to", "of the array port to remove. 
:param array_id: Primary key (system defined) for", "'array-name', [ basestring, 'None' ], False ], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int,", "(comma separated list of name/value pairs) (127 chars max) \"\"\" return self.request( \"storage-array-modify\",", "], }, { 'config-summary': [ StorageArrayConfigSummary, True ], } ) def storage_array_ports_list_info(self, array_name=None):", "If ownership-type is set to 'unassigned' only unassigned array LUNs are displayed. If", "the D-Blade :param wwpn: The WWPN of the array port whose attributes changed", "indicate that the specified array will under go an upgrade in the near", "chars) :param network_address: The I/O address of the array's SNMP management service in", "array LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-luns-list-info\", { 'array_name': [", "model=None, options=None): \"\"\" Update an array profile with new or changed information. :param", "(64 chars). :param wwnn: World wide node name of array's target port (64", "[ int, 'None' ], False ], }, { } ) def storage_array_port_change_notification(self, wwpn,", "[ basestring, 'None' ], False ], 'prefix': [ prefix, 'prefix', [ basestring, 'None'", "are returned. (28 chars) \"\"\" return self.request( \"storage-array-ports-list-info\", { 'array_name': [ array_name, 'array-name',", "of the array profile to update. (28 chars max) :param new_name: The new", "# 19 properties from storage_array_port import StorageArrayPort # 9 properties from connectiontype import", ") def storage_array_profile_change_notification(self, is_modify, array_id): \"\"\" Signal the changes made in array profile", "new name to assign to this array profile. 
:param model: The model number", "from connectiontype import Connectiontype # 0 properties from storage_array_stats_error_info import StorageArrayStatsErrorInfo # 3", "\"\"\" return self.request( \"storage-array-stats-list-info\", { }, { 'array-stat-info': [ StorageArrayStatsInfo, True ], }", "'firmware': [ firmware, 'firmware', [ basestring, 'None' ], False ], 'prefix': [ prefix,", "chars) \"\"\" return self.request( \"storage-array-update\", { 'vendor': [ vendor, 'vendor', [ basestring, 'None'", "is_modify, 'is-modify', [ bool, 'None' ], False ], 'array_id': [ array_id, 'array-id', [", "controller. :param array_name: When specified, only the named array profile record will be", "\"192.168.11.12\"). :param firmware: The firmware revision of the array being entered. (64 chars)", "Primary key (system defined) for the array record. \"\"\" return self.request( \"storage-array-profile-change-notification\", {", "arrayfailovertype import Arrayfailovertype # 0 properties from arrayerrortype import Arrayerrortype # 0 properties", "{ 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False ], 'array_id': [", "storage_array_profile_sync(self): \"\"\" Purge a node's array profile database, thereby synchronizing it with the", "to assign to this array profile. :param model: The model number of the", "profile specific options. (comma separated list of name/value pairs) (128 chars) \"\"\" return", "to refer to this array and used in naming the array's LUNs. :param", "import Lunownershipfiltertype # 0 properties from storage_array_profile import StorageArrayProfile # 16 properties from", "in naming the array's LUNs. :param lun_queue_depth: The queue depth assigned to array", "pathing (connectivity) information. 
:param node: Obtain array LUN pathing information for a specified", "[ lun_queue_depth, 'lun-queue-depth', [ int, 'None' ], False ], 'model': [ model, 'model',", "[ ownership_type, 'ownership-type', [ basestring, 'None' ], False ], }, { 'config-summary': [", "storage_array_profile import StorageArrayProfile # 16 properties from storage_array_port_stats_info import StorageArrayPortStatsInfo # 19 properties", "storage_array_list_info(self, array_name=None): \"\"\" Retrieves a list of all array profiles known to the", "Rename an array profile :param array_name: The name of the array profile to", "the changes made in array profile RDB to the D-Blade :param is_modify: A", "'ownership_type': [ ownership_type, 'ownership-type', [ basestring, 'None' ], False ], }, { 'config-summary':", "ports \"\"\" return self.request( \"storage-array-port-stats-list-info\", { }, { 'port-stat-info': [ StorageArrayPortStatsInfo, True ],", "passed in will be used to update the profile. Arguments not passed will", "import Arrayerrortype # 0 properties from lunownershipfiltertype import Lunownershipfiltertype # 0 properties from", "self.request( \"storage-array-profile-sync\", { }, { } ) def storage_array_port_stats_list_info(self): \"\"\" return stats for", "array profile to update. :param vendor: The name of the array's manufacturer. (8", "the array's manufacturer. (8 chars max) :param is_upgrade_pending: Used to indicate that the", "with new or changed information. :param wwpn: World wide port name of array's", "profile. Arguments not passed will keep their existing values. :param array_name: 28 character", "specific options. (comma separated list of name/value pairs) (127 chars max) \"\"\" return", "True ], } ) def storage_array_profile_sync(self): \"\"\" Purge a node's array profile database,", "profiles known to the controller. 
:param array_name: When specified, only the named array", "} ) def storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id): \"\"\" Signal the changes made", "The name of the array's manufacturer. (8 chars max) :param is_upgrade_pending: Used to", "an array profile with new or changed information. :param array_name: The name of", "profile to update. :param vendor: The name of the array's manufacturer. (8 chars)", "(16 chars) :param options: Array profile specific options. (comma separated list of name/value", "The name of the array's manufacturer. (8 chars) :param network_address: The I/O address", "], } ) def storage_array_update(self, array_name, vendor=None, network_address=None, firmware=None, prefix=None, new_array_name=None, model=None, options=None):", "return self.request( \"storage-array-profile-change-notification\", { 'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False", "array LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-get-config-summary\", { 'node': [", "node=None, ownership_type=None): \"\"\" Generates a high level summary of array LUN pathing (connectivity)", "node's array profile database, thereby synchronizing it with the RDB. \"\"\" return self.request(", "update. (28 chars max) :param new_name: The new name to assign to this", "to select which array LUNs are displayed. Valid values for ownership-type are 'assigned',", "the array port to remove. :param array_id: Primary key (system defined) for the", "'vendor': [ vendor, 'vendor', [ basestring, 'None' ], False ], 'network_address': [ network_address,", "of name/value pairs) (128 chars) \"\"\" return self.request( \"storage-array-update\", { 'vendor': [ vendor,", "options: Array profile specific options. (comma separated list of name/value pairs) (127 chars", "StorageArrayConfigSummary, True ], } ) def storage_array_ports_list_info(self, array_name=None): \"\"\" generate a list of", "LUNs from this array. 
:param model: The model of the array. (16 chars", "bool, 'None' ], False ], 'array_id': [ array_id, 'array-id', [ int, 'None' ],", ":param wwnn: The WWNN of the array port whose attributes changed :param is_modify:", "{ } ) def storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id): \"\"\" Signal the changes", "wwpn: The WWPN of the array port to remove. :param wwnn: The WWNN", "chars max) :param is_upgrade_pending: Used to indicate that the specified array will under", "D-Blade :param wwpn: The WWPN of the array port whose attributes changed :param", "an rdb_modify operation. True - operation is rdb_modify False - operation is rdb_create", "only the named array profile record will be returned. (28 chars) \"\"\" return", "queue depth for all target ports on this array. :param vendor: The name", "profile record will be returned. (28 chars) \"\"\" return self.request( \"storage-array-list-info\", { 'array_name':", "\"\"\" return self.request( \"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth, 'max-queue-depth', [ int, 'None' ],", "all array LUNs are displayed. Default: 'all'. \"\"\" return self.request( \"storage-array-get-config-summary\", { 'node':", "remove. :param array_id: Primary key (system defined) for the array record. \"\"\" return", "array profile. (28 chars max) \"\"\" return self.request( \"storage-array-rename\", { 'array_name': [ array_name,", "array_name: 28 character string, no spaces The name of the array profile to", "Purge a node's array profile database, thereby synchronizing it with the RDB. \"\"\"", "wwnn, array_id): \"\"\" Remove one port from an array profile record :param wwpn:", "records for the named array are returned. (28 chars) \"\"\" return self.request( \"storage-array-ports-list-info\",", "it's an rdb_modify operation. True - operation is rdb_modify False - operation is", "False ], }, { } ) def storage_array_port_remove(self, wwpn, wwnn, array_id): \"\"\" Remove", "RDB. 
\"\"\" return self.request( \"storage-array-profile-sync\", { }, { } ) def storage_array_port_stats_list_info(self): \"\"\"", "new_name: The new name to assign to this array profile. (28 chars max)", "key (system defined) for the array record. \"\"\" return self.request( \"storage-array-port-change-notification\", { 'wwpn':", "import StorageArrayPortStatsInfo # 19 properties from storage_array_port import StorageArrayPort # 9 properties from", "\"storage-array-port-modify\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False ], 'wwnn':", "# 9 properties from connectiontype import Connectiontype # 0 properties from storage_array_stats_error_info import", "'unassigned' only unassigned array LUNs are displayed. If ownership-type is set to 'all',", "array LUNs are displayed. Valid values for ownership-type are 'assigned', 'unassigned' and 'all'.", "{ }, { 'port-stat-info': [ StorageArrayPortStatsInfo, True ], } ) def storage_array_update(self, array_name,", "(64 chars) :param prefix: A unique 5 character user defined code used to", "19 properties from storage_array_port import StorageArrayPort # 9 properties from connectiontype import Connectiontype", "[ max_queue_depth, 'max-queue-depth', [ int, 'None' ], False ], 'vendor': [ vendor, 'vendor',", "array_id: Primary key (system defined) for the array record. \"\"\" return self.request( \"storage-array-profile-change-notification\",", "max_queue_depth=None, vendor=None, is_upgrade_pending=None, prefix=None, lun_queue_depth=None, model=None, options=None): \"\"\" Update an array profile with", "whose attributes changed :param wwnn: The WWNN of the array port whose attributes", "A unique user supplied 4 character code used to refer to this array", "return self.request( \"storage-array-profile-sync\", { }, { } ) def storage_array_port_stats_list_info(self): \"\"\" return stats", "with new or changed information. Arguments passed in will be used to update", "name to assign to this array profile. 
(28 chars max) \"\"\" return self.request(", "False ], 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'options':", "'is_modify': [ is_modify, 'is-modify', [ bool, 'None' ], False ], 'array_id': [ array_id,", "all array profiles known to the controller. :param array_name: When specified, only the", "False ], 'ownership_type': [ ownership_type, 'ownership-type', [ basestring, 'None' ], False ], },", "to update. :param vendor: The name of the array's manufacturer. (8 chars) :param", "profile to update. (28 chars max) :param new_name: The new name to assign", "} ) def storage_array_stats_list_info(self): \"\"\" Used to get dynamic information about backend arrays.", "of the array's manufacturer. (8 chars max) :param is_upgrade_pending: Used to indicate that", "[ wwnn, 'wwnn', [ basestring, 'None' ], False ], 'array_id': [ array_id, 'array-id',", "under go an upgrade in the near future. :param prefix: A unique user", "array_name, 'array-name', [ basestring, 'None' ], False ], 'options': [ options, 'options', [", "Retrieves a list of all array profiles known to the controller. :param array_name:", "[ array_name, 'array-name', [ basestring, 'None' ], False ], 'max_queue_depth': [ max_queue_depth, 'max-queue-depth',", "\"storage-array-luns-list-info\", { 'array_name': [ array_name, 'array-name', [ basestring, 'None' ], False ], 'ownership_type':", "record will be returned. (28 chars) \"\"\" return self.request( \"storage-array-list-info\", { 'array_name': [", "import StorageArrayErrorInfo # 3 properties class StorageArrayConnection(NaConnection): def storage_array_modify(self, array_name, max_queue_depth=None, vendor=None, is_upgrade_pending=None,", "array port to remove. :param array_id: Primary key (system defined) for the array", "chars) :param options: Array profile specific options. (comma separated list of name/value pairs)", "are displayed. 
If ownership-type is set to 'all', all array LUNs are displayed.", "properties from lunownershipfiltertype import Lunownershipfiltertype # 0 properties from storage_array_profile import StorageArrayProfile #", "go an upgrade in the near future. :param prefix: A unique user supplied", "basestring, 'None' ], False ], 'wwnn': [ wwnn, 'wwnn', [ basestring, 'None' ],", "{ }, { } ) def storage_array_port_stats_list_info(self): \"\"\" return stats for array ports", "], }, { } ) def storage_array_port_change_notification(self, wwpn, wwnn, is_modify, array_id): \"\"\" Signal", "'all'. If ownership-type is set to 'assigned' only assigned array LUNs are displayed.", "}, { } ) def storage_array_rename(self, array_name, new_name): \"\"\" Rename an array profile", "name/value pairs) (127 chars max) \"\"\" return self.request( \"storage-array-modify\", { 'max_queue_depth': [ max_queue_depth,", "the array record. \"\"\" return self.request( \"storage-array-port-remove\", { 'wwpn': [ wwpn, 'wwpn', [", "Obtain array LUN pathing information for a specified node. :param ownership_type: Option that", "], }, { } ) def storage_array_stats_list_info(self): \"\"\" Used to get dynamic information", "[ array_id, 'array-id', [ int, 'None' ], False ], }, { } )", "of the array profile to list array LUN information for. (28 chars) :param", "(64 chars). :param array_name: The name of the array profile to update. (28", "0 properties from storage_array_profile import StorageArrayProfile # 16 properties from storage_array_port_stats_info import StorageArrayPortStatsInfo", "\"storage-array-port-remove\", { 'wwpn': [ wwpn, 'wwpn', [ basestring, 'None' ], False ], 'wwnn':", "or changed information. 
:param array_name: The name of the array profile to update.", "new_array_name=None, model=None, options=None): \"\"\" Update an array profile with new or changed information.", "False ], 'lun_queue_depth': [ lun_queue_depth, 'lun-queue-depth', [ int, 'None' ], False ], 'model':", "False - operation is rdb_create :param array_id: Primary key (system defined) for the", "which indicates if it's an rdb_modify operation. True - operation is rdb_modify False", "# 16 properties from storage_array_port_stats_info import StorageArrayPortStatsInfo # 19 properties from storage_array_port import" ]
[ "print('Test Score: %.2f RMSE' % (testScore)) # shift train predictions for plotting trainPredictPlot", "= epochs, verbose = 1, shuffle = True) #make predictions trainPredict = model.predict(trainX,", "from sklearn.preprocessing import MinMaxScaler from sklearn.metrics import mean_squared_error from keras.models import Sequential import", "= dataset[train_size:, :] #convert an array values into a dataset matrix for LSTM", "# plot baseline and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the most important", "dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX = np.array(dataX) dataY = np.array(dataY) return dataX, dataY", "- train_size #print(test_size) train_set = dataset[0:train_size, :] test_set = dataset[train_size:, :] #convert an", "MinMaxScaler(feature_range = feature_range) dataset = scaler.fit_transform(dataset) #split the dataset into training and test", "RMSE' % (trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f RMSE' % (testScore))", "\"\"\" Created on Tue Oct 6 16:23:04 2020 @author: Admin \"\"\" import numpy", "import pandas as pd import math import os from keras.layers import Dense from", "Score: %.2f RMSE' % (trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f RMSE'", "time steps trainX, trainY = create_dataset(train_set, look_back) testX, testY = create_dataset(test_set, look_back) #reshape", "= 1 epochs = 100 lr = 0.001 optimizer = Adam(lr = lr,", "# -*- coding: utf-8 -*- from __future__ import print_function \"\"\" Created on Tue", "import matplotlib.pyplot as plt #load data filename = 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename)", "6 16:23:04 2020 @author: Admin \"\"\" import numpy as np import pandas as", "create_dataset(dataset, look_back): dataX = [] dataY = [] for i in range(len(dataset) -", 
"os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath, usecols = [1], engine = 'python') dataset =", "trainX, trainY = create_dataset(train_set, look_back) testX, testY = create_dataset(test_set, look_back) #reshape input to", "LSTM input_shape = (time_steps, features) lstm_neurons = 4 #lstm_neurons is a hyper-parameter dense_neurons", "keras.optimizers import Adam from sklearn.preprocessing import MinMaxScaler from sklearn.metrics import mean_squared_error from keras.models", "trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f RMSE' % (trainScore)) testScore = math.sqrt(mean_squared_error(testY[0],", "test predictions for plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :]", "as pd import math import os from keras.layers import Dense from keras.layers import", "predictions for plotting trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] =", "= Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences = False)) model.add(Dense(dense_neurons, activation = 'linear'))", "dataY = np.array(dataY) return dataX, dataY look_back = 1 #look_back = time_steps: the", "dataY look_back = 1 #look_back = time_steps: the number of previous time steps", "= lr, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-8, decay =", "Sequential import matplotlib.pyplot as plt #load data filename = 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(),", "True) #make predictions trainPredict = model.predict(trainX, batch_size = batch_size) testPredict = model.predict(testX, batch_size", "dataset[i:(i+look_back), 0] b = dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX = np.array(dataX) dataY =", "\"\"\" import numpy as np import pandas as pd import math import os", "dataframe.values #convert dataframe to 
numpy array dataset = dataset.astype('float32') #the shape of dataset:", "is look_back and batch_size researchers should try few times to determine the best", "look_back features = dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX = np.reshape(testX,", "np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict # shift test predictions for plotting testPredictPlot =", "[1], engine = 'python') dataset = dataframe.values #convert dataframe to numpy array dataset", "os from keras.layers import Dense from keras.layers import LSTM from keras.optimizers import Adam", "= Adam(lr = lr, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-8,", "LSTM def create_dataset(dataset, look_back): dataX = [] dataY = [] for i in", "testY = create_dataset(test_set, look_back) #reshape input to be [samples, time_steps, features] time_steps =", "print('Train Score: %.2f RMSE' % (trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f", "math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f RMSE' % (trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test", "and fit the LSTM input_shape = (time_steps, features) lstm_neurons = 4 #lstm_neurons is", "= scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY]) # calculate", "= model.predict(testX, batch_size = batch_size) # invert predictions trainPredict = scaler.inverse_transform(trainPredict) trainY =", "for i in range(len(dataset) - look_back - 1): a = dataset[i:(i+look_back), 0] b", "from keras.layers import Dense from keras.layers import LSTM from keras.optimizers import Adam from", "keras.layers import Dense from keras.layers import LSTM from keras.optimizers import Adam from sklearn.preprocessing", 
"testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict # plot baseline and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show()", "matplotlib.pyplot as plt #load data filename = 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename) dataframe", "import math import os from keras.layers import Dense from keras.layers import LSTM from", "predictions for plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] =", "%.2f RMSE' % (testScore)) # shift train predictions for plotting trainPredictPlot = np.empty_like(dataset)", "time_steps, features)) testX = np.reshape(testX, (testX.shape[0], time_steps, features)) #create and fit the LSTM", "batch_size) # invert predictions trainPredict = scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict)", "dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX = np.reshape(testX, (testX.shape[0], time_steps, features))", "#the shape of dataset: num_samples, features #normalise the dataset feature_range = (0, 1)", "a dataset matrix for LSTM def create_dataset(dataset, look_back): dataX = [] dataY =", "testPredict # plot baseline and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the most", "from keras.optimizers import Adam from sklearn.preprocessing import MinMaxScaler from sklearn.metrics import mean_squared_error from", "= 1, shuffle = True) #make predictions trainPredict = model.predict(trainX, batch_size = batch_size)", "dataframe = pd.read_csv(filepath, usecols = [1], engine = 'python') dataset = dataframe.values #convert", "= input_shape, return_sequences = False)) model.add(Dense(dense_neurons, activation = 'linear')) 
model.compile(loss = 'mean_squared_error', optimizer", "Adam from sklearn.preprocessing import MinMaxScaler from sklearn.metrics import mean_squared_error from keras.models import Sequential", "* i_split) #print(train_size) test_size = len(dataset) - train_size #print(test_size) train_set = dataset[0:train_size, :]", "values into a dataset matrix for LSTM def create_dataset(dataset, look_back): dataX = []", "shuffle = True) #make predictions trainPredict = model.predict(trainX, batch_size = batch_size) testPredict =", "of trainY(= 1) batch_size = 1 epochs = 100 lr = 0.001 optimizer", "= 1 #dense_neurions is equal to the shape of trainY(= 1) batch_size =", "False)) model.add(Dense(dense_neurons, activation = 'linear')) model.compile(loss = 'mean_squared_error', optimizer = optimizer) model.fit(trainX, trainY,", "plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the most important hyper-parameter is look_back and batch_size", "set i_split = 0.8 train_size = int(len(dataset) * i_split) #print(train_size) test_size = len(dataset)", "trainPredict = model.predict(trainX, batch_size = batch_size) testPredict = model.predict(testX, batch_size = batch_size) #", "trainPredict[:,0])) print('Train Score: %.2f RMSE' % (trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score:", "an array values into a dataset matrix for LSTM def create_dataset(dataset, look_back): dataX", "np.empty_like(dataset) testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict # plot baseline and", "[] for i in range(len(dataset) - look_back - 1): a = dataset[i:(i+look_back), 0]", "0.0, amsgrad = True) model = Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences =", "def create_dataset(dataset, look_back): dataX = [] dataY = [] for i in range(len(dataset)", "input_shape = (time_steps, features) lstm_neurons = 4 
#lstm_neurons is a hyper-parameter dense_neurons =", "return dataX, dataY look_back = 1 #look_back = time_steps: the number of previous", "model.add(Dense(dense_neurons, activation = 'linear')) model.compile(loss = 'mean_squared_error', optimizer = optimizer) model.fit(trainX, trainY, batch_size", "dataX = np.array(dataX) dataY = np.array(dataY) return dataX, dataY look_back = 1 #look_back", ":] = testPredict # plot baseline and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() '''", "root mean squared error trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f RMSE' %", "look_back) #reshape input to be [samples, time_steps, features] time_steps = look_back features =", "#lstm_neurons is a hyper-parameter dense_neurons = 1 #dense_neurions is equal to the shape", "data filename = 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath, usecols =", "shift test predictions for plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1,", "as plt #load data filename = 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename) dataframe =", "is a hyper-parameter dense_neurons = 1 #dense_neurions is equal to the shape of", "#reshape input to be [samples, time_steps, features] time_steps = look_back features = dataset.shape[1]", "(time_steps, features) lstm_neurons = 4 #lstm_neurons is a hyper-parameter dense_neurons = 1 #dense_neurions", "model.fit(trainX, trainY, batch_size = batch_size, epochs = epochs, verbose = 1, shuffle =", "= os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath, usecols = [1], engine = 'python') dataset", "np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict # plot baseline and predictions 
plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot)", "= testPredict # plot baseline and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the", "= scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY]) # calculate root mean squared", "previous time steps trainX, trainY = create_dataset(train_set, look_back) testX, testY = create_dataset(test_set, look_back)", "i in range(len(dataset) - look_back - 1): a = dataset[i:(i+look_back), 0] b =", "time_steps, features] time_steps = look_back features = dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0], time_steps,", "create_dataset(test_set, look_back) #reshape input to be [samples, time_steps, features] time_steps = look_back features", "a hyper-parameter dense_neurons = 1 #dense_neurions is equal to the shape of trainY(=", "= [] for i in range(len(dataset) - look_back - 1): a = dataset[i:(i+look_back),", "predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the most important hyper-parameter is look_back and", "= dataset[0:train_size, :] test_set = dataset[train_size:, :] #convert an array values into a", "return_sequences = False)) model.add(Dense(dense_neurons, activation = 'linear')) model.compile(loss = 'mean_squared_error', optimizer = optimizer)", "__future__ import print_function \"\"\" Created on Tue Oct 6 16:23:04 2020 @author: Admin", "= scaler.inverse_transform([testY]) # calculate root mean squared error trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train", "#look_back = time_steps: the number of previous time steps trainX, trainY = create_dataset(train_set,", "time_steps = look_back features = dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX", 
"'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath, usecols = [1], engine =", "scaler = MinMaxScaler(feature_range = feature_range) dataset = scaler.fit_transform(dataset) #split the dataset into training", "-*- from __future__ import print_function \"\"\" Created on Tue Oct 6 16:23:04 2020", "lr, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-8, decay = 0.0,", "plt.show() ''' the most important hyper-parameter is look_back and batch_size researchers should try", "(trainX.shape[0], time_steps, features)) testX = np.reshape(testX, (testX.shape[0], time_steps, features)) #create and fit the", "testY = scaler.inverse_transform([testY]) # calculate root mean squared error trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0]))", "Adam(lr = lr, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-8, decay", "% (trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f RMSE' % (testScore)) #", ":] test_set = dataset[train_size:, :] #convert an array values into a dataset matrix", "= batch_size) testPredict = model.predict(testX, batch_size = batch_size) # invert predictions trainPredict =", "for plotting trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict", "1, shuffle = True) #make predictions trainPredict = model.predict(trainX, batch_size = batch_size) testPredict", "- 1): a = dataset[i:(i+look_back), 0] b = dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX", "= time_steps: the number of previous time steps trainX, trainY = create_dataset(train_set, look_back)", "from sklearn.metrics import mean_squared_error from keras.models import Sequential import matplotlib.pyplot as plt #load", "activation = 'linear')) model.compile(loss = 'mean_squared_error', optimizer = optimizer) model.fit(trainX, trainY, batch_size =", "model.predict(testX, 
batch_size = batch_size) # invert predictions trainPredict = scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY])", "input to be [samples, time_steps, features] time_steps = look_back features = dataset.shape[1] trainX", "True) model = Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences = False)) model.add(Dense(dense_neurons, activation", "= math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f RMSE' % (trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0]))", "dataframe to numpy array dataset = dataset.astype('float32') #the shape of dataset: num_samples, features", "trainY, batch_size = batch_size, epochs = epochs, verbose = 1, shuffle = True)", "batch_size = batch_size, epochs = epochs, verbose = 1, shuffle = True) #make", "= 0.001 optimizer = Adam(lr = lr, beta_1 = 0.9, beta_2 = 0.999,", "the dataset into training and test set i_split = 0.8 train_size = int(len(dataset)", "import os from keras.layers import Dense from keras.layers import LSTM from keras.optimizers import", "np.array(dataY) return dataX, dataY look_back = 1 #look_back = time_steps: the number of", "Score: %.2f RMSE' % (testScore)) # shift train predictions for plotting trainPredictPlot =", "input_shape, return_sequences = False)) model.add(Dense(dense_neurons, activation = 'linear')) model.compile(loss = 'mean_squared_error', optimizer =", "invert predictions trainPredict = scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY =", "# invert predictions trainPredict = scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY", "0] b = dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX = np.array(dataX) dataY = np.array(dataY)", "testPredict = scaler.inverse_transform(testPredict) testY = 
scaler.inverse_transform([testY]) # calculate root mean squared error trainScore", "shift train predictions for plotting trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back,", "RMSE' % (testScore)) # shift train predictions for plotting trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:,", "import LSTM from keras.optimizers import Adam from sklearn.preprocessing import MinMaxScaler from sklearn.metrics import", "number of previous time steps trainX, trainY = create_dataset(train_set, look_back) testX, testY =", "testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict # plot baseline and predictions", ":] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict # plot baseline and predictions plt.plot(scaler.inverse_transform(dataset))", "#convert an array values into a dataset matrix for LSTM def create_dataset(dataset, look_back):", "trainY = create_dataset(train_set, look_back) testX, testY = create_dataset(test_set, look_back) #reshape input to be", "beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-8, decay = 0.0, amsgrad", "= 1e-8, decay = 0.0, amsgrad = True) model = Sequential() model.add(LSTM(lstm_neurons, input_shape", "epsilon = 1e-8, decay = 0.0, amsgrad = True) model = Sequential() model.add(LSTM(lstm_neurons,", "0.9, beta_2 = 0.999, epsilon = 1e-8, decay = 0.0, amsgrad = True)", "scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY]) # calculate root", "and test set i_split = 0.8 train_size = int(len(dataset) * i_split) #print(train_size) test_size", "batch_size) testPredict = model.predict(testX, batch_size = batch_size) # invert predictions trainPredict = scaler.inverse_transform(trainPredict)", "pd import math import os from keras.layers import Dense from 
keras.layers import LSTM", "= 1 #look_back = time_steps: the number of previous time steps trainX, trainY", "dataset into training and test set i_split = 0.8 train_size = int(len(dataset) *", "trainY(= 1) batch_size = 1 epochs = 100 lr = 0.001 optimizer =", "features] time_steps = look_back features = dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0], time_steps, features))", "from keras.models import Sequential import matplotlib.pyplot as plt #load data filename = 'international-airline-passengers.csv'", "= math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f RMSE' % (testScore)) # shift train predictions", "= len(dataset) - train_size #print(test_size) train_set = dataset[0:train_size, :] test_set = dataset[train_size:, :]", "trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict # shift test predictions for plotting testPredictPlot = np.empty_like(dataset)", "plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict #", "= trainPredict # shift test predictions for plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :]", "LSTM from keras.optimizers import Adam from sklearn.preprocessing import MinMaxScaler from sklearn.metrics import mean_squared_error", ":] #convert an array values into a dataset matrix for LSTM def create_dataset(dataset,", "from __future__ import print_function \"\"\" Created on Tue Oct 6 16:23:04 2020 @author:", "= dataframe.values #convert dataframe to numpy array dataset = dataset.astype('float32') #the shape of", "= 'python') dataset = dataframe.values #convert dataframe to numpy array dataset = dataset.astype('float32')", "Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences = False)) model.add(Dense(dense_neurons, activation = 'linear')) model.compile(loss", "dataset[0:train_size, :] test_set = dataset[train_size:, :] 
#convert an array values into a dataset", "epochs = 100 lr = 0.001 optimizer = Adam(lr = lr, beta_1 =", "test set i_split = 0.8 train_size = int(len(dataset) * i_split) #print(train_size) test_size =", "optimizer) model.fit(trainX, trainY, batch_size = batch_size, epochs = epochs, verbose = 1, shuffle", "predictions trainPredict = scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY])", "= [] dataY = [] for i in range(len(dataset) - look_back - 1):", "testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f RMSE' % (testScore)) # shift train", "look_back = 1 #look_back = time_steps: the number of previous time steps trainX,", "the most important hyper-parameter is look_back and batch_size researchers should try few times", "= dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX = np.array(dataX) dataY = np.array(dataY) return dataX,", "keras.models import Sequential import matplotlib.pyplot as plt #load data filename = 'international-airline-passengers.csv' filepath", "time_steps: the number of previous time steps trainX, trainY = create_dataset(train_set, look_back) testX,", "np.empty_like(dataset) trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict # shift test predictions", "#load data filename = 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath, usecols", "trainPredict = scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY]) #", "MinMaxScaler from sklearn.metrics import mean_squared_error from keras.models import Sequential import matplotlib.pyplot as plt", "= np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict # shift test 
predictions for plotting testPredictPlot", "b = dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX = np.array(dataX) dataY = np.array(dataY) return", "for plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict", "usecols = [1], engine = 'python') dataset = dataframe.values #convert dataframe to numpy", "Admin \"\"\" import numpy as np import pandas as pd import math import", "numpy as np import pandas as pd import math import os from keras.layers", "print_function \"\"\" Created on Tue Oct 6 16:23:04 2020 @author: Admin \"\"\" import", "train_size #print(test_size) train_set = dataset[0:train_size, :] test_set = dataset[train_size:, :] #convert an array", "= (time_steps, features) lstm_neurons = 4 #lstm_neurons is a hyper-parameter dense_neurons = 1", "model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences = False)) model.add(Dense(dense_neurons, activation = 'linear')) model.compile(loss =", "1 #dense_neurions is equal to the shape of trainY(= 1) batch_size = 1", "filepath = os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath, usecols = [1], engine = 'python')", "= np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX = np.reshape(testX, (testX.shape[0], time_steps, features)) #create and", "plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the most important hyper-parameter is look_back and batch_size researchers", "baseline and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the most important hyper-parameter is", "testPredict[:,0])) print('Test Score: %.2f RMSE' % (testScore)) # shift train predictions for plotting", "scaler.inverse_transform([testY]) # calculate root mean squared error trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score:", "%.2f RMSE' % 
(trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f RMSE' %", "important hyper-parameter is look_back and batch_size researchers should try few times to determine", "training and test set i_split = 0.8 train_size = int(len(dataset) * i_split) #print(train_size)", "= True) model = Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences = False)) model.add(Dense(dense_neurons,", "pandas as pd import math import os from keras.layers import Dense from keras.layers", "dataset[train_size:, :] #convert an array values into a dataset matrix for LSTM def", "= True) #make predictions trainPredict = model.predict(trainX, batch_size = batch_size) testPredict = model.predict(testX,", "testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict # plot", "= 0.8 train_size = int(len(dataset) * i_split) #print(train_size) test_size = len(dataset) - train_size", "(testScore)) # shift train predictions for plotting trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :] =", "model.compile(loss = 'mean_squared_error', optimizer = optimizer) model.fit(trainX, trainY, batch_size = batch_size, epochs =", "in range(len(dataset) - look_back - 1): a = dataset[i:(i+look_back), 0] b = dataset[i+look_back,", "#convert dataframe to numpy array dataset = dataset.astype('float32') #the shape of dataset: num_samples,", "range(len(dataset) - look_back - 1): a = dataset[i:(i+look_back), 0] b = dataset[i+look_back, 0]", "= scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY]) # calculate root mean squared error trainScore =", "= np.empty_like(dataset) trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict # shift test", "0.999, epsilon = 1e-8, decay = 0.0, amsgrad = True) model = Sequential()", "shape of trainY(= 1) batch_size = 1 epochs 
= 100 lr = 0.001", "calculate root mean squared error trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f RMSE'", "batch_size = batch_size) testPredict = model.predict(testX, batch_size = batch_size) # invert predictions trainPredict", "look_back and batch_size researchers should try few times to determine the best values", "of previous time steps trainX, trainY = create_dataset(train_set, look_back) testX, testY = create_dataset(test_set,", "= batch_size) # invert predictions trainPredict = scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict =", "for LSTM def create_dataset(dataset, look_back): dataX = [] dataY = [] for i", "beta_2 = 0.999, epsilon = 1e-8, decay = 0.0, amsgrad = True) model", "= int(len(dataset) * i_split) #print(train_size) test_size = len(dataset) - train_size #print(test_size) train_set =", "features) lstm_neurons = 4 #lstm_neurons is a hyper-parameter dense_neurons = 1 #dense_neurions is", "= create_dataset(train_set, look_back) testX, testY = create_dataset(test_set, look_back) #reshape input to be [samples,", ":] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict # shift test predictions for plotting", "= look_back features = dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX =", "num_samples, features #normalise the dataset feature_range = (0, 1) scaler = MinMaxScaler(feature_range =", "train predictions for plotting trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :]", "of dataset: num_samples, features #normalise the dataset feature_range = (0, 1) scaler =", "int(len(dataset) * i_split) #print(train_size) test_size = len(dataset) - train_size #print(test_size) train_set = dataset[0:train_size,", "import Adam from sklearn.preprocessing import MinMaxScaler from sklearn.metrics import mean_squared_error 
from keras.models import", ":] = trainPredict # shift test predictions for plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:,", "= feature_range) dataset = scaler.fit_transform(dataset) #split the dataset into training and test set", "feature_range) dataset = scaler.fit_transform(dataset) #split the dataset into training and test set i_split", "to numpy array dataset = dataset.astype('float32') #the shape of dataset: num_samples, features #normalise", "dataset: num_samples, features #normalise the dataset feature_range = (0, 1) scaler = MinMaxScaler(feature_range", "import numpy as np import pandas as pd import math import os from", "engine = 'python') dataset = dataframe.values #convert dataframe to numpy array dataset =", "(0, 1) scaler = MinMaxScaler(feature_range = feature_range) dataset = scaler.fit_transform(dataset) #split the dataset", "mean_squared_error from keras.models import Sequential import matplotlib.pyplot as plt #load data filename =", "model.predict(trainX, batch_size = batch_size) testPredict = model.predict(testX, batch_size = batch_size) # invert predictions", "array values into a dataset matrix for LSTM def create_dataset(dataset, look_back): dataX =", "optimizer = Adam(lr = lr, beta_1 = 0.9, beta_2 = 0.999, epsilon =", "= 4 #lstm_neurons is a hyper-parameter dense_neurons = 1 #dense_neurions is equal to", "dataX, dataY look_back = 1 #look_back = time_steps: the number of previous time", "np import pandas as pd import math import os from keras.layers import Dense", "dataset feature_range = (0, 1) scaler = MinMaxScaler(feature_range = feature_range) dataset = scaler.fit_transform(dataset)", "the dataset feature_range = (0, 1) scaler = MinMaxScaler(feature_range = feature_range) dataset =", "- look_back - 1): a = dataset[i:(i+look_back), 0] b = dataset[i+look_back, 0] dataX.append(a)", "trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict # shift test predictions for", 
"testX = np.reshape(testX, (testX.shape[0], time_steps, features)) #create and fit the LSTM input_shape =", "= MinMaxScaler(feature_range = feature_range) dataset = scaler.fit_transform(dataset) #split the dataset into training and", "= 'mean_squared_error', optimizer = optimizer) model.fit(trainX, trainY, batch_size = batch_size, epochs = epochs,", "= 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath, usecols = [1], engine", "testPredict = model.predict(testX, batch_size = batch_size) # invert predictions trainPredict = scaler.inverse_transform(trainPredict) trainY", "be [samples, time_steps, features] time_steps = look_back features = dataset.shape[1] trainX = np.reshape(trainX,", "trainY = scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY]) # calculate root mean", "model = Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences = False)) model.add(Dense(dense_neurons, activation =", "lr = 0.001 optimizer = Adam(lr = lr, beta_1 = 0.9, beta_2 =", "and batch_size researchers should try few times to determine the best values '''", "look_back - 1): a = dataset[i:(i+look_back), 0] b = dataset[i+look_back, 0] dataX.append(a) dataY.append(b)", "0.8 train_size = int(len(dataset) * i_split) #print(train_size) test_size = len(dataset) - train_size #print(test_size)", "coding: utf-8 -*- from __future__ import print_function \"\"\" Created on Tue Oct 6", "import print_function \"\"\" Created on Tue Oct 6 16:23:04 2020 @author: Admin \"\"\"", "sklearn.preprocessing import MinMaxScaler from sklearn.metrics import mean_squared_error from keras.models import Sequential import matplotlib.pyplot", "#normalise the dataset feature_range = (0, 1) scaler = MinMaxScaler(feature_range = feature_range) dataset", "create_dataset(train_set, look_back) testX, testY = create_dataset(test_set, look_back) #reshape input 
to be [samples, time_steps,", "'python') dataset = dataframe.values #convert dataframe to numpy array dataset = dataset.astype('float32') #the", "lstm_neurons = 4 #lstm_neurons is a hyper-parameter dense_neurons = 1 #dense_neurions is equal", "#split the dataset into training and test set i_split = 0.8 train_size =", "= 0.0, amsgrad = True) model = Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences", "on Tue Oct 6 16:23:04 2020 @author: Admin \"\"\" import numpy as np", "pd.read_csv(filepath, usecols = [1], engine = 'python') dataset = dataframe.values #convert dataframe to", "= False)) model.add(Dense(dense_neurons, activation = 'linear')) model.compile(loss = 'mean_squared_error', optimizer = optimizer) model.fit(trainX,", "1e-8, decay = 0.0, amsgrad = True) model = Sequential() model.add(LSTM(lstm_neurons, input_shape =", "np.reshape(testX, (testX.shape[0], time_steps, features)) #create and fit the LSTM input_shape = (time_steps, features)", "into training and test set i_split = 0.8 train_size = int(len(dataset) * i_split)", "= 'linear')) model.compile(loss = 'mean_squared_error', optimizer = optimizer) model.fit(trainX, trainY, batch_size = batch_size,", "2020 @author: Admin \"\"\" import numpy as np import pandas as pd import", "steps trainX, trainY = create_dataset(train_set, look_back) testX, testY = create_dataset(test_set, look_back) #reshape input", "plt #load data filename = 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath,", "import mean_squared_error from keras.models import Sequential import matplotlib.pyplot as plt #load data filename", "'mean_squared_error', optimizer = optimizer) model.fit(trainX, trainY, batch_size = batch_size, epochs = epochs, verbose", "is equal to the shape of trainY(= 1) batch_size = 1 epochs =", "= np.array(dataX) dataY = np.array(dataY) return dataX, dataY look_back = 1 #look_back =", "@author: Admin \"\"\" import numpy 
as np import pandas as pd import math", "import Sequential import matplotlib.pyplot as plt #load data filename = 'international-airline-passengers.csv' filepath =", "trainX = np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX = np.reshape(testX, (testX.shape[0], time_steps, features)) #create", "trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict # shift", "= np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict # plot baseline and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot)", "the LSTM input_shape = (time_steps, features) lstm_neurons = 4 #lstm_neurons is a hyper-parameter", "dataset = scaler.fit_transform(dataset) #split the dataset into training and test set i_split =", "features = dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX = np.reshape(testX, (testX.shape[0],", "#make predictions trainPredict = model.predict(trainX, batch_size = batch_size) testPredict = model.predict(testX, batch_size =", "feature_range = (0, 1) scaler = MinMaxScaler(feature_range = feature_range) dataset = scaler.fit_transform(dataset) #split", "test_set = dataset[train_size:, :] #convert an array values into a dataset matrix for", "4 #lstm_neurons is a hyper-parameter dense_neurons = 1 #dense_neurions is equal to the", "predictions trainPredict = model.predict(trainX, batch_size = batch_size) testPredict = model.predict(testX, batch_size = batch_size)", "filename) dataframe = pd.read_csv(filepath, usecols = [1], engine = 'python') dataset = dataframe.values", "and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the most important hyper-parameter is look_back", "train_set = dataset[0:train_size, :] test_set = dataset[train_size:, :] #convert an array values into", "error trainScore = 
math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f RMSE' % (trainScore)) testScore =", "''' the most important hyper-parameter is look_back and batch_size researchers should try few", "= np.array(dataY) return dataX, dataY look_back = 1 #look_back = time_steps: the number", "Tue Oct 6 16:23:04 2020 @author: Admin \"\"\" import numpy as np import", "= [1], engine = 'python') dataset = dataframe.values #convert dataframe to numpy array", "amsgrad = True) model = Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape, return_sequences = False))", "len(dataset) - train_size #print(test_size) train_set = dataset[0:train_size, :] test_set = dataset[train_size:, :] #convert", "train_size = int(len(dataset) * i_split) #print(train_size) test_size = len(dataset) - train_size #print(test_size) train_set", "Dense from keras.layers import LSTM from keras.optimizers import Adam from sklearn.preprocessing import MinMaxScaler", "#dense_neurions is equal to the shape of trainY(= 1) batch_size = 1 epochs", "dataY.append(b) dataX = np.array(dataX) dataY = np.array(dataY) return dataX, dataY look_back = 1", "as np import pandas as pd import math import os from keras.layers import", "scaler.fit_transform(dataset) #split the dataset into training and test set i_split = 0.8 train_size", "(testX.shape[0], time_steps, features)) #create and fit the LSTM input_shape = (time_steps, features) lstm_neurons", "hyper-parameter dense_neurons = 1 #dense_neurions is equal to the shape of trainY(= 1)", "i_split = 0.8 train_size = int(len(dataset) * i_split) #print(train_size) test_size = len(dataset) -", "dataset.astype('float32') #the shape of dataset: num_samples, features #normalise the dataset feature_range = (0,", "= (0, 1) scaler = MinMaxScaler(feature_range = feature_range) dataset = scaler.fit_transform(dataset) #split the", "from keras.layers import LSTM from keras.optimizers import Adam from sklearn.preprocessing import MinMaxScaler from", 
"(trainScore)) testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f RMSE' % (testScore)) # shift", "trainPredict # shift test predictions for plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :] =", "# shift train predictions for plotting trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :] = np.nan", "math import os from keras.layers import Dense from keras.layers import LSTM from keras.optimizers", "# calculate root mean squared error trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f", "equal to the shape of trainY(= 1) batch_size = 1 epochs = 100", "the shape of trainY(= 1) batch_size = 1 epochs = 100 lr =", "to the shape of trainY(= 1) batch_size = 1 epochs = 100 lr", "= dataset.astype('float32') #the shape of dataset: num_samples, features #normalise the dataset feature_range =", "= create_dataset(test_set, look_back) #reshape input to be [samples, time_steps, features] time_steps = look_back", "= batch_size, epochs = epochs, verbose = 1, shuffle = True) #make predictions", "Created on Tue Oct 6 16:23:04 2020 @author: Admin \"\"\" import numpy as", "the number of previous time steps trainX, trainY = create_dataset(train_set, look_back) testX, testY", "-*- coding: utf-8 -*- from __future__ import print_function \"\"\" Created on Tue Oct", "16:23:04 2020 @author: Admin \"\"\" import numpy as np import pandas as pd", "time_steps, features)) #create and fit the LSTM input_shape = (time_steps, features) lstm_neurons =", "# shift test predictions for plotting testPredictPlot = np.empty_like(dataset) testPredictPlot[:, :] = np.nan", "= pd.read_csv(filepath, usecols = [1], engine = 'python') dataset = dataframe.values #convert dataframe", "= dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX = np.reshape(testX, (testX.shape[0], time_steps,", "'linear')) model.compile(loss = 'mean_squared_error', optimizer = 
optimizer) model.fit(trainX, trainY, batch_size = batch_size, epochs", "= model.predict(trainX, batch_size = batch_size) testPredict = model.predict(testX, batch_size = batch_size) # invert", "0] dataX.append(a) dataY.append(b) dataX = np.array(dataX) dataY = np.array(dataY) return dataX, dataY look_back", "look_back): dataX = [] dataY = [] for i in range(len(dataset) - look_back", "1): a = dataset[i:(i+look_back), 0] b = dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX =", "sklearn.metrics import mean_squared_error from keras.models import Sequential import matplotlib.pyplot as plt #load data", "#create and fit the LSTM input_shape = (time_steps, features) lstm_neurons = 4 #lstm_neurons", "batch_size, epochs = epochs, verbose = 1, shuffle = True) #make predictions trainPredict", "features #normalise the dataset feature_range = (0, 1) scaler = MinMaxScaler(feature_range = feature_range)", "= dataset[i:(i+look_back), 0] b = dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX = np.array(dataX) dataY", "dataset = dataframe.values #convert dataframe to numpy array dataset = dataset.astype('float32') #the shape", "shape of dataset: num_samples, features #normalise the dataset feature_range = (0, 1) scaler", "1) batch_size = 1 epochs = 100 lr = 0.001 optimizer = Adam(lr", "dataY = [] for i in range(len(dataset) - look_back - 1): a =", "1) scaler = MinMaxScaler(feature_range = feature_range) dataset = scaler.fit_transform(dataset) #split the dataset into", "import MinMaxScaler from sklearn.metrics import mean_squared_error from keras.models import Sequential import matplotlib.pyplot as", "[samples, time_steps, features] time_steps = look_back features = dataset.shape[1] trainX = np.reshape(trainX, (trainX.shape[0],", "1 #look_back = time_steps: the number of previous time steps trainX, trainY =", "#print(train_size) test_size = len(dataset) - train_size #print(test_size) train_set = dataset[0:train_size, :] test_set =", "into a dataset matrix for 
LSTM def create_dataset(dataset, look_back): dataX = [] dataY", "testX, testY = create_dataset(test_set, look_back) #reshape input to be [samples, time_steps, features] time_steps", "#print(test_size) train_set = dataset[0:train_size, :] test_set = dataset[train_size:, :] #convert an array values", "= 100 lr = 0.001 optimizer = Adam(lr = lr, beta_1 = 0.9,", "= 0.999, epsilon = 1e-8, decay = 0.0, amsgrad = True) model =", "[] dataY = [] for i in range(len(dataset) - look_back - 1): a", "100 lr = 0.001 optimizer = Adam(lr = lr, beta_1 = 0.9, beta_2", "epochs, verbose = 1, shuffle = True) #make predictions trainPredict = model.predict(trainX, batch_size", "scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY]) # calculate root mean squared error trainScore = math.sqrt(mean_squared_error(trainY[0],", "hyper-parameter is look_back and batch_size researchers should try few times to determine the", "batch_size = batch_size) # invert predictions trainPredict = scaler.inverse_transform(trainPredict) trainY = scaler.inverse_transform([trainY]) testPredict", "batch_size = 1 epochs = 100 lr = 0.001 optimizer = Adam(lr =", "dataX = [] dataY = [] for i in range(len(dataset) - look_back -", "% (testScore)) # shift train predictions for plotting trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :]", "np.array(dataX) dataY = np.array(dataY) return dataX, dataY look_back = 1 #look_back = time_steps:", "= np.reshape(testX, (testX.shape[0], time_steps, features)) #create and fit the LSTM input_shape = (time_steps,", "plot baseline and predictions plt.plot(scaler.inverse_transform(dataset)) plt.plot(trainPredictPlot) plt.plot(testPredictPlot) plt.show() ''' the most important hyper-parameter", "= np.empty_like(dataset) testPredictPlot[:, :] = np.nan testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict # plot baseline", "array dataset = dataset.astype('float32') #the shape of dataset: num_samples, features #normalise the 
dataset", "dataset = dataset.astype('float32') #the shape of dataset: num_samples, features #normalise the dataset feature_range", "= optimizer) model.fit(trainX, trainY, batch_size = batch_size, epochs = epochs, verbose = 1,", "fit the LSTM input_shape = (time_steps, features) lstm_neurons = 4 #lstm_neurons is a", "dense_neurons = 1 #dense_neurions is equal to the shape of trainY(= 1) batch_size", "features)) #create and fit the LSTM input_shape = (time_steps, features) lstm_neurons = 4", "look_back) testX, testY = create_dataset(test_set, look_back) #reshape input to be [samples, time_steps, features]", "Oct 6 16:23:04 2020 @author: Admin \"\"\" import numpy as np import pandas", "np.reshape(trainX, (trainX.shape[0], time_steps, features)) testX = np.reshape(testX, (testX.shape[0], time_steps, features)) #create and fit", "1 epochs = 100 lr = 0.001 optimizer = Adam(lr = lr, beta_1", "decay = 0.0, amsgrad = True) model = Sequential() model.add(LSTM(lstm_neurons, input_shape = input_shape,", "dataX.append(a) dataY.append(b) dataX = np.array(dataX) dataY = np.array(dataY) return dataX, dataY look_back =", "0.001 optimizer = Adam(lr = lr, beta_1 = 0.9, beta_2 = 0.999, epsilon", "input_shape = input_shape, return_sequences = False)) model.add(Dense(dense_neurons, activation = 'linear')) model.compile(loss = 'mean_squared_error',", "= 0.9, beta_2 = 0.999, epsilon = 1e-8, decay = 0.0, amsgrad =", "optimizer = optimizer) model.fit(trainX, trainY, batch_size = batch_size, epochs = epochs, verbose =", "i_split) #print(train_size) test_size = len(dataset) - train_size #print(test_size) train_set = dataset[0:train_size, :] test_set", "utf-8 -*- from __future__ import print_function \"\"\" Created on Tue Oct 6 16:23:04", "mean squared error trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f RMSE' % (trainScore))", "keras.layers import LSTM from keras.optimizers import Adam from sklearn.preprocessing import MinMaxScaler from 
sklearn.metrics", "import Dense from keras.layers import LSTM from keras.optimizers import Adam from sklearn.preprocessing import", "matrix for LSTM def create_dataset(dataset, look_back): dataX = [] dataY = [] for", "to be [samples, time_steps, features] time_steps = look_back features = dataset.shape[1] trainX =", "scaler.inverse_transform([trainY]) testPredict = scaler.inverse_transform(testPredict) testY = scaler.inverse_transform([testY]) # calculate root mean squared error", "plt.plot(testPredictPlot) plt.show() ''' the most important hyper-parameter is look_back and batch_size researchers should", "filename = 'international-airline-passengers.csv' filepath = os.path.join(os.getcwd(), filename) dataframe = pd.read_csv(filepath, usecols = [1],", "= scaler.fit_transform(dataset) #split the dataset into training and test set i_split = 0.8", "features)) testX = np.reshape(testX, (testX.shape[0], time_steps, features)) #create and fit the LSTM input_shape", "epochs = epochs, verbose = 1, shuffle = True) #make predictions trainPredict =", "verbose = 1, shuffle = True) #make predictions trainPredict = model.predict(trainX, batch_size =", "squared error trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0])) print('Train Score: %.2f RMSE' % (trainScore)) testScore", "numpy array dataset = dataset.astype('float32') #the shape of dataset: num_samples, features #normalise the", "most important hyper-parameter is look_back and batch_size researchers should try few times to", "math.sqrt(mean_squared_error(testY[0], testPredict[:,0])) print('Test Score: %.2f RMSE' % (testScore)) # shift train predictions for", "test_size = len(dataset) - train_size #print(test_size) train_set = dataset[0:train_size, :] test_set = dataset[train_size:,", "a = dataset[i:(i+look_back), 0] b = dataset[i+look_back, 0] dataX.append(a) dataY.append(b) dataX = np.array(dataX)", "dataset matrix for LSTM def create_dataset(dataset, look_back): dataX = [] dataY = []", "plotting 
trainPredictPlot = np.empty_like(dataset) trainPredictPlot[:, :] = np.nan trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict #" ]
[ "Created Date: 2021-08-08 Last Modified: 2021-08-28 content: ''' from .base import BaseSegmentor from", ".semi import Semi from .semi_v2 import SemiV2 from .reco import ReCo # __all__", ".base import BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder from .semi", "Semi from .semi_v2 import SemiV2 from .reco import ReCo # __all__ = ['BaseSegmentor',", ".encoder_decoder import EncoderDecoder from .semi import Semi from .semi_v2 import SemiV2 from .reco", "''' from .base import BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder", "content: ''' from .base import BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder import", "from .cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder from .semi import Semi from", "Author: <NAME> Created Date: 2021-08-08 Last Modified: 2021-08-28 content: ''' from .base import", "Date: 2021-08-08 Last Modified: 2021-08-28 content: ''' from .base import BaseSegmentor from .cascade_encoder_decoder", "BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder from .semi import Semi", "2021-08-08 Last Modified: 2021-08-28 content: ''' from .base import BaseSegmentor from .cascade_encoder_decoder import", "import Semi from .semi_v2 import SemiV2 from .reco import ReCo # __all__ =", "import EncoderDecoder from .semi import Semi from .semi_v2 import SemiV2 from .reco import", ".semi_v2 import SemiV2 from .reco import ReCo # __all__ = ['BaseSegmentor', 'EncoderDecoder', 'CascadeEncoderDecoder']", "EncoderDecoder from .semi import Semi from .semi_v2 import SemiV2 from .reco import ReCo", "Modified: 2021-08-28 content: ''' from .base import BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder from", "from .semi_v2 import SemiV2 from .reco import ReCo # 
__all__ = ['BaseSegmentor', 'EncoderDecoder',", "import CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder from .semi import Semi from .semi_v2 import", "Last Modified: 2021-08-28 content: ''' from .base import BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder", "CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder from .semi import Semi from .semi_v2 import SemiV2", "''' Author: <NAME> Created Date: 2021-08-08 Last Modified: 2021-08-28 content: ''' from .base", "2021-08-28 content: ''' from .base import BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder", "from .encoder_decoder import EncoderDecoder from .semi import Semi from .semi_v2 import SemiV2 from", "<NAME> Created Date: 2021-08-08 Last Modified: 2021-08-28 content: ''' from .base import BaseSegmentor", "import BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder from .semi import", ".cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder from .semi import Semi from .semi_v2", "from .semi import Semi from .semi_v2 import SemiV2 from .reco import ReCo #", "from .base import BaseSegmentor from .cascade_encoder_decoder import CascadeEncoderDecoder from .encoder_decoder import EncoderDecoder from" ]
[ "class ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author', 'created', 'modifield'] search_fields = ['title', 'author__email', 'body']", "list_display = ['title', 'author', 'created', 'modifield'] search_fields = ['title', 'author__email', 'body'] prepopulated_fields =", "class ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author', 'created', 'modifield'] search_fields = ['thread__title', 'author__email', 'reply']", "'body'] prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author', 'created', 'modifield'] search_fields", "['title', 'author', 'created', 'modifield'] search_fields = ['title', 'author__email', 'body'] prepopulated_fields = {'slug':('title',)} class", "list_display = ['thread', 'author', 'created', 'modifield'] search_fields = ['thread__title', 'author__email', 'reply'] admin.site.register(Thread,ThreadAdmin) admin.site.register(Reply,ReplyAdmin)", "search_fields = ['title', 'author__email', 'body'] prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display = ['thread',", "Reply class ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author', 'created', 'modifield'] search_fields = ['title', 'author__email',", "ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author', 'created', 'modifield'] search_fields = ['thread__title', 'author__email', 'reply'] admin.site.register(Thread,ThreadAdmin)", "import admin from .models import Thread, Reply class ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author',", "= {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author', 'created', 'modifield'] search_fields = ['thread__title',", "ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author', 'created', 'modifield'] search_fields = ['title', 'author__email', 'body'] prepopulated_fields", "import Thread, Reply class ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author', 
'created', 'modifield'] search_fields =", "'author', 'created', 'modifield'] search_fields = ['title', 'author__email', 'body'] prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin):", "admin from .models import Thread, Reply class ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author', 'created',", "{'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author', 'created', 'modifield'] search_fields = ['thread__title', 'author__email',", "'modifield'] search_fields = ['title', 'author__email', 'body'] prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display =", ".models import Thread, Reply class ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author', 'created', 'modifield'] search_fields", "'author__email', 'body'] prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author', 'created', 'modifield']", "= ['title', 'author', 'created', 'modifield'] search_fields = ['title', 'author__email', 'body'] prepopulated_fields = {'slug':('title',)}", "from django.contrib import admin from .models import Thread, Reply class ThreadAdmin(admin.ModelAdmin): list_display =", "['title', 'author__email', 'body'] prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author', 'created',", "prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author', 'created', 'modifield'] search_fields =", "'created', 'modifield'] search_fields = ['title', 'author__email', 'body'] prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display", "= ['title', 'author__email', 'body'] prepopulated_fields = {'slug':('title',)} class ReplyAdmin(admin.ModelAdmin): list_display = ['thread', 'author',", "Thread, Reply class ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author', 'created', 'modifield'] 
search_fields = ['title',", "django.contrib import admin from .models import Thread, Reply class ThreadAdmin(admin.ModelAdmin): list_display = ['title',", "from .models import Thread, Reply class ThreadAdmin(admin.ModelAdmin): list_display = ['title', 'author', 'created', 'modifield']" ]
[ "utils import get from digital_land_frontend.render import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return", "python3 import sys import json # add parent directory sys.path.append(\".\") from utils import", "sys import json # add parent directory sys.path.append(\".\") from utils import get from", "get from digital_land_frontend.render import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\": \"FeatureCollection\",", "from digital_land_frontend.render import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\":", "create_feature(row): feature = {\"type\": \"Feature\"} feature[\"properties\"] = row if row[\"point\"] is not None:", "not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data): features = [] for", "import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\": features} def", "feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data): features = [] for row in", "wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\": features} def create_feature(row):", "= \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\": features} def create_feature(row): feature =", 
"features.append(create_feature(row)) return create_feature_collection(features) def test_convert(fn): # if file local with open(fn) as file:", "create_feature_collection(features) def test_convert(fn): # if file local with open(fn) as file: data =", "sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\": features} def create_feature(row): feature", "row in data: features.append(create_feature(row)) return create_feature_collection(features) def test_convert(fn): # if file local with", "local with open(fn) as file: data = json.load(file) gjson = convert_json_to_geojson(data) with open(", "directory sys.path.append(\".\") from utils import get from digital_land_frontend.render import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\"", "# if file local with open(fn) as file: data = json.load(file) gjson =", "= row if row[\"point\"] is not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature def", "{\"type\": \"Feature\"} feature[\"properties\"] = row if row[\"point\"] is not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"])", "import sys import json # add parent directory sys.path.append(\".\") from utils import get", "def convert_json_to_geojson(data): features = [] for row in data: features.append(create_feature(row)) return create_feature_collection(features) def", "{\"type\": \"FeatureCollection\", \"features\": features} def create_feature(row): feature = {\"type\": \"Feature\"} feature[\"properties\"] = row", "feature = {\"type\": \"Feature\"} feature[\"properties\"] = row if row[\"point\"] is not None: feature[\"geometry\"]", "= wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data): features = [] for row in data:", "return feature def convert_json_to_geojson(data): features = [] for row in data: 
features.append(create_feature(row)) return", "def test_convert(fn): # if file local with open(fn) as file: data = json.load(file)", "from utils import get from digital_land_frontend.render import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features):", "file: data = json.load(file) gjson = convert_json_to_geojson(data) with open( f\"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.geojson\", \"w\" ) as", "with open(fn) as file: data = json.load(file) gjson = convert_json_to_geojson(data) with open( f\"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.geojson\",", "\"features\": features} def create_feature(row): feature = {\"type\": \"Feature\"} feature[\"properties\"] = row if row[\"point\"]", "= json.load(file) gjson = convert_json_to_geojson(data) with open( f\"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.geojson\", \"w\" ) as file: file.write(json.dumps(gjson))", "digital_land_frontend.render import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\": features}", "None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data): features = [] for row", "test_convert(fn): # if file local with open(fn) as file: data = json.load(file) gjson", "def create_feature(row): feature = {\"type\": \"Feature\"} feature[\"properties\"] = row if row[\"point\"] is not", "create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\": features} def create_feature(row): feature = {\"type\": \"Feature\"} feature[\"properties\"]", "features} def create_feature(row): feature = {\"type\": \"Feature\"} feature[\"properties\"] = row if row[\"point\"] is", "sys.path.append(\".\") from utils import get from 
digital_land_frontend.render import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def", "\"Feature\"} feature[\"properties\"] = row if row[\"point\"] is not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return", "data: features.append(create_feature(row)) return create_feature_collection(features) def test_convert(fn): # if file local with open(fn) as", "return create_feature_collection(features) def test_convert(fn): # if file local with open(fn) as file: data", "if file local with open(fn) as file: data = json.load(file) gjson = convert_json_to_geojson(data)", "= [] for row in data: features.append(create_feature(row)) return create_feature_collection(features) def test_convert(fn): # if", "#!/usr/bin/env python3 import sys import json # add parent directory sys.path.append(\".\") from utils", "parent directory sys.path.append(\".\") from utils import get from digital_land_frontend.render import wkt_to_json_geometry sample_file =", "= {\"type\": \"Feature\"} feature[\"properties\"] = row if row[\"point\"] is not None: feature[\"geometry\"] =", "in data: features.append(create_feature(row)) return create_feature_collection(features) def test_convert(fn): # if file local with open(fn)", "return {\"type\": \"FeatureCollection\", \"features\": features} def create_feature(row): feature = {\"type\": \"Feature\"} feature[\"properties\"] =", "row if row[\"point\"] is not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data):", "import get from digital_land_frontend.render import wkt_to_json_geometry sample_file = \"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\":", "def create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\": features} def create_feature(row): feature = {\"type\": \"Feature\"}", "add parent 
directory sys.path.append(\".\") from utils import get from digital_land_frontend.render import wkt_to_json_geometry sample_file", "feature[\"properties\"] = row if row[\"point\"] is not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature", "features = [] for row in data: features.append(create_feature(row)) return create_feature_collection(features) def test_convert(fn): #", "as file: data = json.load(file) gjson = convert_json_to_geojson(data) with open( f\"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.geojson\", \"w\" )", "file local with open(fn) as file: data = json.load(file) gjson = convert_json_to_geojson(data) with", "is not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data): features = []", "\"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.json\" def create_feature_collection(features): return {\"type\": \"FeatureCollection\", \"features\": features} def create_feature(row): feature = {\"type\":", "if row[\"point\"] is not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data): features", "row[\"point\"] is not None: feature[\"geometry\"] = wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data): features =", "import json # add parent directory sys.path.append(\".\") from utils import get from digital_land_frontend.render", "json # add parent directory sys.path.append(\".\") from utils import get from digital_land_frontend.render import", "[] for row in data: features.append(create_feature(row)) return create_feature_collection(features) def test_convert(fn): # if file", "# add parent directory sys.path.append(\".\") from utils import get from digital_land_frontend.render import wkt_to_json_geometry", "convert_json_to_geojson(data): features = [] for row in data: features.append(create_feature(row)) return 
create_feature_collection(features) def test_convert(fn):", "feature def convert_json_to_geojson(data): features = [] for row in data: features.append(create_feature(row)) return create_feature_collection(features)", "open(fn) as file: data = json.load(file) gjson = convert_json_to_geojson(data) with open( f\"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.geojson\", \"w\"", "data = json.load(file) gjson = convert_json_to_geojson(data) with open( f\"docs/brownfield-land/organisation/local-authority-eng/HAG/sites.geojson\", \"w\" ) as file:", "for row in data: features.append(create_feature(row)) return create_feature_collection(features) def test_convert(fn): # if file local", "wkt_to_json_geometry(row[\"point\"]) return feature def convert_json_to_geojson(data): features = [] for row in data: features.append(create_feature(row))", "\"FeatureCollection\", \"features\": features} def create_feature(row): feature = {\"type\": \"Feature\"} feature[\"properties\"] = row if" ]
[ "Vol Compare fast1 \"SDVCf2 D\": if (prior is None): # first row pass", "SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 - YDVC f1 d *", "u * ( DVD - SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1']", "YDVCf1 u * ( DVD - SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] = df.at[prior[0],", "= df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till", "'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds = [] def day_calculate(self, df: pd.DataFrame, share_num: str,", "DVD > SDVBm D-1 # SDVBm D = SDVBm D-1 + YDVBm u", "# first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']: # Case 1:", "SDVCf1 D-1 - YDVC f1 d * (SDVCf1 D-1 - DVD)eSDVCf1 d df.at[idx[0],", "u df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV'] -", "'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily Vol Compare slow \"SDVCsl D\": if", "- at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily", "up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing", "-> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till this row, write the count", "df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0],", "\"SDVBsl D\" increased. 
up_till = between_dates_condition(df, df.index[0], prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]}", "+ YDVBf u * ( DVD - SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf'] =", "+ at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0],", "D-1 - YDVC f1 d * (SDVCf1 D-1 - DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1']", "DVD - SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] *", "- DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0],", "less than 1 df.at[idx[0], 'DaysDVup'] = daysDVup else: daysDVup = 1 df.at[idx[0], 'DaysDVup']", "'SDVBsl']: # Case 2: DVD < SDVBsl D-1 # SDVBsl D = SDVBsl", "* (SDVB sl D-1 - DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] -", "curday_ordinal={curday_ordinal}') # 1a) Slow Daily Vol Basic slow \"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\")", "import at_calc_params from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self):", "df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1']", "df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']: # Case 1: DVD > SDVBf D-1 #", "df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) **", "1st 5 days Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] = DV_avg", "DVD < SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 - YDVC f2", "As in the old ShW, we need figures to show a volumes constellation,", "wrap_up calculation should only run at stage 2' # assign into Ov 
SDVBf.D-1,", "min(up_tillDF.shape[0], 50) # not more than 50 daysDVup = max(1, daysDVup) # not", "idx: Index, prior: Index, top_up: bool, stage: int): ''' Implementation per Gunther's 210209", "df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV'] - df.at[prior[0],", "the number of days in a row the Slow Daily Vol Basic slow", "column we assume exists self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1',", "row # \"DaysDVupD\" is the number of days in a row the Slow", "# 1f) Slow Daily Vol Compare fast1 \"SDVCf1 D\": if (prior is None):", "df.at[prior[0], 'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf1']: # Case 2:", ">= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf'] # 'DVFm'", "1d) Slow Daily Vol Compare slow \"SDVCsl D\": if (prior is None): #", "* (SDVBf D-1 - DVD)eSDVBf d df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting']", "df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d)", "SDVCsl D-1 # SDVCsl D = SDVCsl D-1 - YDVC sl d *", "elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']: # Case 1: DVD > SDVCf2 D-1", "2' # assign into Ov SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num,", "df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting']", "df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c)", "Index import pandas as pd from bsbetl.alltable_calcs import 
Calculation from bsbetl.alltable_calcs.at_params import at_calc_params", "df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif", "curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf'] #", "= df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2']) **", "'DV'] <= df.at[prior[0], 'SDVCf2']: # Case 2: DVD < SDVCf2 D-1 # SDVCf2", "# SDVCm D = SDVCm D-1 + YDVCm u * ( DVD -", "SDVBf D = SDVBf D-1 - YDVB f d * (SDVBf D-1 -", "if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf']", "medium \"SDVB m D\": if (prior is None): # first row pass elif", "= df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] #", "= SDVCf1 D-1 + YDVCf1 u * ( DVD - SDVCf1 D-1)eSDVCf1 u", "'DV'] <= df.at[prior[0], 'SDVCsl']: # Case 2: DVD < SDVCsl D-1 # SDVCsl", "average of 1st 5 days Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm']", "3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0],", "'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except IndexError as exc: logging.error(f'_2StVols_SlowDailyVols wrap_up exception {exc}')", "at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV']", "d df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV'])", "= [] def day_calculate(self, df: pd.DataFrame, share_num: 
str, idx: Index, prior: Index, top_up:", "'DV'] - df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']: #", "Index, prior: Index, top_up: bool, stage: int): ''' Implementation per Gunther's 210209 Calc", "df ''' assert stage == 2, f'{self.name} calculation should only run at stage", "day_calculate(self, df: pd.DataFrame, share_num: str, idx: Index, prior: Index, top_up: bool, stage: int):", "SDVCsl D = SDVCsl D-1 + YDVCsl u * ( DVD - SDVCsl", "df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']:", "at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily Vol Compare fast1 \"SDVCf2 D\": if (prior is", "= df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm']) **", "D\": if (prior is None): # first row pass elif df.at[idx[0], 'DV'] >", "* (SDVCsl D-1 - DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting']", "d df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV'])", "'DaysDVup'] = 0 # compute starting SlowVols figures by using average of 1st", "row of the df ''' assert stage == 2, f'{self.name} calculation should only", "df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting']", "if (prior is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0],", "Daily Vols: Calculates the 'computeds' of single (daily) row of the df '''", "Daily Vol Basic medium \"SDVB m D\": if (prior is None): # first", "* ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <=", "df.at[prior[0], 'SDVCf2']: # Case 1: DVD > SDVCf2 
D-1 # SDVCf2 D =", "D-1 # SDVCsl D = SDVCsl D-1 + YDVCsl u * ( DVD", "'DV'] - df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']: #", "( DVD - SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting']", "YDVCsl u * ( DVD - SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] = df.at[prior[0],", "df.at[prior[0], 'SDVBf']: # Case 2: DVD < SDVBf D-1 # SDVBf D =", "DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0],", "first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']: # Case 1: DVD", "# this column we assume exists self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl',", "DVD - SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] *", "df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm']", "SDVCsl D-1 - YDVC sl d * (SDVCsl D-1 - DVD)eSDVCsl d df.at[idx[0],", "elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf1']: # Case 2: DVD < SDVCf1 D-1", "df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting']", "df.index[0], prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: #", "df.at[prior[0], 'SDVCsl']: # Case 2: DVD < SDVCsl D-1 # SDVCsl D =", "/ df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm']", "df.at[idx[0], 'SDVBf'] # 'DVFf3' if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0],", "f2 d * (SDVCf2 D-1 - DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2'] = 
df.at[prior[0], 'SDVCf2']", "'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily Vol Compare medium \"SDVCm D\": if", "calculation should only run at stage 2' # assign into Ov SDVBf.D-1, and", "the old ShW, we need figures to show a volumes constellation, the Daily", "['DV'] # this column we assume exists self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf',", "number of days in a row the Slow Daily Vol Basic slow \"SDVBsl", "_2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf'] # 'DVFf1' if curday_ordinal", "df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']:", "= SDVBsl D-1 + YDVBsl u / DaysDVupD * ( DVD - SDVB", "* ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in the old", "df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) **", "pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']: # Case 1: DVD > SDVBm", "'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow", "= df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] #", "/ df.at[idx[0], 'SDVBsl'] ''' additional calcs performed AFTER day by day operations '''", "# first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']: # Case 1:", "DV D > SDVBsl D-1 # we're not on the very first row", "df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']: # Case 2: DVD < SDVBsl D-1 #", "D-1 + YDVCm u * ( DVD - SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm']", "is assumed daily since stage 2 is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0])", 
"if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf']", "Daily Vol 1. Make Slow Daily Vols: Calculates the 'computeds' of single (daily)", "1: DVD > SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 + YDVCf1", "run at stage 2' # assign into Ov SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num,", "- at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily", "Slow Daily Vol Compare medium \"SDVCm D\": if (prior is None): # first", "D-1)eSDVBm u df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV']", "= daysDVup # SDVB sl D = SDVBsl D-1 + YDVBsl u /", "> df.at[prior[0], 'SDVCf2']: # Case 1: DVD > SDVCf2 D-1 # SDVCf2 D", "* ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <=", "increased. up_till = between_dates_condition(df, df.index[0], prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} ->", "'DVFf3' if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV'] / df.at[_1_back,", "df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf'] # 'DVFf2' if curday_ordinal >=", "elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']: # Case 1: DV D > SDVBsl", "elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']: # Case 2: DVD < SDVBf D-1", "D-1 - DVD)eSDVCm d df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * (", "50) # not more than 50 daysDVup = max(1, daysDVup) # not less", "+ (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0],", "daysDVup = min(up_tillDF.shape[0], 50) # not more than 50 daysDVup = max(1, daysDVup)", "- 
SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * (", "row df.at[idx[0], 'DaysDVup'] = 0 # compute starting SlowVols figures by using average", "= DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] =", "DVD - SDVB sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup)", "DVD > SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 + YDVCf2 u", "we need figures to show a volumes constellation, the Daily Vols Figure, \"DVFxx\"", "daysDVup else: daysDVup = 1 df.at[idx[0], 'DaysDVup'] = daysDVup # SDVB sl D", "] self.ov_computeds = [] def day_calculate(self, df: pd.DataFrame, share_num: str, idx: Index, prior:", "D-1 # SDVCm D = SDVCm D-1 + YDVCm u * ( DVD", "2: DVD < SDVCsl D-1 # SDVCsl D = SDVCsl D-1 - YDVC", "= df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm']) **", "None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']: # Case", "df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in the old ShW, we", "DVD < SDVBm D-1 # SDVBm D = SDVBm D-1 - YDVB m", "single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description = 'Modified Daily Volume calculation' self.dependents", "# 1a) Slow Daily Vol Basic slow \"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if", "D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV']", "bsbetl.alltable_calcs import Calculation from bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index,", "str, idx: Index, prior: Index, top_up: 
bool, stage: int): ''' Implementation per Gunther's", "curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow Daily Vol Basic slow \"SDVBsl\":", "- at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily", "''' assert stage == 2, f'{self.name} calculation should only run at stage 2'", "d * (SDVCf1 D-1 - DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] -", "'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']: # Case 2: DVD", "- at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily", "# SDVBm D = SDVBm D-1 + YDVBm u * ( DVD -", "(SDVCm D-1 - DVD)eSDVCm d df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] *", "not prior is None: # 'DVFDf' ??? 
# df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV']", "SDVBm D = SDVBm D-1 + YDVBm u * ( DVD - SDVBm", "Vol Compare slow \"SDVCsl D\": if (prior is None): # first row pass", "# SDVB sl D = SDVBsl D-1 + YDVBsl u / DaysDVupD *", "bool, stage: int): ''' Implementation per Gunther's 210209 Calc Daily Vol Initial Stage.odt", "= df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl']) **", "in the old ShW, we need figures to show a volumes constellation, the", "curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf'] #", "than 1 df.at[idx[0], 'DaysDVup'] = daysDVup else: daysDVup = 1 df.at[idx[0], 'DaysDVup'] =", "(at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV']", "SDVCf2 D-1 - YDVC f2 d * (SDVCf2 D-1 - DVD)eSDVCf2 d df.at[idx[0],", "\"SDVCf2 D\": if (prior is None): # first row pass elif df.at[idx[0], 'DV']", "1. Make Slow Daily Vols: Calculates the 'computeds' of single (daily) row of", "'SDVCf2'] = df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting']", "SDVCsl D = SDVCsl D-1 - YDVC sl d * (SDVCsl D-1 -", "** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow Daily Vol Compare fast1 \"SDVCf1 D\": if (prior", "Implementation per Gunther's 210209 Calc Daily Vol Initial Stage.odt Daily Vol 1. 
Make", "Daily Vol Compare slow \"SDVCsl D\": if (prior is None): # first row", "( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0],", "datetime from numpy.core.numeric import NaN from pandas.core.indexes.base import Index import pandas as pd", "< SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 - YDVC f1 d", "( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow Daily Vol Compare fast1", "'DVFDf'] = df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf'] # 'DVFf3' if curday_ordinal >= 1:", "D = SDVCf1 D-1 - YDVC f1 d * (SDVCf1 D-1 - DVD)eSDVCf1", "'DVFm'] = df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0],", "def day_calculate(self, df: pd.DataFrame, share_num: str, idx: Index, prior: Index, top_up: bool, stage:", "u df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV'] -", "'DV'] / df.at[idx[0], 'SDVBf'] # 'DVFf3' if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3']", "up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till this row, write the count in DaysDVup #print(f'up_tilDF", "at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in the old ShW, we need figures to show", "'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif", "df.at[idx[0], 'DaysDVup'] = 0 # compute starting SlowVols figures by using average of", "SDVCm D = SDVCm D-1 + YDVCm u * ( DVD - SDVCm", "df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']: # Case 2:", "only run at stage 2' # df is assumed daily since stage 2", "'SDVBsl'] = df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl'])", 
"prior: Index, top_up: bool, stage: int): ''' Implementation per Gunther's 210209 Calc Daily", "row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']: # Case 1: DVD >", "'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif", ">= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf'] # 'DVFf2'", "SDVCm D-1 # SDVCm D = SDVCm D-1 + YDVCm u * (", "df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl'] /", "at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily Vol", "df is assumed daily since stage 2 is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal =", "at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow Daily Vol Compare fast1 \"SDVCf1 D\": if (prior is", "Case 1: DVD > SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 +", "'DVFf1' if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1'] / df.at[_3_back,", "* ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV'] <=", "< SDVBsl D-1 # SDVBsl D = SDVBsl D-1 - YDVBsl d *", "'SDVBsl'] = df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting']", "= 1 df.at[idx[0], 'DaysDVup'] = daysDVup # SDVB sl D = SDVBsl D-1", "( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0],", "None: # 'DVFDf' ??? 
# df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf']", "<= df.at[prior[0], 'SDVBf']: # Case 2: DVD < SDVBf D-1 # SDVBf D", "df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']: # Case 2:", "= SDVCf2 D-1 + YDVCf2 u * ( DVD - SDVCf2 D-1)eSDVCf2 u", "DaysDVupD * ( DVD - SDVB sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] = df.at[prior[0],", "'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily Vol Basic medium \"SDVB m", "** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in the old ShW, we need figures to", "ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except IndexError as exc: logging.error(f'_2StVols_SlowDailyVols wrap_up exception", "import Index import pandas as pd from bsbetl.alltable_calcs import Calculation from bsbetl.alltable_calcs.at_params import", "row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']: # Case 1: DVD >", "# df is assumed daily since stage 2 is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal", "pandas.core.indexes.base import Index import pandas as pd from bsbetl.alltable_calcs import Calculation from bsbetl.alltable_calcs.at_params", "Daily Vol Basic slow \"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if (prior is None):", "( DVD - SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting']", "** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']: # Case 2: DVD <", "Slow Daily Vol Basic fast \"SDVB bf D\": if (prior is None): #", "- df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']: # Case", "get_row_index_from_daily_df, last_trading_row_index, single_day_condition class 
_2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description = 'Modified Daily Volume", "first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']: # Case 1: DVD", "between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description = 'Modified Daily", "u / DaysDVupD * ( DVD - SDVB sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl']", "- SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * (", "sl D-1 - DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] *", "DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']:", "if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till this row, write the count in DaysDVup", "df: pd.DataFrame, share_num: str, idx: Index, prior: Index, top_up: bool, stage: int): '''", "at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf1']: # Case 2: DVD < SDVCf1", "Calculation from bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class", "df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']:", "<= df.at[prior[0], 'SDVCsl']: # Case 2: DVD < SDVCsl D-1 # SDVCsl D", "> df.at[prior[0], 'SDVBsl']: # Case 1: DV D > SDVBsl D-1 # we're", "df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf1']: # Case 2: DVD < SDVCf1 D-1 #", "u * ( DVD - SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm']", "d * (SDVB sl D-1 - DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 
'SDVBsl']", "None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']: # Case", "from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description", "Case 1: DVD > SDVBm D-1 # SDVBm D = SDVBm D-1 +", "1c) Slow Daily Vol Basic fast \"SDVB bf D\": if (prior is None):", "'SDVCm'] = df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm'])", "( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily Vol Basic medium", "df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting']", "wrap_up(self, df, share_num, calc_dates_in_df, top_up, stage): assert stage == 2, f'{self.name} wrap_up calculation", "2: DVD < SDVCm D-1 # SDVCm D = SDVCm D-1 - YDVC", "D-1 - DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] * (", "<= df.at[prior[0], 'SDVCm']: # Case 2: DVD < SDVCm D-1 # SDVCm D", "Calculates the 'computeds' of single (daily) row of the df ''' assert stage", "'SDVBm'] = df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm'])", "df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) **", "??? 
# df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf'] # 'DVFf3' if", "D-1 + YDVCf1 u * ( DVD - SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1']", "medium \"SDVCm D\": if (prior is None): # first row pass elif df.at[idx[0],", "* ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <=", "'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily Vol Basic fast \"SDVB bf D\":", "'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm'] # 'DVFsl'", "by using average of 1st 5 days Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] =", "SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0],", "# been increasing till this row, write the count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}')", "D = SDVBsl D-1 + YDVBsl u / DaysDVupD * ( DVD -", "1h) As in the old ShW, we need figures to show a volumes", "share_num: str, idx: Index, prior: Index, top_up: bool, stage: int): ''' Implementation per", "# we're not on the very first row # \"DaysDVupD\" is the number", "# Case 1: DVD > SDVCm D-1 # SDVCm D = SDVCm D-1", "on the very first row # \"DaysDVupD\" is the number of days in", "= daysDVup else: daysDVup = 1 df.at[idx[0], 'DaysDVup'] = daysDVup # SDVB sl", "SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0],", "'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow Daily Vol Compare fast1 \"SDVCf1 D\": if", "SDVBsl D-1 + YDVBsl u / DaysDVupD * ( DVD - SDVB sl", "** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']: # Case 2: DVD <", "'SDVCsl']: # Case 2: DVD < SDVCsl D-1 # SDVCsl D = SDVCsl", "row the Slow Daily Vol Basic slow \"SDVBsl D\" increased. 
up_till = between_dates_condition(df,", "Vol Basic slow \"SDVBsl D\" increased. up_till = between_dates_condition(df, df.index[0], prior[0]) up_tillDF =", "** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily Vol Compare fast1 \"SDVCf2 D\": if (prior", "old ShW, we need figures to show a volumes constellation, the Daily Vols", "AFTER day by day operations ''' def wrap_up(self, df, share_num, calc_dates_in_df, top_up, stage):", "1: DV D > SDVBsl D-1 # we're not on the very first", "- SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * (", "'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow", "== 2, f'{self.name} wrap_up calculation should only run at stage 2' # assign", "* ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <=", "'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds = [] def day_calculate(self, df: pd.DataFrame, share_num:", "Compare slow \"SDVCsl D\": if (prior is None): # first row pass elif", "constellation, the Daily Vols Figure, \"DVFxx\" if not prior is None: # 'DVFDf'", "import ov_helpers import logging import math from datetime import date, datetime from numpy.core.numeric", "df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']: # Case 1: DV D > SDVBsl D-1", "in a row the Slow Daily Vol Basic slow \"SDVBsl D\" increased. 
up_till", "is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']: #", "- YDVC f1 d * (SDVCf1 D-1 - DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] =", "= df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] #", "YDVB m d * (SDVBm D-1 - DVD)eSDVBm d df.at[idx[0], 'SDVBm'] = df.at[prior[0],", "= df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow Daily Vol Basic slow \"SDVBsl\": #print(f\"{idx[0]}", "df.at[prior[0], 'SDVCm']: # Case 1: DVD > SDVCm D-1 # SDVCm D =", "'SDVBf']: # Case 1: DVD > SDVBf D-1 # SDVBf D = SDVBf", "D = SDVBf D-1 - YDVB f d * (SDVBf D-1 - DVD)eSDVBf", "D-1 - DVD)eSDVBm d df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * (", "last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description = 'Modified Daily Volume calculation'", "D-1 - YDVB m d * (SDVBm D-1 - DVD)eSDVBm d df.at[idx[0], 'SDVBm']", "the 'computeds' of single (daily) row of the df ''' assert stage ==", "exists self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1',", "SDVCm D-1 # SDVCm D = SDVCm D-1 - YDVC m d *", "df.at[prior[0], 'SDVCm']: # Case 2: DVD < SDVCm D-1 # SDVCm D =", "( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily Vol Basic fast", "df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g)", "super().__init__('SlowDailyVols') self.description = 'Modified Daily Volume calculation' self.dependents = ['DV'] # this column", "df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * ( 
df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] # 1b)", "Vol Basic fast \"SDVB bf D\": if (prior is None): # first row", "only run at stage 2' # assign into Ov SDVBf.D-1, and SDVBf.D-2 try:", "# 'DVFf2' if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2'] /", "DVD - SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] *", "YDVBsl d * (SDVB sl D-1 - DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] = df.at[prior[0],", "'DV'] / df.at[_1_back, 'SDVBf'] # 'DVFf2' if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2']", "rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50) # not more than 50 daysDVup = max(1,", "logging import math from datetime import date, datetime from numpy.core.numeric import NaN from", "'Modified Daily Volume calculation' self.dependents = ['DV'] # this column we assume exists", "(prior is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']:", "df.at[_1_back, 'SDVBf'] # 'DVFf2' if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0],", "print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow Daily Vol Basic slow", "1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf'] # 'DVFf2' if", "- df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']: # Case", "'DVFf1'] = df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0],", "df.at[prior[0], 'SDVCf1']: # Case 2: DVD < SDVCf1 D-1 # SDVCm D =", "Figure, \"DVFxx\" if not prior is None: # 'DVFDf' ??? 
# df.at[idx[0], 'DVFDf']", "at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily Vol Basic fast \"SDVB bf D\": if (prior", "- YDVB m d * (SDVBm D-1 - DVD)eSDVBm d df.at[idx[0], 'SDVBm'] =", "= DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0], 'DV'] > df.at[prior[0],", "Index, top_up: bool, stage: int): ''' Implementation per Gunther's 210209 Calc Daily Vol", "# compute starting SlowVols figures by using average of 1st 5 days Volume", "at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV']", "at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']: # Case 2: DVD < SDVCf2", "- at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow Daily", "try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except IndexError as exc: logging.error(f'_2StVols_SlowDailyVols wrap_up", "# print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow Daily Vol Basic", "df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']: # Case 2: DVD < SDVBf D-1 #", "df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily Vol Compare slow \"SDVCsl", "# SDVCm D = SDVCf1 D-1 - YDVC f1 d * (SDVCf1 D-1", "f d * (SDVBf D-1 - DVD)eSDVBf d df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf']", "DV= {df.at[idx[0], 'DV']}\") if (prior is None): # first row df.at[idx[0], 'DaysDVup'] =", "= ['DV'] # this column we assume exists self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm',", "Vol Compare fast1 \"SDVCf1 D\": if (prior is None): # first row pass", 
"df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']: # Case 1: DV", "m d * (SDVBm D-1 - DVD)eSDVBm d df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm']", "fast \"SDVB bf D\": if (prior is None): # first row pass elif", "asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow Daily Vol", "YDVC sl d * (SDVCsl D-1 - DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] = df.at[prior[0],", "u df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV'] -", "'DV'] > df.at[prior[0], 'SDVCf2']: # Case 1: DVD > SDVCf2 D-1 # SDVCf2", "D = SDVBm D-1 - YDVB m d * (SDVBm D-1 - DVD)eSDVBm", "than 50 daysDVup = max(1, daysDVup) # not less than 1 df.at[idx[0], 'DaysDVup']", "< SDVCsl D-1 # SDVCsl D = SDVCsl D-1 - YDVC sl d", "+ YDVCf1 u * ( DVD - SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] =", "# Case 2: DVD < SDVCf1 D-1 # SDVCm D = SDVCf1 D-1", "= df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] #", "day by day operations ''' def wrap_up(self, df, share_num, calc_dates_in_df, top_up, stage): assert", "Vol 1. 
Make Slow Daily Vols: Calculates the 'computeds' of single (daily) row", "- YDVC m d * (SDVCm D-1 - DVD)eSDVCm d df.at[idx[0], 'SDVCm'] =", "(prior is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']:", "+ at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0],", "Slow Daily Vol Compare fast1 \"SDVCf2 D\": if (prior is None): # first", "SDVCm D = SDVCf1 D-1 - YDVC f1 d * (SDVCf1 D-1 -", "> SDVBf D-1 # SDVBf D = SDVBf D-1 + YDVBf u *", "# Case 2: DVD < SDVCm D-1 # SDVCm D = SDVCm D-1", "2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf'] # 'DVFf1' if", "performed AFTER day by day operations ''' def wrap_up(self, df, share_num, calc_dates_in_df, top_up,", "SDVBm D-1 + YDVBm u * ( DVD - SDVBm D-1)eSDVBm u df.at[idx[0],", "df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) **", "till this row, write the count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0],", "'DV'] > df.at[prior[0], 'SDVBm']: # Case 1: DVD > SDVBm D-1 # SDVBm", "u df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV'] -", "of single (daily) row of the df ''' assert stage == 2, f'{self.name}", "1: DVD > SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 + YDVCf2", "# assign into Ov SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2',", "single (daily) row of the df ''' assert stage == 2, f'{self.name} calculation", "starting SlowVols figures by using average of 1st 5 days Volume DV_avg =", "(prior is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']:", "# Case 1: DV D > SDVBsl D-1 # we're not on 
the", "u df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0], 'DV'] -", "between_dates_condition(df, df.index[0], prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing:", "u df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV'] -", "is None: # 'DVFDf' ??? # df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV'] / df.at[idx[0],", "stage == 2, f'{self.name} wrap_up calculation should only run at stage 2' #", "/ df.at[_2_back, 'SDVBf'] # 'DVFf1' if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] =", "'SDVCm'] = df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting']", "> SDVBsl D-1 # we're not on the very first row # \"DaysDVupD\"", "# 1g) Slow Daily Vol Compare fast1 \"SDVCf2 D\": if (prior is None):", "the very first row # \"DaysDVupD\" is the number of days in a", "= df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1']) **", "SDVCm D = SDVCm D-1 - YDVC m d * (SDVCm D-1 -", "write the count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50) # not", "#print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till this row,", "SDVCf1 D-1 + YDVCf1 u * ( DVD - SDVCf1 D-1)eSDVCf1 u df.at[idx[0],", "'DaysDVup'] = daysDVup # SDVB sl D = SDVBsl D-1 + YDVBsl u", "__init__(self): super().__init__('SlowDailyVols') self.description = 'Modified Daily Volume calculation' self.dependents = ['DV'] # this", "first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']: # Case 1: DVD", "DVD)eSDVBf d df.at[idx[0], 'SDVBf'] = 
df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0],", "first row df.at[idx[0], 'DaysDVup'] = 0 # compute starting SlowVols figures by using", "df.at[idx[0], 'DaysDVup'] = daysDVup else: daysDVup = 1 df.at[idx[0], 'DaysDVup'] = daysDVup #", "** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily Vol Compare slow \"SDVCsl D\": if (prior", "import math from datetime import date, datetime from numpy.core.numeric import NaN from pandas.core.indexes.base", "* ( DVD - SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] +", "df.at[prior[0], 'SDVCf2']: # Case 2: DVD < SDVCf2 D-1 # SDVCf2 D =", "d df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV'])", "# SDVCsl D = SDVCsl D-1 - YDVC sl d * (SDVCsl D-1", "= between_dates_condition(df, df.index[0], prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if", "'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif", "DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']: # Case 1:", "'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']: # Case 2: DVD", "slow \"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if (prior is None): # first row", "'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds = [] def day_calculate(self, df: pd.DataFrame,", "df.at[prior[0], 'SDVBf']: # Case 1: DVD > SDVBf D-1 # SDVBf D =", "'DV'] <= df.at[prior[0], 'SDVBsl']: # Case 2: DVD < SDVBsl D-1 # SDVBsl", "df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl'] =", "increasing till this row, write the count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') 
daysDVup =", "is the number of days in a row the Slow Daily Vol Basic", "- YDVBsl d * (SDVB sl D-1 - DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] =", "df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl'] '''", "from bsbetl import ov_helpers import logging import math from datetime import date, datetime", "DVD - SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] *", "= DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] =", "'SDVCsl'] = df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting']", "DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] = DV_avg", "- DVD)eSDVBf d df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0],", "df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0],", "Case 1: DV D > SDVBsl D-1 # we're not on the very", "<= df.at[prior[0], 'SDVCf2']: # Case 2: DVD < SDVCf2 D-1 # SDVCf2 D", "> df.at[prior[0], 'SDVBm']: # Case 1: DVD > SDVBm D-1 # SDVBm D", "sl D = SDVBsl D-1 + YDVBsl u / DaysDVupD * ( DVD", "SDVCsl D-1 + YDVCsl u * ( DVD - SDVCsl D-1)eSDVCsl u df.at[idx[0],", "'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow", "assumed daily since stage 2 is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate:", "not on the very first row # \"DaysDVupD\" is the number of days", "calculation' self.dependents = ['DV'] # this column we assume exists self.at_computeds = 
['DaysDVup',", "df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']: # Case 2:", "DVD > SDVCm D-1 # SDVCm D = SDVCm D-1 + YDVCm u", "= DV_avg elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']: # Case 1: DV D", "** at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily Vol Basic medium \"SDVB m D\": if", "( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0],", "rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till this row, write", "DVD)eSDVBm d df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0],", "\"SDVCf1 D\": if (prior is None): # first row pass elif df.at[idx[0], 'DV']", "= df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm']", "= SDVBf D-1 + YDVBf u * ( DVD - SDVBf D-1)eSDVBf u", "at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']: # Case 2: DVD < SDVBm", "ShW, we need figures to show a volumes constellation, the Daily Vols Figure,", "- df.at[prior[0], 'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf1']: # Case", "Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] = DV_avg", "pandas as pd from bsbetl.alltable_calcs import Calculation from bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers", "SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0],", "Make Slow Daily Vols: Calculates the 'computeds' of single (daily) row of the", "SDVB sl D = SDVBsl D-1 + YDVBsl u / DaysDVupD * (", "'DV'] - df.at[prior[0], 'SDVCm']) ** 
at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']: #", "* (SDVCf2 D-1 - DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting']", "'SDVCm']: # Case 1: DVD > SDVCm D-1 # SDVCm D = SDVCm", "< SDVBm D-1 # SDVBm D = SDVBm D-1 - YDVB m d", "'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in the old ShW, we need", "( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in the old ShW,", "this column we assume exists self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm',", "= SDVCm D-1 + YDVCm u * ( DVD - SDVCm D-1)eSDVCm u", "'DV'] <= df.at[prior[0], 'SDVBm']: # Case 2: DVD < SDVBm D-1 # SDVBm", "# 'DVFf3' if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV'] /", "'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl'] ''' additional calcs performed", "import Calculation from bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition", "first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']: # Case 1: DVD", "Vols Figure, \"DVFxx\" if not prior is None: # 'DVFDf' ??? # df.at[idx[0],", "'DVFsl' ] self.ov_computeds = [] def day_calculate(self, df: pd.DataFrame, share_num: str, idx: Index,", "Daily Vol Basic slow \"SDVBsl D\" increased. 
up_till = between_dates_condition(df, df.index[0], prior[0]) up_tillDF", "# 'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl'] ''' additional calcs", "days Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] =", "* ( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily Vol Basic", "DVD < SDVBsl D-1 # SDVBsl D = SDVBsl D-1 - YDVBsl d", "# not more than 50 daysDVup = max(1, daysDVup) # not less than", "Daily Vol Compare medium \"SDVCm D\": if (prior is None): # first row", "- df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']: # Case", "D = SDVCm D-1 + YDVCm u * ( DVD - SDVCm D-1)eSDVCm", "DVD > SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 + YDVCf1 u", "u * ( DVD - SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2']", "Slow Daily Vol Basic slow \"SDVBsl D\" increased. 
up_till = between_dates_condition(df, df.index[0], prior[0])", "= SDVCsl D-1 - YDVC sl d * (SDVCsl D-1 - DVD)eSDVCsl d", "as pd from bsbetl.alltable_calcs import Calculation from bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers import", "= df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] =", "up_till = between_dates_condition(df, df.index[0], prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\")", "'DV'] <= df.at[prior[0], 'SDVCm']: # Case 2: DVD < SDVCm D-1 # SDVCm", "SDVCm D-1 - YDVC m d * (SDVCm D-1 - DVD)eSDVCm d df.at[idx[0],", "* ( DVD - SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] +", "* ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily Vol Compare", "of 1st 5 days Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] =", "# Case 1: DVD > SDVCf1 D-1 # SDVCm D = SDVCf1 D-1", "Slow Daily Vol Basic medium \"SDVB m D\": if (prior is None): #", "df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] = DV_avg", "df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0], 'DV'] >", "'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']: # Case 2: DVD", "# Case 2: DVD < SDVCsl D-1 # SDVCsl D = SDVCsl D-1", "DVD < SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 - YDVC f1", "df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']: # Case 1: DVD > SDVCm D-1 #", "'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl']", "= df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf'] # 'DVFf3' if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1]", "Case 
2: DVD < SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 -", "daysDVup = 1 df.at[idx[0], 'DaysDVup'] = daysDVup # SDVB sl D = SDVBsl", "df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl'] ''' additional calcs performed AFTER day by day", "self.dependents = ['DV'] # this column we assume exists self.at_computeds = ['DaysDVup', 'SDVBsl',", "<= df.at[prior[0], 'SDVBm']: # Case 2: DVD < SDVBm D-1 # SDVBm D", "'DV'] > df.at[prior[0], 'SDVCm']: # Case 1: DVD > SDVCm D-1 # SDVCm", "f1 d * (SDVCf1 D-1 - DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1']", "> df.at[prior[0], 'SDVBf']: # Case 1: DVD > SDVBf D-1 # SDVBf D", "(daily) row of the df ''' assert stage == 2, f'{self.name} calculation should", "compute starting SlowVols figures by using average of 1st 5 days Volume DV_avg", "d df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV'])", "# 1d) Slow Daily Vol Compare slow \"SDVCsl D\": if (prior is None):", "'SDVCf2'] = df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2'])", "int): ''' Implementation per Gunther's 210209 Calc Daily Vol Initial Stage.odt Daily Vol", "'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds = [] def day_calculate(self, df:", "Slow Daily Vol Basic slow \"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if (prior is", "- SDVB sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) *", "2: DVD < SDVBsl D-1 # SDVBsl D = SDVBsl D-1 - YDVBsl", "'DaysDVup'] = daysDVup else: daysDVup = 1 df.at[idx[0], 'DaysDVup'] = daysDVup # SDVB", "( DVD - SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting']", "D-1 # SDVCf2 D = SDVCf2 D-1 + YDVCf2 u * ( DVD", "(prior is None): # first row df.at[idx[0], 'DaysDVup'] = 0 # compute starting", "2: DVD < SDVCf1 D-1 
# SDVCm D = SDVCf1 D-1 - YDVC", "= min(up_tillDF.shape[0], 50) # not more than 50 daysDVup = max(1, daysDVup) #", "assume exists self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf',", "'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds = [] def day_calculate(self,", "2: DVD < SDVBm D-1 # SDVBm D = SDVBm D-1 - YDVB", "1: DVD > SDVCm D-1 # SDVCm D = SDVCm D-1 + YDVCm", "Daily Vols Figure, \"DVFxx\" if not prior is None: # 'DVFDf' ??? #", "df.at[idx[0], 'SDVBsl'] ''' additional calcs performed AFTER day by day operations ''' def", "'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif", "# Case 1: DVD > SDVBm D-1 # SDVBm D = SDVBm D-1", "* ( DVD - SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] +", "'SDVCm']: # Case 2: DVD < SDVCm D-1 # SDVCm D = SDVCm", "m d * (SDVCm D-1 - DVD)eSDVCm d df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm']", "D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0], 'DV']", "by day operations ''' def wrap_up(self, df, share_num, calc_dates_in_df, top_up, stage): assert stage", "'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']: # Case 2: DVD", "SDVBsl D-1 # SDVBsl D = SDVBsl D-1 - YDVBsl d * (SDVB", "'SDVBf'] = df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting']", "[] def day_calculate(self, df: pd.DataFrame, share_num: str, idx: Index, prior: Index, top_up: bool,", "> SDVBm D-1 # SDVBm D = SDVBm D-1 + YDVBm u *", "#print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if (prior is None): # first row df.at[idx[0], 'DaysDVup']", "df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** 
at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily Vol Basic medium \"SDVB", "= df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] #", "'DV']}\") if (prior is None): # first row df.at[idx[0], 'DaysDVup'] = 0 #", "row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']: # Case 1: DVD >", "( DVD - SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting']", "DVD)eSDVCm d df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0],", "self.ov_computeds = [] def day_calculate(self, df: pd.DataFrame, share_num: str, idx: Index, prior: Index,", "= SDVBf D-1 - YDVB f d * (SDVBf D-1 - DVD)eSDVBf d", "at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily Vol", "Stage.odt Daily Vol 1. 
Make Slow Daily Vols: Calculates the 'computeds' of single", "YDVBsl u / DaysDVupD * ( DVD - SDVB sl D-1)eSDVBsl u df.at[idx[0],", "(SDVCf2 D-1 - DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] *", "if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf']", "row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']: # Case 1: DVD >", "'SDVBf'] = df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf'])", "'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']: # Case 2: DVD", "'SDVBm']: # Case 2: DVD < SDVBm D-1 # SDVBm D = SDVBm", "''' additional calcs performed AFTER day by day operations ''' def wrap_up(self, df,", "math from datetime import date, datetime from numpy.core.numeric import NaN from pandas.core.indexes.base import", "elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']: # Case 2: DVD < SDVBsl D-1", "'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As", "prior is None: # 'DVFDf' ??? 
# df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV'] /", "df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm'] =", "< SDVBf D-1 # SDVBf D = SDVBf D-1 - YDVB f d", "daysDVup = max(1, daysDVup) # not less than 1 df.at[idx[0], 'DaysDVup'] = daysDVup", "Case 2: DVD < SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 -", "and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except IndexError as exc:", "** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']: # Case 2: DVD <", "elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']: # Case 1: DVD > SDVCsl D-1", "DVD > SDVCsl D-1 # SDVCsl D = SDVCsl D-1 + YDVCsl u", "D > SDVBsl D-1 # we're not on the very first row #", "# Case 2: DVD < SDVBf D-1 # SDVBf D = SDVBf D-1", "Vols: Calculates the 'computeds' of single (daily) row of the df ''' assert", "df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) **", "D-1 - YDVC sl d * (SDVCsl D-1 - DVD)eSDVCsl d df.at[idx[0], 'SDVCsl']", "Daily Vol Basic fast \"SDVB bf D\": if (prior is None): # first", "'SDVCsl'] = df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl'])", "D-1 # SDVCsl D = SDVCsl D-1 - YDVC sl d * (SDVCsl", "pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']: # Case 1: DVD > SDVCf1", "'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily Vol Basic medium \"SDVB m D\":", "Case 2: DVD < SDVCm D-1 # SDVCm D = SDVCm D-1 -", "at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily Vol", "row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']: # Case 1: DVD >", "D = SDVCf2 D-1 - YDVC f2 d * (SDVCf2 
D-1 - DVD)eSDVCf2", "'DVFf3'] = df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf'] # 'DVFf2' if curday_ordinal >= 2:", "df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl'] ''' additional calcs performed AFTER", "SDVBsl D-1 # we're not on the very first row # \"DaysDVupD\" is", "'SDVBsl']: # Case 1: DV D > SDVBsl D-1 # we're not on", "df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']: # Case 1: DVD > SDVCf1 D-1 #", "at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily Vol", "'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily Vol Compare slow \"SDVCsl D\":", "DV_avg elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']: # Case 1: DV D >", "D = SDVBf D-1 + YDVBf u * ( DVD - SDVBf D-1)eSDVBf", "+ YDVCf2 u * ( DVD - SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] =", "at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV']", "at stage 2' # assign into Ov SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1',", "D = SDVCf1 D-1 + YDVCf1 u * ( DVD - SDVCf1 D-1)eSDVCf1", "run at stage 2' # df is assumed daily since stage 2 is", "SDVBf D-1 + YDVBf u * ( DVD - SDVBf D-1)eSDVBf u df.at[idx[0],", "( DVD - SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting']", "1: DVD > SDVBf D-1 # SDVBf D = SDVBf D-1 + YDVBf", "df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting']", "D-1 + YDVCf2 u * ( DVD - SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2']", "SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0],", "* ( DVD - SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] 
= df.at[prior[0], 'SDVCf2'] +", "is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']: #", "SDVBf D-1 # SDVBf D = SDVBf D-1 + YDVBf u * (", "YDVC m d * (SDVCm D-1 - DVD)eSDVCm d df.at[idx[0], 'SDVCm'] = df.at[prior[0],", "df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow Daily Vol Basic slow \"SDVBsl\": #print(f\"{idx[0]} DV=", "calculation should only run at stage 2' # df is assumed daily since", "D-1 - DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * (", "(prior is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']:", "'SDVCf2']: # Case 2: DVD < SDVCf2 D-1 # SDVCf2 D = SDVCf2", "\"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if (prior is None): # first row df.at[idx[0],", "df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0],", "* ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily Vol Basic", "'SDVCf2']: # Case 1: DVD > SDVCf2 D-1 # SDVCf2 D = SDVCf2", "'DV'] - df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']: #", "= SDVBm D-1 + YDVBm u * ( DVD - SDVBm D-1)eSDVBm u", "0 # compute starting SlowVols figures by using average of 1st 5 days", "{prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till this row, write the count in", "Vol Basic medium \"SDVB m D\": if (prior is None): # first row", "+ at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0],", "D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV']", "if (prior is None): # first row 
df.at[idx[0], 'DaysDVup'] = 0 # compute", "f'{self.name} wrap_up calculation should only run at stage 2' # assign into Ov", "volumes constellation, the Daily Vols Figure, \"DVFxx\" if not prior is None: #", "SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0],", "the Daily Vols Figure, \"DVFxx\" if not prior is None: # 'DVFDf' ???", "+ at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0],", "= SDVCsl D-1 + YDVCsl u * ( DVD - SDVCsl D-1)eSDVCsl u", "DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0],", "# Case 2: DVD < SDVBm D-1 # SDVBm D = SDVBm D-1", "df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2']", "/ DaysDVupD * ( DVD - SDVB sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] =", "D-1 # SDVCf2 D = SDVCf2 D-1 - YDVC f2 d * (SDVCf2", "pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']: # Case 1: DVD > SDVBf", "of the df ''' assert stage == 2, f'{self.name} calculation should only run", "D-1 - YDVC f2 d * (SDVCf2 D-1 - DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2']", "at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in the", "figures to show a volumes constellation, the Daily Vols Figure, \"DVFxx\" if not", "'SDVBf'] # 'DVFf3' if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV']", "additional calcs performed AFTER day by day operations ''' def wrap_up(self, df, share_num,", "D-1 + YDVCsl u * ( DVD - SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl']", "'SDVCf1']: # Case 1: DVD > SDVCf1 D-1 # SDVCm D = SDVCf1", "+ at_calc_params['atp_YDVCf2u']['setting'] * ( 
df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0],", "df.at[prior[0], 'SDVBsl']: # Case 1: DV D > SDVBsl D-1 # we're not", "u * ( DVD - SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl']", "( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily Vol Compare slow", "days in a row the Slow Daily Vol Basic slow \"SDVBsl D\" increased.", "row, write the count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50) #", "at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']: # Case 2: DVD < SDVCsl", "Slow Daily Vol Compare fast1 \"SDVCf1 D\": if (prior is None): # first", "Compare fast1 \"SDVCf1 D\": if (prior is None): # first row pass elif", "1g) Slow Daily Vol Compare fast1 \"SDVCf2 D\": if (prior is None): #", "D-1 # SDVCm D = SDVCf1 D-1 + YDVCf1 u * ( DVD", "'computeds' of single (daily) row of the df ''' assert stage == 2,", "D-1 # SDVBf D = SDVBf D-1 + YDVBf u * ( DVD", "D-1 + YDVBf u * ( DVD - SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf']", "else: daysDVup = 1 df.at[idx[0], 'DaysDVup'] = daysDVup # SDVB sl D =", "SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 - YDVC f2 d *", "# 1e) Slow Daily Vol Compare medium \"SDVCm D\": if (prior is None):", "stage 2 is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a)", "- SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * (", "** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily Vol Compare medium \"SDVCm D\": if (prior", "need figures to show a volumes constellation, the Daily Vols Figure, \"DVFxx\" if", "assert stage == 2, f'{self.name} wrap_up calculation should only run at stage 2'", "df.at[prior[0], 'SDVCsl'] - 
at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e)", "( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0],", "u * ( DVD - SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf']", "df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']: #", "'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds =", "D = SDVBsl D-1 - YDVBsl d * (SDVB sl D-1 - DVD)eSDVBsl", "been increasing till this row, write the count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup", "/ df.at[_1_back, 'SDVBf'] # 'DVFf2' if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] =", "slow \"SDVBsl D\" increased. up_till = between_dates_condition(df, df.index[0], prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF", "show a volumes constellation, the Daily Vols Figure, \"DVFxx\" if not prior is", "* ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily Vol Compare", "# first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']: # Case 1:", "at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCm']) ** at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV']", "D-1 + YDVBm u * ( DVD - SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm']", "= SDVCf2 D-1 - YDVC f2 d * (SDVCf2 D-1 - DVD)eSDVCf2 d", "is None): # first row df.at[idx[0], 'DaysDVup'] = 0 # compute starting SlowVols", "+ YDVBsl u / DaysDVupD * ( DVD - SDVB sl D-1)eSDVBsl u", "is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']: #", "share_num, calc_dates_in_df, top_up, stage): assert stage == 2, f'{self.name} wrap_up calculation should only", "from 
pandas.core.indexes.base import Index import pandas as pd from bsbetl.alltable_calcs import Calculation from", "this row, write the count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50)", "D-1 - YDVBsl d * (SDVB sl D-1 - DVD)eSDVBsl d df.at[idx[0], 'SDVBsl']", "# first row df.at[idx[0], 'DaysDVup'] = 0 # compute starting SlowVols figures by", "df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting']", "df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']: # Case 1: DVD > SDVCf2 D-1 #", "per Gunther's 210209 Calc Daily Vol Initial Stage.odt Daily Vol 1. Make Slow", "# df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf'] # 'DVFf3' if curday_ordinal", "using average of 1st 5 days Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg", "at stage 2' # df is assumed daily since stage 2 is asserted", "bf D\": if (prior is None): # first row pass elif df.at[idx[0], 'DV']", "YDVBf u * ( DVD - SDVBf D-1)eSDVBf u df.at[idx[0], 'SDVBf'] = df.at[prior[0],", "calcs performed AFTER day by day operations ''' def wrap_up(self, df, share_num, calc_dates_in_df,", "210209 Calc Daily Vol Initial Stage.odt Daily Vol 1. 
Make Slow Daily Vols:", "D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV']", "'SDVCsl'] / df.at[idx[0], 'SDVBsl'] ''' additional calcs performed AFTER day by day operations", "** at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf1']: # Case 2: DVD <", "'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']: # Case 2: DVD", "* ( DVD - SDVB sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl']", "Case 1: DVD > SDVBf D-1 # SDVBf D = SDVBf D-1 +", "import logging import math from datetime import date, datetime from numpy.core.numeric import NaN", "assert stage == 2, f'{self.name} calculation should only run at stage 2' #", "Daily Volume calculation' self.dependents = ['DV'] # this column we assume exists self.at_computeds", "'DV'] <= df.at[prior[0], 'SDVBf']: # Case 2: DVD < SDVBf D-1 # SDVBf", "1 df.at[idx[0], 'DaysDVup'] = daysDVup # SDVB sl D = SDVBsl D-1 +", "( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily Vol Compare medium", "Compare medium \"SDVCm D\": if (prior is None): # first row pass elif", "- SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * (", "'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow", "df.at[prior[0], 'SDVBsl']: # Case 2: DVD < SDVBsl D-1 # SDVBsl D =", "- DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0],", "def __init__(self): super().__init__('SlowDailyVols') self.description = 'Modified Daily Volume calculation' self.dependents = ['DV'] #", "stage == 2, f'{self.name} calculation should only run at stage 2' # df", "daily since stage 2 
is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}')", "d * (SDVBm D-1 - DVD)eSDVBm d df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] -", "'SDVCsl'] + at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif", "# SDVCm D = SDVCm D-1 - YDVC m d * (SDVCm D-1", "<= df.at[prior[0], 'SDVCf1']: # Case 2: DVD < SDVCf1 D-1 # SDVCm D", "# SDVBf D = SDVBf D-1 + YDVBf u * ( DVD -", "= df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl']", "is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']: #", "+ YDVBm u * ( DVD - SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm'] =", "2: DVD < SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 - YDVC", "at_calc_params from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols')", "def wrap_up(self, df, share_num, calc_dates_in_df, top_up, stage): assert stage == 2, f'{self.name} wrap_up", "df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']: # Case 2: DVD < SDVCsl D-1 #", "df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till this", "+ YDVCm u * ( DVD - SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm'] =", "'SDVBf'] # 'DVFf1' if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1']", "Case 1: DVD > SDVCsl D-1 # SDVCsl D = SDVCsl D-1 +", "+ YDVCsl u * ( DVD - SDVCsl D-1)eSDVCsl u df.at[idx[0], 'SDVCsl'] =", "sl d * (SDVCsl D-1 - DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl']", "D = SDVCsl D-1 + YDVCsl u * ( DVD - SDVCsl D-1)eSDVCsl", 
"daysDVup) # not less than 1 df.at[idx[0], 'DaysDVup'] = daysDVup else: daysDVup =", "+ at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0],", "import pandas as pd from bsbetl.alltable_calcs import Calculation from bsbetl.alltable_calcs.at_params import at_calc_params from", "f'{self.name} calculation should only run at stage 2' # df is assumed daily", "None): # first row df.at[idx[0], 'DaysDVup'] = 0 # compute starting SlowVols figures", "D-1 - DVD)eSDVBf d df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * (", "* ( DVD - SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] +", "Case 2: DVD < SDVCsl D-1 # SDVCsl D = SDVCsl D-1 -", "SDVBsl D-1 - YDVBsl d * (SDVB sl D-1 - DVD)eSDVBsl d df.at[idx[0],", "( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily Vol Compare fast1", "self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2',", "df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) **", "first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']: # Case 1: DVD", "'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in the old ShW, we need figures", "D-1 # we're not on the very first row # \"DaysDVupD\" is the", "'SDVCf1']: # Case 2: DVD < SDVCf1 D-1 # SDVCm D = SDVCf1", "df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']: # Case 1: DVD > SDVBm D-1 #", "DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0],", "elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']: # Case 1: DVD > SDVBf D-1", "'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow 
Daily Vol Compare fast1 \"SDVCf1 D\":", "Calc Daily Vol Initial Stage.odt Daily Vol 1. Make Slow Daily Vols: Calculates", "'DV'] > df.at[prior[0], 'SDVBsl']: # Case 1: DV D > SDVBsl D-1 #", "'SDVBf'] # 'DVFf2' if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2']", "df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf'] # 'DVFf1' if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0],", "df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except IndexError as exc: logging.error(f'_2StVols_SlowDailyVols wrap_up exception {exc}') return", "/ df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl']", "'SDVCf1'] = df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1'])", "(SDVCf1 D-1 - DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] *", "['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl'", "# \"DaysDVupD\" is the number of days in a row the Slow Daily", "'SDVCf1'] / df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm'] / df.at[idx[0],", "Vol Compare medium \"SDVCm D\": if (prior is None): # first row pass", "# Case 1: DVD > SDVCsl D-1 # SDVCsl D = SDVCsl D-1", "- DVD)eSDVBm d df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0],", "1: DVD > SDVCsl D-1 # SDVCsl D = SDVCsl D-1 + YDVCsl", "Case 1: DVD > SDVCm D-1 # SDVCm D = SDVCm D-1 +", "SDVCm D-1 + YDVCm u * ( DVD - SDVCm D-1)eSDVCm u df.at[idx[0],", "elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']: # Case 1: DVD > SDVCf1 D-1", "to show a volumes constellation, the Daily Vols Figure, \"DVFxx\" if not prior", "Basic fast \"SDVB bf D\": if (prior is None): # first 
row pass", "** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']: # Case 2: DVD <", "_3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm']", "pd from bsbetl.alltable_calcs import Calculation from bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers import between_dates_condition,", "from bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation):", "at_calc_params['atp_eSDVCmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']: # Case 2: DVD < SDVCm", "more than 50 daysDVup = max(1, daysDVup) # not less than 1 df.at[idx[0],", "> SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 + YDVCf2 u *", "''' def wrap_up(self, df, share_num, calc_dates_in_df, top_up, stage): assert stage == 2, f'{self.name}", "df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily Vol Basic fast \"SDVB", "'DV'] - df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']: #", "SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except IndexError as", "* ( DVD - SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] +", "> SDVCm D-1 # SDVCm D = SDVCm D-1 + YDVCm u *", "operations ''' def wrap_up(self, df, share_num, calc_dates_in_df, top_up, stage): assert stage == 2,", "daysDVup # SDVB sl D = SDVBsl D-1 + YDVBsl u / DaysDVupD", "in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50) # not more than 50", "class 
_2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description = 'Modified Daily Volume calculation' self.dependents =", "- at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h) As in", "DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl']", "D = SDVCsl D-1 - YDVC sl d * (SDVCsl D-1 - DVD)eSDVCsl", "\"SDVCm D\": if (prior is None): # first row pass elif df.at[idx[0], 'DV']", "df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']: # Case 2:", "at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']: # Case 2: DVD < SDVBf", "d * (SDVBf D-1 - DVD)eSDVBf d df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] -", "df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0], 'SDVCf2']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf2d']['setting'] # 1h)", "* (SDVBm D-1 - DVD)eSDVBm d df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting']", "u * ( DVD - SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm']", "# Case 1: DVD > SDVBf D-1 # SDVBf D = SDVBf D-1", "'DV'] - df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']: #", "should only run at stage 2' # df is assumed daily since stage", "= df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] #", "Case 2: DVD < SDVBf D-1 # SDVBf D = SDVBf D-1 -", "* ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow Daily Vol Compare", "D = SDVCf2 D-1 + YDVCf2 u * ( DVD - SDVCf2 
D-1)eSDVCf2", "count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50) # not more than", "> SDVCsl D-1 # SDVCsl D = SDVCsl D-1 + YDVCsl u *", "= 'Modified Daily Volume calculation' self.dependents = ['DV'] # this column we assume", "df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf'] # 'DVFf3' if curday_ordinal >=", "import date, datetime from numpy.core.numeric import NaN from pandas.core.indexes.base import Index import pandas", "Ov SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except IndexError", "# Case 1: DVD > SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1", "50 daysDVup = max(1, daysDVup) # not less than 1 df.at[idx[0], 'DaysDVup'] =", "df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf'] # 'DVFf3' if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0],", "is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']: #", "the Slow Daily Vol Basic slow \"SDVBsl D\" increased. 
up_till = between_dates_condition(df, df.index[0],", "'SDVBsl'] ''' additional calcs performed AFTER day by day operations ''' def wrap_up(self,", "at_calc_params['atp_eSDVBfd']['setting'] # 1d) Slow Daily Vol Compare slow \"SDVCsl D\": if (prior is", "= df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf'] # 'DVFf1' if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3]", "= DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] =", "SDVBsl D = SDVBsl D-1 - YDVBsl d * (SDVB sl D-1 -", "= df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf']) **", "datetime import date, datetime from numpy.core.numeric import NaN from pandas.core.indexes.base import Index import", "df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']:", "SlowVols figures by using average of 1st 5 days Volume DV_avg = df.iloc[:5]['ODV'].mean(0)", "1: DVD > SDVBm D-1 # SDVBm D = SDVBm D-1 + YDVBm", "df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily Vol Compare fast1 \"SDVCf2", "numpy.core.numeric import NaN from pandas.core.indexes.base import Index import pandas as pd from bsbetl.alltable_calcs", "D-1 # SDVBm D = SDVBm D-1 + YDVBm u * ( DVD", "'DVFm', 'DVFsl' ] self.ov_computeds = [] def day_calculate(self, df: pd.DataFrame, share_num: str, idx:", "SDVBf D-1 # SDVBf D = SDVBf D-1 - YDVB f d *", "'DVFsl'] = df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl'] ''' additional calcs performed AFTER day", "= 0 # compute starting SlowVols figures by using average of 1st 5", "\"DaysDVupD\" is the number of days in a row the Slow Daily Vol", "DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50) # not more than 50 daysDVup", "YDVB f d * (SDVBf D-1 - DVD)eSDVBf d df.at[idx[0], 'SDVBf'] = 
df.at[prior[0],", "pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']: # Case 1: DVD > SDVCsl", "- DVD)eSDVCm d df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0],", "D-1)eSDVBf u df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV']", "Vol Initial Stage.odt Daily Vol 1. Make Slow Daily Vols: Calculates the 'computeds'", "D-1 # SDVCm D = SDVCf1 D-1 - YDVC f1 d * (SDVCf1", "- df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']: # Case", "self.description = 'Modified Daily Volume calculation' self.dependents = ['DV'] # this column we", "day operations ''' def wrap_up(self, df, share_num, calc_dates_in_df, top_up, stage): assert stage ==", "Daily Vol Compare fast1 \"SDVCf2 D\": if (prior is None): # first row", "1a) Slow Daily Vol Basic slow \"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if (prior", "DVD - SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] *", "first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']: # Case 1: DVD", "pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']: # Case 1: DVD > SDVCf2", "from numpy.core.numeric import NaN from pandas.core.indexes.base import Index import pandas as pd from", "df.at[idx[0], 'SDVCf1'] / df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm'] /", "Basic slow \"SDVBsl D\" increased. up_till = between_dates_condition(df, df.index[0], prior[0]) up_tillDF = df[up_till]", "DVD < SDVCm D-1 # SDVCm D = SDVCm D-1 - YDVC m", "<= df.at[prior[0], 'SDVBsl']: # Case 2: DVD < SDVBsl D-1 # SDVBsl D", "'SDVCf1'] = df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting']", "if not prior is None: # 'DVFDf' ??? 
# df.at[idx[0], 'DVFDf'] = df.at[idx[0],", "(prior is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']:", "# SDVCf2 D = SDVCf2 D-1 + YDVCf2 u * ( DVD -", "we're not on the very first row # \"DaysDVupD\" is the number of", "SDVBm D = SDVBm D-1 - YDVB m d * (SDVBm D-1 -", "curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf'] #", "# SDVCm D = SDVCf1 D-1 + YDVCf1 u * ( DVD -", "- YDVC sl d * (SDVCsl D-1 - DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] =", "of days in a row the Slow Daily Vol Basic slow \"SDVBsl D\"", "D-1 - YDVC m d * (SDVCm D-1 - DVD)eSDVCm d df.at[idx[0], 'SDVCm']", "df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf'] # 'DVFf2' if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0],", "D-1 # SDVBsl D = SDVBsl D-1 - YDVBsl d * (SDVB sl", "(SDVBf D-1 - DVD)eSDVBf d df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] *", "'SDVBm']: # Case 1: DVD > SDVBm D-1 # SDVBm D = SDVBm", "D-1 - YDVB f d * (SDVBf D-1 - DVD)eSDVBf d df.at[idx[0], 'SDVBf']", "# 'DVFDf' ??? 
# df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf'] #", "since stage 2 is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') #", "DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0], 'DV']", "1f) Slow Daily Vol Compare fast1 \"SDVCf1 D\": if (prior is None): #", "'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl'] ''' additional", "SDVBf D = SDVBf D-1 + YDVBf u * ( DVD - SDVBf", "d df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV'])", "#print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50) # not more than 50 daysDVup =", "elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']: # Case 2: DVD < SDVBm D-1", "(prior is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']:", "# SDVBm D = SDVBm D-1 - YDVB m d * (SDVBm D-1", "at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow Daily Vol", "df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBf']: # Case 2:", "elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']: # Case 2: DVD < SDVCm D-1", "# 1b) Slow Daily Vol Basic medium \"SDVB m D\": if (prior is", "'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ]", "** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']: # Case 2: DVD <", "fast1 \"SDVCf1 D\": if (prior is None): # first row pass elif df.at[idx[0],", "df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 
'DV'] <= df.at[prior[0], 'SDVCf1']:", "d * (SDVCf2 D-1 - DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] -", "'DVFDf' ??? # df.at[idx[0], 'DVFDf'] = df.at[idx[0], 'DV'] / df.at[idx[0], 'SDVBf'] # 'DVFf3'", "'DVFf2'] = df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf'] # 'DVFf1' if curday_ordinal >= 3:", "# 'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0],", "SDVBm D-1 - YDVB m d * (SDVBm D-1 - DVD)eSDVBm d df.at[idx[0],", "** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily Vol Basic fast \"SDVB bf D\": if", "'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow Daily Vol Basic fast \"SDVB bf", "'SDVCf2'] / df.at[_2_back, 'SDVBf'] # 'DVFf1' if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1']", "bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description =", "''' Implementation per Gunther's 210209 Calc Daily Vol Initial Stage.odt Daily Vol 1.", "df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f) Slow Daily Vol Compare fast1 \"SDVCf1", "df.at[_3_back, 'SDVBf'] # 'DVFm' df.at[idx[0], 'DVFm'] = df.at[idx[0], 'SDVCm'] / df.at[idx[0], 'SDVBm'] #", "stage): assert stage == 2, f'{self.name} wrap_up calculation should only run at stage", "# Case 2: DVD < SDVBsl D-1 # SDVBsl D = SDVBsl D-1", "= df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBfd']['setting'] #", "df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']: # Case 2: DVD < SDVCf2 D-1 #", "< SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 - YDVC f2 d", "Slow Daily Vols: Calculates the 'computeds' of single (daily) row of the df", "1 df.at[idx[0], 'DaysDVup'] = daysDVup 
else: daysDVup = 1 df.at[idx[0], 'DaysDVup'] = daysDVup", "df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0],", "Daily Vol Initial Stage.odt Daily Vol 1. Make Slow Daily Vols: Calculates the", "'DV'] > df.at[prior[0], 'SDVCf1']: # Case 1: DVD > SDVCf1 D-1 # SDVCm", "df.at[prior[0], 'SDVCf1']: # Case 1: DVD > SDVCf1 D-1 # SDVCm D =", "Volume calculation' self.dependents = ['DV'] # this column we assume exists self.at_computeds =", "df.at[idx[0], 'DaysDVup'] = daysDVup # SDVB sl D = SDVBsl D-1 + YDVBsl", "SDVCm D = SDVCf1 D-1 + YDVCf1 u * ( DVD - SDVCf1", "{df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been increasing till this row, write the", "2 is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow", "row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']: # Case 1: DVD >", "{df.at[idx[0], 'DV']}\") if (prior is None): # first row df.at[idx[0], 'DaysDVup'] = 0", "1e) Slow Daily Vol Compare medium \"SDVCm D\": if (prior is None): #", "'DVFf2' if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2'] / df.at[_2_back,", "stage 2' # assign into Ov SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf'])", "2' # df is assumed daily since stage 2 is asserted # print(f'prior_idx={prior},idx={idx}')", "* (SDVCf1 D-1 - DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting']", "df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']:", "at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1']) ** 
at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV']", "NaN from pandas.core.indexes.base import Index import pandas as pd from bsbetl.alltable_calcs import Calculation", "df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) **", "None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']: # Case", "SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 + YDVCf1 u * (", "Basic medium \"SDVB m D\": if (prior is None): # first row pass", "YDVCf2 u * ( DVD - SDVCf2 D-1)eSDVCf2 u df.at[idx[0], 'SDVCf2'] = df.at[prior[0],", "stage: int): ''' Implementation per Gunther's 210209 Calc Daily Vol Initial Stage.odt Daily", "1b) Slow Daily Vol Basic medium \"SDVB m D\": if (prior is None):", "\"DVFxx\" if not prior is None: # 'DVFDf' ??? # df.at[idx[0], 'DVFDf'] =", "SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except IndexError as exc: logging.error(f'_2StVols_SlowDailyVols", "DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf'] = DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] = DV_avg", "at_calc_params['atp_YDVCslu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV']", "stage 2' # df is assumed daily since stage 2 is asserted #", "import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description = 'Modified", "df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']: # Case 2:", "at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily Vol Compare medium \"SDVCm D\": if (prior is", "None): # first row pass 
elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']: # Case", "pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']: # Case 1: DVD > SDVCm", "u df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV'] -", "* ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily Vol Compare", "elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']: # Case 2: DVD < SDVCf2 D-1", "None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']: # Case", "top_up, stage): assert stage == 2, f'{self.name} wrap_up calculation should only run at", "* (SDVCm D-1 - DVD)eSDVCm d df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting']", "'SDVBm'] = df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting']", "df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0],", "#print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow Daily Vol Basic slow \"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0],", "not more than 50 daysDVup = max(1, daysDVup) # not less than 1", "None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']: # Case", "= DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0],", "(SDVBm D-1 - DVD)eSDVBm d df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] *", "df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf'] # 'DVFf1' if curday_ordinal >=", "- YDVC f2 d * (SDVCf2 D-1 - DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2'] =", "D-1 # SDVBf D = SDVBf D-1 - YDVB f d * (SDVBf", "# SDVBf D = SDVBf D-1 - YDVB f d * (SDVBf D-1", "YDVC f2 d * (SDVCf2 D-1 - DVD)eSDVCf2 d 
df.at[idx[0], 'SDVCf2'] = df.at[prior[0],", "SDVBm D-1 # SDVBm D = SDVBm D-1 - YDVB m d *", "not less than 1 df.at[idx[0], 'DaysDVup'] = daysDVup else: daysDVup = 1 df.at[idx[0],", "DVD > SDVBf D-1 # SDVBf D = SDVBf D-1 + YDVBf u", "D-1)eSDVCm u df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0], 'DV']", "YDVC f1 d * (SDVCf1 D-1 - DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] = df.at[prior[0],", "SDVCsl D-1 # SDVCsl D = SDVCsl D-1 + YDVCsl u * (", "( DVD - SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm'] = df.at[prior[0], 'SDVBm'] + at_calc_params['atp_YDVBmu']['setting']", "5 days Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl'] = DV_avg df.at[idx[0],'SDVBm'] = DV_avg df.at[idx[0],'SDVBf']", "DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0], 'SDVBsl']-df.at[idx[0],", "'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily Vol Compare medium \"SDVCm D\":", "YDVCm u * ( DVD - SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm'] = df.at[prior[0],", "* ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <=", "DVD < SDVCsl D-1 # SDVCsl D = SDVCsl D-1 - YDVC sl", "is None): # first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']: #", "= df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf'] # 'DVFf2' if curday_ordinal >= 2: _2_back=df.index[curday_ordinal-2]", "2: DVD < SDVBf D-1 # SDVBf D = SDVBf D-1 - YDVB", "df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCm']: # Case 2: DVD < SDVCm D-1 #", "SDVBm D-1 # SDVBm D = SDVBm D-1 + YDVBm u * (", "> SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 + YDVCf1 u *", "D-1 - DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] * (", "assign into Ov SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) 
ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf'])", "'SDVCm'] / df.at[idx[0], 'SDVBm'] # 'DVFsl' df.at[idx[0], 'DVFsl'] = df.at[idx[0], 'SDVCsl'] / df.at[idx[0],", "'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily Vol Compare fast1 \"SDVCf2 D\":", "Case 2: DVD < SDVBsl D-1 # SDVBsl D = SDVBsl D-1 -", "- DVD)eSDVCf1 d df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0],", "# SDVCsl D = SDVCsl D-1 + YDVCsl u * ( DVD -", "first row # \"DaysDVupD\" is the number of days in a row the", "> df.at[prior[0], 'SDVCf1']: # Case 1: DVD > SDVCf1 D-1 # SDVCm D", "# first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf2']: # Case 1:", "< SDVCm D-1 # SDVCm D = SDVCm D-1 - YDVC m d", "the count in DaysDVup #print(f'up_tilDF rows={up_tillDF.shape[0]}') daysDVup = min(up_tillDF.shape[0], 50) # not more", "SDVCf2 D = SDVCf2 D-1 - YDVC f2 d * (SDVCf2 D-1 -", "D-1 + YDVBsl u / DaysDVupD * ( DVD - SDVB sl D-1)eSDVBsl", "/ df.at[idx[0], 'SDVBf'] # 'DVFf3' if curday_ordinal >= 1: _1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] =", "'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow Daily Vol Compare fast1 \"SDVCf2 D\": if", "d df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] - at_calc_params['atp_YDVBfd']['setting'] * ( df.at[prior[0], 'SDVBf']-df.at[idx[0], 'DV'])", "Case 2: DVD < SDVBm D-1 # SDVBm D = SDVBm D-1 -", "D\" increased. 
up_till = between_dates_condition(df, df.index[0], prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]}", "DVD - SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] *", "- SDVCf1 D-1)eSDVCf1 u df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * (", "2, f'{self.name} wrap_up calculation should only run at stage 2' # assign into", "very first row # \"DaysDVupD\" is the number of days in a row", "\"SDVB m D\": if (prior is None): # first row pass elif df.at[idx[0],", "- YDVB f d * (SDVBf D-1 - DVD)eSDVBf d df.at[idx[0], 'SDVBf'] =", "d * (SDVCm D-1 - DVD)eSDVCm d df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] -", "'SDVCf1'] + at_calc_params['atp_YDVCf1u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting'] elif", "Daily Vol Compare fast1 \"SDVCf1 D\": if (prior is None): # first row", "( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0],", "# first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCf1']: # Case 1:", "= df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl']) **", "df.at[_2_back, 'SDVBf'] # 'DVFf1' if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0],", "\"SDVCsl D\": if (prior is None): # first row pass elif df.at[idx[0], 'DV']", "import NaN from pandas.core.indexes.base import Index import pandas as pd from bsbetl.alltable_calcs import", "'DV'] > df.at[prior[0], 'SDVBf']: # Case 1: DVD > SDVBf D-1 # SDVBf", "( DVD - SDVB sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] +", "= SDVCm D-1 - YDVC m d * (SDVCm D-1 - DVD)eSDVCm d", "# not less than 1 df.at[idx[0], 'DaysDVup'] = daysDVup else: daysDVup = 1", "Vol Basic slow 
\"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if (prior is None): #", "'SDVBm'] - at_calc_params['atp_YDVBmd']['setting'] * ( df.at[prior[0], 'SDVBm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBmd']['setting'] # 1c) Slow", "# Case 2: DVD < SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1", "= max(1, daysDVup) # not less than 1 df.at[idx[0], 'DaysDVup'] = daysDVup else:", "D = SDVBm D-1 + YDVBm u * ( DVD - SDVBm D-1)eSDVBm", "DVD < SDVBf D-1 # SDVBf D = SDVBf D-1 - YDVB f", "df, share_num, calc_dates_in_df, top_up, stage): assert stage == 2, f'{self.name} wrap_up calculation should", "df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting']", "= df.at[idx[0], 'SDVCsl'] / df.at[idx[0], 'SDVBsl'] ''' additional calcs performed AFTER day by", "m D\": if (prior is None): # first row pass elif df.at[idx[0], 'DV']", "elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBm']: # Case 1: DVD > SDVBm D-1", "from bsbetl.alltable_calcs import Calculation from bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df,", "* ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBf']) ** at_calc_params['atp_eSDVBfu']['setting'] elif df.at[idx[0], 'DV'] <=", "df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCsl']: # Case 1: DVD > SDVCsl D-1 #", "\"SDVB bf D\": if (prior is None): # first row pass elif df.at[idx[0],", "'SDVBf']: # Case 2: DVD < SDVBf D-1 # SDVBf D = SDVBf", "'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds", ">= 2: _2_back=df.index[curday_ordinal-2] df.at[idx[0], 'DVFf2'] = df.at[idx[0], 'SDVCf2'] / df.at[_2_back, 'SDVBf'] # 'DVFf1'", "we assume exists self.at_computeds = ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2',", "d df.at[idx[0], 'SDVCf1'] = df.at[prior[0], 'SDVCf1'] - 
at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV'])", "the df ''' assert stage == 2, f'{self.name} calculation should only run at", "SDVBf D-1 - YDVB f d * (SDVBf D-1 - DVD)eSDVBf d df.at[idx[0],", "into Ov SDVBf.D-1, and SDVBf.D-2 try: ov_helpers.global_ov_update(share_num, 'SDVBf.D-1', df.loc[df.index[-2],'SDVBf']) ov_helpers.global_ov_update(share_num, 'SDVBf.D-2', df.loc[df.index[-3],'SDVBf']) except", "- df.at[prior[0], 'SDVCsl']) ** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']: # Case", "Compare fast1 \"SDVCf2 D\": if (prior is None): # first row pass elif", "bsbetl.alltable_calcs.at_params import at_calc_params from bsbetl.calc_helpers import between_dates_condition, get_row_index_from_daily_df, last_trading_row_index, single_day_condition class _2StVols_SlowDailyVols(Calculation.Calculation): def", "df.at[prior[0], 'SDVBm']: # Case 2: DVD < SDVBm D-1 # SDVBm D =", "D-1 # SDVBm D = SDVBm D-1 - YDVB m d * (SDVBm", "= SDVBm D-1 - YDVB m d * (SDVBm D-1 - DVD)eSDVBm d", "date, datetime from numpy.core.numeric import NaN from pandas.core.indexes.base import Index import pandas as", "Slow Daily Vol Compare slow \"SDVCsl D\": if (prior is None): # first", "'DV'] <= df.at[prior[0], 'SDVCf1']: # Case 2: DVD < SDVCf1 D-1 # SDVCm", "# 'DVFf1' if curday_ordinal >= 3: _3_back=df.index[curday_ordinal-3] df.at[idx[0], 'DVFf1'] = df.at[idx[0], 'SDVCf1'] /", "df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily Vol Compare medium \"SDVCm", "= ['DaysDVup', 'SDVBsl', 'SDVBm', 'SDVBf', 'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm',", "df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBm']: # Case 2: DVD < SDVBm D-1 #", "is asserted # print(f'prior_idx={prior},idx={idx}') curday_ordinal = df.index.tolist().index(idx[0]) #print(f'_2StVols_SlowDailyVols:day_calculate: curday_ordinal={curday_ordinal}') # 1a) Slow 
Daily", "df.at[prior[0], 'SDVCsl']: # Case 1: DVD > SDVCsl D-1 # SDVCsl D =", "> df.at[prior[0], 'SDVCm']: # Case 1: DVD > SDVCm D-1 # SDVCm D", "SDVCf2 D-1 + YDVCf2 u * ( DVD - SDVCf2 D-1)eSDVCf2 u df.at[idx[0],", "a row the Slow Daily Vol Basic slow \"SDVBsl D\" increased. up_till =", "calc_dates_in_df, top_up, stage): assert stage == 2, f'{self.name} wrap_up calculation should only run", "Initial Stage.odt Daily Vol 1. Make Slow Daily Vols: Calculates the 'computeds' of", "df.at[prior[0], 'SDVCm'] - at_calc_params['atp_YDVCmd']['setting'] * ( df.at[prior[0], 'SDVCm']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCmd']['setting'] # 1f)", "'SDVCf1']) ** at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf1']: # Case 2: DVD", "df.at[prior[0], 'SDVBm']: # Case 1: DVD > SDVBm D-1 # SDVBm D =", "SDVCf2 D-1 # SDVCf2 D = SDVCf2 D-1 + YDVCf2 u * (", "prior[0]) up_tillDF = df[up_till] #print(f\"up_tillDF rows={up_tillDF.shape[0]} {df.index[0]} -> {prior[0]}\") if up_tillDF['SDVBsl'].is_monotonic_increasing: # been", "2, f'{self.name} calculation should only run at stage 2' # df is assumed", "YDVBm u * ( DVD - SDVBm D-1)eSDVBm u df.at[idx[0], 'SDVBm'] = df.at[prior[0],", "bsbetl import ov_helpers import logging import math from datetime import date, datetime from", "from datetime import date, datetime from numpy.core.numeric import NaN from pandas.core.indexes.base import Index", "at_calc_params['atp_YDVBsld']['setting'] * ( df.at[prior[0], 'SDVBsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily Vol", "D-1 # SDVCm D = SDVCm D-1 - YDVC m d * (SDVCm", "'SDVCsl', 'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds = []", "'SDVCf2'] + at_calc_params['atp_YDVCf2u']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif", "D-1 - DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] - 
at_calc_params['atp_YDVCf2d']['setting'] * (", "# 1h) As in the old ShW, we need figures to show a", "DV_avg df.at[idx[0],'SDVCsl'] = DV_avg df.at[idx[0],'SDVCm'] = DV_avg df.at[idx[0],'SDVCf1'] = DV_avg df.at[idx[0],'SDVCf2'] = DV_avg", "a volumes constellation, the Daily Vols Figure, \"DVFxx\" if not prior is None:", "Case 1: DVD > SDVCf1 D-1 # SDVCm D = SDVCf1 D-1 +", "# first row pass elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBf']: # Case 1:", "- df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']: # Case", "'SDVCm', 'SDVCf1', 'SDVCf2', 'DVFDf', 'DVFf1', 'DVFf2', 'DVFm', 'DVFsl' ] self.ov_computeds = [] def", "figures by using average of 1st 5 days Volume DV_avg = df.iloc[:5]['ODV'].mean(0) df.at[idx[0],'SDVBsl']", "= DV_avg df.at[idx[0],'SDVCf2'] = DV_avg elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVBsl']: # Case", "Gunther's 210209 Calc Daily Vol Initial Stage.odt Daily Vol 1. Make Slow Daily", "top_up: bool, stage: int): ''' Implementation per Gunther's 210209 Calc Daily Vol Initial", "= SDVBsl D-1 - YDVBsl d * (SDVB sl D-1 - DVD)eSDVBsl d", "d * (SDVCsl D-1 - DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] -", "fast1 \"SDVCf2 D\": if (prior is None): # first row pass elif df.at[idx[0],", "ov_helpers import logging import math from datetime import date, datetime from numpy.core.numeric import", "(SDVCsl D-1 - DVD)eSDVCsl d df.at[idx[0], 'SDVCsl'] = df.at[prior[0], 'SDVCsl'] - at_calc_params['atp_YDVCsld']['setting'] *", "elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']: # Case 2: DVD < SDVCsl D-1", "# 1c) Slow Daily Vol Basic fast \"SDVB bf D\": if (prior is", "elif df.at[idx[0], 'DV'] > df.at[prior[0], 'SDVCm']: # Case 1: DVD > SDVCm D-1", "sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * ( df.at[idx[0],", "'DV'] - df.at[prior[0], 'SDVCf1']) ** 
at_calc_params['atp_eSDVCf1u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf1']: #", "should only run at stage 2' # assign into Ov SDVBf.D-1, and SDVBf.D-2", "Basic slow \"SDVBsl\": #print(f\"{idx[0]} DV= {df.at[idx[0], 'DV']}\") if (prior is None): # first", "( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBm']) ** at_calc_params['atp_eSDVBmu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0],", "# SDVCf2 D = SDVCf2 D-1 - YDVC f2 d * (SDVCf2 D-1", "- at_calc_params['atp_YDVCsld']['setting'] * ( df.at[prior[0], 'SDVCsl']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCsld']['setting'] # 1e) Slow Daily", "_2StVols_SlowDailyVols(Calculation.Calculation): def __init__(self): super().__init__('SlowDailyVols') self.description = 'Modified Daily Volume calculation' self.dependents = ['DV']", "# SDVBsl D = SDVBsl D-1 - YDVBsl d * (SDVB sl D-1", "D = SDVCm D-1 - YDVC m d * (SDVCm D-1 - DVD)eSDVCm", "at_calc_params['atp_eSDVBsld']['setting'] # 1b) Slow Daily Vol Basic medium \"SDVB m D\": if (prior", "SDVB sl D-1)eSDVBsl u df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] + (at_calc_params['atp_YDVBslu']['setting']/daysDVup) * (", "= SDVCf1 D-1 - YDVC f1 d * (SDVCf1 D-1 - DVD)eSDVCf1 d", "pd.DataFrame, share_num: str, idx: Index, prior: Index, top_up: bool, stage: int): ''' Implementation", "> df.at[prior[0], 'SDVCsl']: # Case 1: DVD > SDVCsl D-1 # SDVCsl D", "max(1, daysDVup) # not less than 1 df.at[idx[0], 'DaysDVup'] = daysDVup else: daysDVup", "df.at[idx[0], 'SDVBf'] = df.at[prior[0], 'SDVBf'] + at_calc_params['atp_YDVBfu']['setting'] * ( df.at[idx[0], 'DV'] - df.at[prior[0],", "( df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVBsl']) ** at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0],", "'SDVCsl']: # Case 1: DVD > SDVCsl D-1 # SDVCsl D = SDVCsl", "df.at[idx[0], 'DV'] - df.at[prior[0], 'SDVCf2']) ** at_calc_params['atp_eSDVCf2u']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCf2']:", "'DV'] > 
df.at[prior[0], 'SDVCsl']: # Case 1: DVD > SDVCsl D-1 # SDVCsl", "SDVCm D-1)eSDVCm u df.at[idx[0], 'SDVCm'] = df.at[prior[0], 'SDVCm'] + at_calc_params['atp_YDVCmu']['setting'] * ( df.at[idx[0],", "- DVD)eSDVCf2 d df.at[idx[0], 'SDVCf2'] = df.at[prior[0], 'SDVCf2'] - at_calc_params['atp_YDVCf2d']['setting'] * ( df.at[prior[0],", "(SDVB sl D-1 - DVD)eSDVBsl d df.at[idx[0], 'SDVBsl'] = df.at[prior[0], 'SDVBsl'] - at_calc_params['atp_YDVBsld']['setting']", "** at_calc_params['atp_eSDVCslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVCsl']: # Case 2: DVD <", "SDVCf2 D = SDVCf2 D-1 + YDVCf2 u * ( DVD - SDVCf2", "'SDVCf1'] - at_calc_params['atp_YDVCf1d']['setting'] * ( df.at[prior[0], 'SDVCf1']-df.at[idx[0], 'DV']) ** at_calc_params['atp_eSDVCf1d']['setting'] # 1g) Slow", "slow \"SDVCsl D\": if (prior is None): # first row pass elif df.at[idx[0],", "at_calc_params['atp_eSDVBslu']['setting'] elif df.at[idx[0], 'DV'] <= df.at[prior[0], 'SDVBsl']: # Case 2: DVD < SDVBsl", "== 2, f'{self.name} calculation should only run at stage 2' # df is", "_1_back=df.index[curday_ordinal-1] df.at[idx[0], 'DVFf3'] = df.at[idx[0], 'DV'] / df.at[_1_back, 'SDVBf'] # 'DVFf2' if curday_ordinal" ]
[ "12)) aspects = tuple(0.5 * x for x in range(1, 5)) n_feature_maps =", "# true for resnet50 with FPN ag_sizes = tuple(tuple(sizes[i : i + 3])", "padding=(3, 3), bias=False ) return model def mrcnn(): # Get a resnet50 fpn", "import GeneralizedRCNNTransform def make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform =", "with FPN ag_sizes = tuple(tuple(sizes[i : i + 3]) for i in range(n_feature_maps))", ": i + 3]) for i in range(n_feature_maps)) ag_aspects = n_feature_maps * (aspects,)", "# Get a resnet50 fpn backbone and change the first layer for grayscale", "and 5 aspect ratios sizes = tuple(2.0 ** x for x in range(5,", "resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3,", "MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform import", "+ 3]) for i in range(n_feature_maps)) ag_aspects = n_feature_maps * (aspects,) anchor_generator =", "aspects = tuple(0.5 * x for x in range(1, 5)) n_feature_maps = 5", "model def mrcnn(): # Get a resnet50 fpn backbone and change the first", "padding=(3, 3), bias=False ) # Make anchor generator with 3 sizes per feature", "model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False )", "] import torch from torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from", "range(n_feature_maps)) ag_aspects = n_feature_maps * (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into", "n_feature_maps = 5 # true for resnet50 with FPN ag_sizes = tuple(tuple(sizes[i :", "import torch from torchvision.models.detection 
import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn", "3), bias=False ) return model def mrcnn(): # Get a resnet50 fpn backbone", "(aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into MaskRCNN mrcnn = MaskRCNN( backbone,", "__all__ = [ \"make_mrcnn\", \"mrcnn\", ] import torch from torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn", "and change the first layer for grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1", "GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0], image_std=[1] ) model.transform = transform model.backbone.body.conv1 = torch.nn.Conv2d( 1,", "pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3),", "= tuple(0.5 * x for x in range(1, 5)) n_feature_maps = 5 #", "image_std=[1] ) model.transform = transform model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2,", "# Assemble into MaskRCNN mrcnn = MaskRCNN( backbone, 2, image_mean=[0], image_std=[1], rpn_anchor_generator=anchor_generator, )", "\"make_mrcnn\", \"mrcnn\", ] import torch from torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import", "= maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0], image_std=[1]", "bias=False ) return model def mrcnn(): # Get a resnet50 fpn backbone and", "sizes per feature map and 5 aspect ratios sizes = tuple(2.0 ** x", "true for resnet50 with FPN ag_sizes = tuple(tuple(sizes[i : i + 3]) for", "64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) return model def mrcnn():", "model = maskrcnn_resnet50_fpn( 
num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0],", "n_feature_maps * (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into MaskRCNN mrcnn =", "= tuple(tuple(sizes[i : i + 3]) for i in range(n_feature_maps)) ag_aspects = n_feature_maps", "for resnet50 with FPN ag_sizes = tuple(tuple(sizes[i : i + 3]) for i", "x for x in range(5, 12)) aspects = tuple(0.5 * x for x", "from torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator", "with 3 sizes per feature map and 5 aspect ratios sizes = tuple(2.0", "Make anchor generator with 3 sizes per feature map and 5 aspect ratios", "5 aspect ratios sizes = tuple(2.0 ** x for x in range(5, 12))", "* (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into MaskRCNN mrcnn = MaskRCNN(", "from torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn(): model = maskrcnn_resnet50_fpn(", "tuple(tuple(sizes[i : i + 3]) for i in range(n_feature_maps)) ag_aspects = n_feature_maps *", "AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into MaskRCNN mrcnn = MaskRCNN( backbone, 2, image_mean=[0], image_std=[1],", "torch from torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn import", "stride=(2, 2), padding=(3, 3), bias=False ) return model def mrcnn(): # Get a", "into MaskRCNN mrcnn = MaskRCNN( backbone, 2, image_mean=[0], image_std=[1], rpn_anchor_generator=anchor_generator, ) return mrcnn", "resnet50 fpn backbone and change the first layer for grayscale 
backbone = resnet_fpn_backbone(\"resnet50\",", "5)) n_feature_maps = 5 # true for resnet50 with FPN ag_sizes = tuple(tuple(sizes[i", "anchor generator with 3 sizes per feature map and 5 aspect ratios sizes", "in range(1, 5)) n_feature_maps = 5 # true for resnet50 with FPN ag_sizes", "model.transform = transform model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3,", "layer for grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1, 64,", "range(5, 12)) aspects = tuple(0.5 * x for x in range(1, 5)) n_feature_maps", "tuple(0.5 * x for x in range(1, 5)) n_feature_maps = 5 # true", "kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) # Make anchor generator with", "for x in range(5, 12)) aspects = tuple(0.5 * x for x in", "from torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 )", "2), padding=(3, 3), bias=False ) return model def mrcnn(): # Get a resnet50", "first layer for grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1,", "2), padding=(3, 3), bias=False ) # Make anchor generator with 3 sizes per", "feature map and 5 aspect ratios sizes = tuple(2.0 ** x for x", "x in range(1, 5)) n_feature_maps = 5 # true for resnet50 with FPN", "bias=False ) # Make anchor generator with 3 sizes per feature map and", "tuple(2.0 ** x for x in range(5, 12)) aspects = tuple(0.5 * x", "torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator from", "ag_aspects = n_feature_maps * (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into 
MaskRCNN", "return model def mrcnn(): # Get a resnet50 fpn backbone and change the", "= tuple(2.0 ** x for x in range(5, 12)) aspects = tuple(0.5 *", "= GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0], image_std=[1] ) model.transform = transform model.backbone.body.conv1 = torch.nn.Conv2d(", "transform = GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0], image_std=[1] ) model.transform = transform model.backbone.body.conv1 =", "import AnchorGenerator from torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True,", "AnchorGenerator from torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5", "3 sizes per feature map and 5 aspect ratios sizes = tuple(2.0 **", ") # Make anchor generator with 3 sizes per feature map and 5", "fpn backbone and change the first layer for grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True,", "min_size=800, max_size=1333, image_mean=[0], image_std=[1] ) model.transform = transform model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64,", "1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) # Make anchor", "resnet50 with FPN ag_sizes = tuple(tuple(sizes[i : i + 3]) for i in", ") transform = GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0], image_std=[1] ) model.transform = transform model.backbone.body.conv1", "[ \"make_mrcnn\", \"mrcnn\", ] import torch from torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils", "stride=(2, 2), padding=(3, 3), bias=False ) # Make anchor generator with 3 sizes", "num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0], 
image_std=[1] ) model.transform", "for grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7,", "transform model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False", "grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7),", "backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False )", "ag_sizes = tuple(tuple(sizes[i : i + 3]) for i in range(n_feature_maps)) ag_aspects =", "def mrcnn(): # Get a resnet50 fpn backbone and change the first layer", "range(1, 5)) n_feature_maps = 5 # true for resnet50 with FPN ag_sizes =", "maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0], image_std=[1] )", "trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False", "import resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn(): model", "= [ \"make_mrcnn\", \"mrcnn\", ] import torch from torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn from", "sizes = tuple(2.0 ** x for x in range(5, 12)) aspects = tuple(0.5", "for x in range(1, 5)) n_feature_maps = 5 # true for resnet50 with", "= resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2),", "map and 5 aspect ratios sizes = tuple(2.0 ** x for x in", "* x for x in range(1, 5)) n_feature_maps = 5 # true for", "torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) # Make", 
"torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform", "GeneralizedRCNNTransform def make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform(", "x for x in range(1, 5)) n_feature_maps = 5 # true for resnet50", "max_size=1333, image_mean=[0], image_std=[1] ) model.transform = transform model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7,", "= transform model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3),", "Assemble into MaskRCNN mrcnn = MaskRCNN( backbone, 2, image_mean=[0], image_std=[1], rpn_anchor_generator=anchor_generator, ) return", "3]) for i in range(n_feature_maps)) ag_aspects = n_feature_maps * (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes,", "x in range(5, 12)) aspects = tuple(0.5 * x for x in range(1,", "i in range(n_feature_maps)) ag_aspects = n_feature_maps * (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) #", "image_mean=[0], image_std=[1] ) model.transform = transform model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7),", "= 5 # true for resnet50 with FPN ag_sizes = tuple(tuple(sizes[i : i", "generator with 3 sizes per feature map and 5 aspect ratios sizes =", "for i in range(n_feature_maps)) ag_aspects = n_feature_maps * (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects)", "the first layer for grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d(", "backbone and change the first layer for grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5)", "trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform( 
min_size=800, max_size=1333, image_mean=[0], image_std=[1] ) model.transform = transform", "= torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) #", ") return model def mrcnn(): # Get a resnet50 fpn backbone and change", "per feature map and 5 aspect ratios sizes = tuple(2.0 ** x for", "1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) return model def", "resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn(): model =", "# Make anchor generator with 3 sizes per feature map and 5 aspect", "7), stride=(2, 2), padding=(3, 3), bias=False ) return model def mrcnn(): # Get", "change the first layer for grayscale backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 =", "5 # true for resnet50 with FPN ag_sizes = tuple(tuple(sizes[i : i +", "aspect_ratios=ag_aspects) # Assemble into MaskRCNN mrcnn = MaskRCNN( backbone, 2, image_mean=[0], image_std=[1], rpn_anchor_generator=anchor_generator,", "anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into MaskRCNN mrcnn = MaskRCNN( backbone, 2,", "= n_feature_maps * (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into MaskRCNN mrcnn", "mrcnn(): # Get a resnet50 fpn backbone and change the first layer for", "3), bias=False ) # Make anchor generator with 3 sizes per feature map", "Get a resnet50 fpn backbone and change the first layer for grayscale backbone", "make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform( min_size=800, max_size=1333,", "from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform 
import GeneralizedRCNNTransform def", "def make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2, pretrained_backbone=True, trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform( min_size=800,", "ratios sizes = tuple(2.0 ** x for x in range(5, 12)) aspects =", "= torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) return", "= AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble into MaskRCNN mrcnn = MaskRCNN( backbone, 2, image_mean=[0],", "import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform", "FPN ag_sizes = tuple(tuple(sizes[i : i + 3]) for i in range(n_feature_maps)) ag_aspects", "torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) return model", "7), stride=(2, 2), padding=(3, 3), bias=False ) # Make anchor generator with 3", "aspect ratios sizes = tuple(2.0 ** x for x in range(5, 12)) aspects", "in range(n_feature_maps)) ag_aspects = n_feature_maps * (aspects,) anchor_generator = AnchorGenerator(sizes=ag_sizes, aspect_ratios=ag_aspects) # Assemble", "torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn(): model = maskrcnn_resnet50_fpn( num_classes=2,", "** x for x in range(5, 12)) aspects = tuple(0.5 * x for", "\"mrcnn\", ] import torch from torchvision.models.detection import MaskRCNN, maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone", "torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform import GeneralizedRCNNTransform def make_mrcnn():", "64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) # Make anchor 
generator", "i + 3]) for i in range(n_feature_maps)) ag_aspects = n_feature_maps * (aspects,) anchor_generator", "backbone = resnet_fpn_backbone(\"resnet50\", pretrained=True, trainable_layers=5) backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2,", "kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False ) return model def mrcnn(): #", "pretrained_backbone=True, trainable_backbone_layers=5 ) transform = GeneralizedRCNNTransform( min_size=800, max_size=1333, image_mean=[0], image_std=[1] ) model.transform =", "maskrcnn_resnet50_fpn from torchvision.models.detection.backbone_utils import resnet_fpn_backbone from torchvision.models.detection.rpn import AnchorGenerator from torchvision.models.detection.transform import GeneralizedRCNNTransform", ") model.transform = transform model.backbone.body.conv1 = torch.nn.Conv2d( 1, 64, kernel_size=(7, 7), stride=(2, 2),", "in range(5, 12)) aspects = tuple(0.5 * x for x in range(1, 5))", "a resnet50 fpn backbone and change the first layer for grayscale backbone =" ]
[]
[ "mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql): fp = open('sql.txt', 'a+') print('write start!') try: fp.write(sql)", "= open('sql.txt', 'a+') print('write start!') try: fp.write(sql) finally: fp.close() print('write finish!') def read_sql_file():", "open('sql.txt', 'a+') print('write start!') try: fp.write(sql) finally: fp.close() print('write finish!') def read_sql_file(): fp", "open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql): fp =", "fp.write(sql) finally: fp.close() print('write finish!') def read_sql_file(): fp = open('sql.txt', 'r+') return fp.read()", "threading mu = threading.Lock() def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql): if mu.acquire(True):", "= threading.Lock() def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release()", "-*- import threading mu = threading.Lock() def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql):", "-*- coding: UTF-8 -*- import threading mu = threading.Lock() def create_sql_file(): open('sql.txt', 'w+',", "'a+') print('write start!') try: fp.write(sql) finally: fp.close() print('write finish!') def read_sql_file(): fp =", "#!/usr/bin/python # -*- coding: UTF-8 -*- import threading mu = threading.Lock() def create_sql_file():", "lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql): fp = open('sql.txt', 'a+') print('write start!')", "try: fp.write(sql) finally: fp.close() print('write finish!') def read_sql_file(): fp = open('sql.txt', 'r+') return", "import threading mu = threading.Lock() def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql): if", "def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql):", 
"def write_to_file(sql): fp = open('sql.txt', 'a+') print('write start!') try: fp.write(sql) finally: fp.close() print('write", "create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql): fp", "encoding='utf-8') def lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql): fp = open('sql.txt', 'a+')", "def lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql): fp = open('sql.txt', 'a+') print('write", "write_to_file(sql): fp = open('sql.txt', 'a+') print('write start!') try: fp.write(sql) finally: fp.close() print('write finish!')", "write_to_file(sql) mu.release() def write_to_file(sql): fp = open('sql.txt', 'a+') print('write start!') try: fp.write(sql) finally:", "# -*- coding: UTF-8 -*- import threading mu = threading.Lock() def create_sql_file(): open('sql.txt',", "fp = open('sql.txt', 'a+') print('write start!') try: fp.write(sql) finally: fp.close() print('write finish!') def", "threading.Lock() def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release() def", "UTF-8 -*- import threading mu = threading.Lock() def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def", "coding: UTF-8 -*- import threading mu = threading.Lock() def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8')", "'w+', encoding='utf-8') def lock_test(sql): if mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql): fp = open('sql.txt',", "print('write start!') try: fp.write(sql) finally: fp.close() print('write finish!') def read_sql_file(): fp = open('sql.txt',", "start!') try: fp.write(sql) finally: fp.close() print('write finish!') def read_sql_file(): fp = open('sql.txt', 'r+')", "mu = threading.Lock() def create_sql_file(): open('sql.txt', 'w+', encoding='utf-8') def lock_test(sql): if mu.acquire(True): 
write_to_file(sql)", "if mu.acquire(True): write_to_file(sql) mu.release() def write_to_file(sql): fp = open('sql.txt', 'a+') print('write start!') try:", "mu.release() def write_to_file(sql): fp = open('sql.txt', 'a+') print('write start!') try: fp.write(sql) finally: fp.close()" ]
[ "M <10: M='0'+str(M) if O<1: print(f\"{H}:{M} - A porta fechou!\") else: print(f\"{H}:{M} -", "H='0'+str(H) if M <10: M='0'+str(M) if O<1: print(f\"{H}:{M} - A porta fechou!\") else:", "in range(int(input())): H,M,O =map(int,input().split()) if H <10: H='0'+str(H) if M <10: M='0'+str(M) if", "i in range(int(input())): H,M,O =map(int,input().split()) if H <10: H='0'+str(H) if M <10: M='0'+str(M)", "for i in range(int(input())): H,M,O =map(int,input().split()) if H <10: H='0'+str(H) if M <10:", "H <10: H='0'+str(H) if M <10: M='0'+str(M) if O<1: print(f\"{H}:{M} - A porta", "<10: M='0'+str(M) if O<1: print(f\"{H}:{M} - A porta fechou!\") else: print(f\"{H}:{M} - A", "H,M,O =map(int,input().split()) if H <10: H='0'+str(H) if M <10: M='0'+str(M) if O<1: print(f\"{H}:{M}", "<10: H='0'+str(H) if M <10: M='0'+str(M) if O<1: print(f\"{H}:{M} - A porta fechou!\")", "M='0'+str(M) if O<1: print(f\"{H}:{M} - A porta fechou!\") else: print(f\"{H}:{M} - A porta", "range(int(input())): H,M,O =map(int,input().split()) if H <10: H='0'+str(H) if M <10: M='0'+str(M) if O<1:", "=map(int,input().split()) if H <10: H='0'+str(H) if M <10: M='0'+str(M) if O<1: print(f\"{H}:{M} -", "if H <10: H='0'+str(H) if M <10: M='0'+str(M) if O<1: print(f\"{H}:{M} - A", "if O<1: print(f\"{H}:{M} - A porta fechou!\") else: print(f\"{H}:{M} - A porta abriu!\")", "if M <10: M='0'+str(M) if O<1: print(f\"{H}:{M} - A porta fechou!\") else: print(f\"{H}:{M}" ]
[ "this using Python logging system. Otherwise raise the error as an exception. :param", "_send_sms(request, receiver, from_, text_body, log_failure) def send_sms(request: Request, receiver: str, text_body: str, sender:", "text_body) logger.info(\"Queuing sending SMS to: %s, body: %s\", receiver, text_body) # Put the", "supported with Websauna framework\") _send_sms_async.apply_async(args=(receiver, text_body, sender, log_failure,)) else: _send_sms(request, receiver, text_body, sender,", "not block HTTP request due to slow API calls to a third party", "framework\") _send_sms_async.apply_async(args=(receiver, text_body, sender, log_failure,)) else: _send_sms(request, receiver, text_body, sender, log_failure) request.registry.notify(SMSSent(request, receiver,", "input before passing in. See :py:mod:`pyramid_sms.utils` for examples. :param text_body: Outbound SMS body.", "If there is an exception from the SMS backend then log this using", "Websauna framework\") _send_sms_async.apply_async(args=(receiver, text_body, sender, log_failure,)) else: _send_sms(request, receiver, text_body, sender, log_failure) request.registry.notify(SMSSent(request,", "is None: raise SMSConfigurationError(\"sms.async setting not defined\") _async = asbool(_async) if sender is", "logging import pkg_resources from pyramid.renderers import render from pyramid.settings import asbool from pyramid_sms.utils", "backend then log this using Python logging system. Otherwise raise the error as", "calls to a third party service. :param user_dialog: This SMS is part of", "request.registry.notify(SMSSent(request, receiver, text_body, sender, user_dialog)) def send_templated_sms(request: Request, template: str, context: dict, receiver:", "examples. :param text_body: Outbound SMS body. Usually up to 1600 characters. 
:param sender:", "log_failure) request.registry.notify(SMSSent(request, receiver, text_body, sender, user_dialog)) def send_templated_sms(request: Request, template: str, context: dict,", "_async is None: _async = request.registry.settings.get(\"sms.async\") if _async is None: raise SMSConfigurationError(\"sms.async setting", "len(text_body) >= 1600: logger.warn(\"Too long SMS: %s\", text_body) logger.info(\"Queuing sending SMS to: %s,", "task subsystem. If ``None`` respect ``sms.async`` settings. If the operation is asynchronous, this", "SMSSent logger = logging.getLogger(__name__) def _send_sms(request, receiver, text_body, sender, log_failure): \"\"\"Perform actual SMS", "uses templates instead of hardcoded messages. :param request: HTTP request :param template: Template", "default configured SMS service. Example: .. code-block:: python def test_sms_view(request): '''Dummy view to", "Set ``False`` to two-factor auth tokens and such. :raise SMSConfigurationError: If configuration settings", "known user. Use this flag to log messages with the user in your", "a third party service. :param user_dialog: This SMS is part of a dialog", "phone number as international format. You should normalize this number from all user", "operation is asynchronous, this function returns instantly and does not block HTTP request", "test_sms_view(request): '''Dummy view to simulate outgoing SMS.''' send_sms(request, \"+15551231234\", \"Test message\") :param receiver:", "If ``None`` respect ``sms.async`` settings. If the operation is asynchronous, this function returns", "send_templated_sms(request: Request, template: str, context: dict, receiver: str, sender: str=None, log_failure: bool=True, _async:", "dict, receiver: str, sender: str=None, log_failure: bool=True, _async: bool=None, user_dialog: bool=False): \"\"\"Send out", "then log this using Python logging system. 
Otherwise raise the error as an", "log_failure): \"\"\"Perform actual SMS outbound operation through a configured service.\"\"\" service = get_sms_backend(request)", "all user input before passing in. See :py:mod:`pyramid_sms.utils` for examples. :param text_body: Outbound", "asynchronous operation through task subsystem. If ``None`` respect ``sms.async`` settings. If the operation", "using a page template. Same as :py:meth:`pyramid_sms.outgoing.send_sms`, but uses templates instead of hardcoded", "not configured\") # https://www.twilio.com/help/faq/sms/does-twilio-support-concatenated-sms-messages-or-messages-over-160-characters if len(text_body) >= 1600: logger.warn(\"Too long SMS: %s\", text_body)", ":py:mod:`pyramid_sms.utils` for examples. :param text_body: Outbound SMS body. Usually up to 1600 characters.", "1600: logger.warn(\"Too long SMS: %s\", text_body) logger.info(\"Queuing sending SMS to: %s, body: %s\",", "receiver, text_body, sender, user_dialog)) def send_templated_sms(request: Request, template: str, context: dict, receiver: str,", "async: Force asynchronous operation through task subsystem. If ``None`` respect ``sms.async`` settings. If", "pyramid_sms.utils import get_sms_backend try: pkg_resources.get_distribution('websauna') from websauna.system.http import Request from websauna.system.task.tasks import task", "configured SMS service. Example: .. code-block:: python def test_sms_view(request): '''Dummy view to simulate", "str=None, log_failure: bool=True, _async: bool=None, user_dialog: bool=False): \"\"\"Send out a SMS that is", "this function returns instantly and does not block HTTP request due to slow", "import SMSSent logger = logging.getLogger(__name__) def _send_sms(request, receiver, text_body, sender, log_failure): \"\"\"Perform actual", "import Request HAS_WEBSAUNA = False from .interfaces import SMSConfigurationError from .events import SMSSent", "outgoing SMS message using the default configured SMS service. Example: .. 
"""Outgoing SMS API."""
import logging

import pkg_resources
from pyramid.renderers import render
from pyramid.settings import asbool

from pyramid_sms.utils import get_sms_backend

try:
    pkg_resources.get_distribution('websauna')
    from websauna.system.http import Request
    from websauna.system.task.tasks import task
    from websauna.system.task.tasks import ScheduleOnCommitTask
    # BUGFIX: this branch only runs when Websauna *is* installed, so the
    # flag must be True here.  It was previously False in both branches,
    # which made the async delivery path unreachable: _send_sms_async was
    # never defined and send_sms(_async=True) always raised
    # SMSConfigurationError even with Websauna available.
    HAS_WEBSAUNA = True
except pkg_resources.DistributionNotFound:
    from pyramid.request import Request
    HAS_WEBSAUNA = False

from .interfaces import SMSConfigurationError
from .events import SMSSent

logger = logging.getLogger(__name__)
def _send_sms(request, receiver, text_body, sender, log_failure):
    """Dispatch one outbound SMS through the configured backend.

    Internal helper -- external callers should go through
    :py:func:`send_sms`, which handles configuration defaults, async
    queuing and event notification before delegating here.
    """
    backend = get_sms_backend(request)
    backend.send_sms(receiver, text_body, sender, log_failure)
if HAS_WEBSAUNA:
    # TODO: Factor this to a separate configurable module
    @task(base=ScheduleOnCommitTask, bind=True)
    def _send_sms_async(self, receiver, from_, text_body, log_failure):
        """Celery task that performs the SMS delivery outside HTTP request processing.

        Queued by :py:func:`send_sms` when ``sms.async`` is enabled, so the
        slow third-party API call does not block the web request.
        """
        # NOTE(review): send_sms() queues this task with positional args
        # (receiver, text_body, sender, log_failure).  Bound to this
        # signature, ``from_`` therefore holds the message body and
        # ``text_body`` holds the sender number.  The positional
        # pass-through below maps them onto _send_sms(request, receiver,
        # text_body, sender, log_failure), which realigns everything --
        # behavior appears correct, but the names are misleading.
        # TODO confirm against _send_sms's signature before renaming.
        request = self.request.request
        _send_sms(request, receiver, from_, text_body, log_failure)
def send_sms(request: Request, receiver: str, text_body: str, sender: str=None, log_failure: bool=True, _async: bool=None, user_dialog: bool=False):
    """Send outgoing SMS message using the default configured SMS service.

    Example:

    .. code-block:: python

        def test_sms_view(request):
            '''Dummy view to simulate outgoing SMS.'''
            send_sms(request, "+15551231234", "Test message")

    :param receiver: Receiver's phone number as international format. You should normalize this number from all user input before passing in. See :py:mod:`pyramid_sms.utils` for examples.

    :param text_body: Outbound SMS body. Usually up to 1600 characters.

    :param sender: Envelope from number. Needs to be configured in the service. If none use the ``sms.default_sender`` setting.

    :param log_failure: If there is an exception from the SMS backend then log this using Python logging system. Otherwise raise the error as an exception.

    :param _async: Force asynchronous operation through task subsystem. If ``None`` respect ``sms.async`` settings. If the operation is asynchronous, this function returns instantly and does not block HTTP request due to slow API calls to a third party service.

    :param user_dialog: This SMS is part of a dialog with a known user. Use this flag to log messages with the user in your conversation dashboard. Set ``False`` to two-factor auth tokens and such.

    :raise SMSConfigurationError: If configuration settings are missing
    """
    # Resolve async mode: explicit argument wins, otherwise the
    # sms.async setting must be present.
    if _async is None:
        _async = request.registry.settings.get("sms.async")
        if _async is None:
            raise SMSConfigurationError("sms.async setting not defined")

    _async = asbool(_async)

    # Resolve sender: explicit argument wins, otherwise fall back to the
    # sms.default_sender setting.
    if sender is None:
        sender = request.registry.settings.get("sms.default_sender")
        if not sender:
            raise SMSConfigurationError("sms.default_sender not configured")

    # https://www.twilio.com/help/faq/sms/does-twilio-support-concatenated-sms-messages-or-messages-over-160-characters
    if len(text_body) >= 1600:
        # logger.warn() is deprecated in favor of logger.warning()
        logger.warning("Too long SMS: %s", text_body)

    logger.info("Queuing sending SMS to: %s, body: %s", receiver, text_body)

    # Put the actual delivery operation on the async queue, or perform it
    # inline when async is disabled.
    if _async:
        if not HAS_WEBSAUNA:
            raise SMSConfigurationError("Async operations are only supported with Websauna framework")
        _send_sms_async.apply_async(args=(receiver, text_body, sender, log_failure,))
    else:
        _send_sms(request, receiver, text_body, sender, log_failure)

    # Notify subscribers (e.g. conversation dashboards) that a message
    # went out.
    request.registry.notify(SMSSent(request, receiver, text_body, sender, user_dialog))
def send_templated_sms(request: Request, template: str, context: dict, receiver: str, sender: str=None, log_failure: bool=True, _async: bool=None, user_dialog: bool=False):
    """Render a page template and send the result as an SMS.

    Behaves exactly like :py:func:`pyramid_sms.outgoing.send_sms`, except
    that the message body is produced by the template machinery instead of
    being passed in as a hardcoded string.

    :param request: HTTP request

    :param template: Template name. Like ``welcome_sms.txt.jinja``.

    :param context: Dictionary passed to template rendering engine

    """
    rendered_body = render(template, context, request=request)
    send_sms(request, receiver, rendered_body, sender, log_failure, _async, user_dialog)
code-block:: python def test_sms_view(request): '''Dummy", "Factor this to a separate configurable module @task(base=ScheduleOnCommitTask, bind=True) def _send_sms_async(self, receiver, from_,", "message\") :param receiver: Receiver's phone number as international format. You should normalize this", "to send the SMS synchronously outside HTTP request proccesing.\"\"\" request = self.request.request _send_sms(request,", "send the SMS synchronously outside HTTP request proccesing.\"\"\" request = self.request.request _send_sms(request, receiver,", "HAS_WEBSAUNA = False from .interfaces import SMSConfigurationError from .events import SMSSent logger =", "user_dialog)) def send_templated_sms(request: Request, template: str, context: dict, receiver: str, sender: str=None, log_failure:", "sender, log_failure,)) else: _send_sms(request, receiver, text_body, sender, log_failure) request.registry.notify(SMSSent(request, receiver, text_body, sender, user_dialog))", "in. See :py:mod:`pyramid_sms.utils` for examples. :param text_body: Outbound SMS body. Usually up to", "SMS message using the default configured SMS service. Example: .. code-block:: python def", "operation through a configured service.\"\"\" service = get_sms_backend(request) service.send_sms(receiver, text_body, sender, log_failure) if", "bool=True, _async: bool=None, user_dialog: bool=False): \"\"\"Send out a SMS that is constructed using", "messages with the user in your conversation dashboard. Set ``False`` to two-factor auth", "\"\"\"Send out a SMS that is constructed using a page template. Same as", "= logging.getLogger(__name__) def _send_sms(request, receiver, text_body, sender, log_failure): \"\"\"Perform actual SMS outbound operation", "configured in the service. If none use default configured \"sms.default_from\". :param log_failure: If", "name. Like ``welcome_sms.txt.jinja``. :param context: Dictionary passed to template rendering engine \"\"\" text_body", "to a third party service. 
:param user_dialog: This SMS is part of a", "raise SMSConfigurationError(\"sms.default_sender not configured\") # https://www.twilio.com/help/faq/sms/does-twilio-support-concatenated-sms-messages-or-messages-over-160-characters if len(text_body) >= 1600: logger.warn(\"Too long SMS:", "Example: .. code-block:: python def test_sms_view(request): '''Dummy view to simulate outgoing SMS.''' send_sms(request,", "def _send_sms(request, receiver, text_body, sender, log_failure): \"\"\"Perform actual SMS outbound operation through a", "HAS_WEBSAUNA: # TODO: Factor this to a separate configurable module @task(base=ScheduleOnCommitTask, bind=True) def", "message using the default configured SMS service. Example: .. code-block:: python def test_sms_view(request):", "sender, user_dialog)) def send_templated_sms(request: Request, template: str, context: dict, receiver: str, sender: str=None,", "``None`` respect ``sms.async`` settings. If the operation is asynchronous, this function returns instantly", "code-block:: python def test_sms_view(request): '''Dummy view to simulate outgoing SMS.''' send_sms(request, \"+15551231234\", \"Test", "is part of a dialog with a known user. Use this flag to", "through task subsystem. If ``None`` respect ``sms.async`` settings. If the operation is asynchronous,", "with a known user. Use this flag to log messages with the user", "of a dialog with a known user. Use this flag to log messages", "tokens and such. :raise SMSConfigurationError: If configuration settings are missing \"\"\" if _async", "should normalize this number from all user input before passing in. See :py:mod:`pyramid_sms.utils`", "queue if _async: if not HAS_WEBSAUNA: raise SMSConfigurationError(\"Async operations are only supported with", "logger.warn(\"Too long SMS: %s\", text_body) logger.info(\"Queuing sending SMS to: %s, body: %s\", receiver,", "such. 
:raise SMSConfigurationError: If configuration settings are missing \"\"\" if _async is None:", "to a separate configurable module @task(base=ScheduleOnCommitTask, bind=True) def _send_sms_async(self, receiver, from_, text_body, log_failure):", "request.registry.settings.get(\"sms.async\") if _async is None: raise SMSConfigurationError(\"sms.async setting not defined\") _async = asbool(_async)", "an exception. :param async: Force asynchronous operation through task subsystem. If ``None`` respect", "user_dialog: bool=False): \"\"\"Send outgoing SMS message using the default configured SMS service. Example:", "Outbound SMS body. Usually up to 1600 characters. :param sender: Envelope from number.", "missing \"\"\" if _async is None: _async = request.registry.settings.get(\"sms.async\") if _async is None:", "import Request from websauna.system.task.tasks import task from websauna.system.task.tasks import ScheduleOnCommitTask HAS_WEBSAUNA = False", "dashboard. Set ``False`` to two-factor auth tokens and such. :raise SMSConfigurationError: If configuration", "_send_sms_async(self, receiver, from_, text_body, log_failure): \"\"\"Celery task to send the SMS synchronously outside", "Receiver's phone number as international format. You should normalize this number from all", ":param sender: Envelope from number. Needs to be configured in the service. If", "SMS body. Usually up to 1600 characters. :param sender: Envelope from number. Needs", "Request, template: str, context: dict, receiver: str, sender: str=None, log_failure: bool=True, _async: bool=None,", "request: HTTP request :param template: Template name. Like ``welcome_sms.txt.jinja``. 
:param context: Dictionary passed", "Request HAS_WEBSAUNA = False from .interfaces import SMSConfigurationError from .events import SMSSent logger", "asbool from pyramid_sms.utils import get_sms_backend try: pkg_resources.get_distribution('websauna') from websauna.system.http import Request from websauna.system.task.tasks", "configured\") # https://www.twilio.com/help/faq/sms/does-twilio-support-concatenated-sms-messages-or-messages-over-160-characters if len(text_body) >= 1600: logger.warn(\"Too long SMS: %s\", text_body) logger.info(\"Queuing", "normalize this number from all user input before passing in. See :py:mod:`pyramid_sms.utils` for", "log_failure: If there is an exception from the SMS backend then log this", "SMS that is constructed using a page template. Same as :py:meth:`pyramid_sms.outgoing.send_sms`, but uses", "bool=True, _async: bool=None, user_dialog: bool=False): \"\"\"Send outgoing SMS message using the default configured", "error as an exception. :param async: Force asynchronous operation through task subsystem. If", "log_failure) def send_sms(request: Request, receiver: str, text_body: str, sender: str=None, log_failure: bool=True, _async:", "proccesing.\"\"\" request = self.request.request _send_sms(request, receiver, from_, text_body, log_failure) def send_sms(request: Request, receiver:", "default configured \"sms.default_from\". :param log_failure: If there is an exception from the SMS", "If configuration settings are missing \"\"\" if _async is None: _async = request.registry.settings.get(\"sms.async\")", "_async: bool=None, user_dialog: bool=False): \"\"\"Send outgoing SMS message using the default configured SMS", "is an exception from the SMS backend then log this using Python logging", "# Put the actual Twilio operation async queue if _async: if not HAS_WEBSAUNA:", "outside HTTP request proccesing.\"\"\" request = self.request.request _send_sms(request, receiver, from_, text_body, log_failure) def", "logging system. 
Otherwise raise the error as an exception. :param async: Force asynchronous", "sender, log_failure) request.registry.notify(SMSSent(request, receiver, text_body, sender, user_dialog)) def send_templated_sms(request: Request, template: str, context:", "page template. Same as :py:meth:`pyramid_sms.outgoing.send_sms`, but uses templates instead of hardcoded messages. :param", "log_failure,)) else: _send_sms(request, receiver, text_body, sender, log_failure) request.registry.notify(SMSSent(request, receiver, text_body, sender, user_dialog)) def", "as an exception. :param async: Force asynchronous operation through task subsystem. If ``None``", "international format. You should normalize this number from all user input before passing", "module @task(base=ScheduleOnCommitTask, bind=True) def _send_sms_async(self, receiver, from_, text_body, log_failure): \"\"\"Celery task to send", "settings. If the operation is asynchronous, this function returns instantly and does not", "HTTP request :param template: Template name. Like ``welcome_sms.txt.jinja``. :param context: Dictionary passed to", "configured \"sms.default_from\". :param log_failure: If there is an exception from the SMS backend" ]
[ "moving,target,registration=True): \"\"\" Args: moving (Tensor): Moving image (BxCxHxW) target ([type]): Fixed image (BxCxHxW)", "losses (dict): Dictionary of losses Returns: loss (Tensor): Weighted loss \"\"\" def blend(self,x,y):", "start self.set_dict(self.lw) def set_dict(self, dic): self.lw = dic for k in dic.keys(): if", "False, also return non-integrated inverse flow field. Else return the integrated one. Defaults", "# for i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) #", "to False. # Returns: # moved (Tensor): Moved image # field (Tensor): Deformation", "self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation to x from flow field Args: x (Tensor):", "# downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved) # stack_field.append(field) #", "self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning only \"forward\" transitions (phi_i->j with j>i). Other", "dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] 
composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1", "from moving to target # \"\"\" # stack_moved=[] # stack_field=[] # stack_preint=[] #", "if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field)", "# loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...])", "Transformed image \"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow in reversed(flows[:-1]):", "fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if", "Else return the integrated one. Defaults to False. 
# Returns: # moved (Tensor):", "(BxCxHxW) target ([type]): Fixed image (BxCxHxW) registration (bool, optional): If False, also return", "mean if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) #", "flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow Args:", "kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def automatic_optimization(self): return False def norm(self,", "field (Tensor): Deformation field (Bx2xHxW) Returns: Tensor: Transformed image \"\"\" return self.registrator.transformer(x,field) def", "losses['sim']=loss_ncc if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI", "loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing registration from the sequence of", "chunk=[] #Binarize ground truth according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for", "the integrated one. Defaults to False. 
Returns: moved (Tensor): Moved image field (Tensor):", "= nn.ParameterDict() for k in losses: self.lw[k]= start self.set_dict(self.lw) def set_dict(self, dic): self.lw", "Moved image field (Tensor): Deformation field from moving to target \"\"\" return self.registrator.forward(moving,target,registration=registration)", "stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved : Transformed anatomical image", "not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down", "chunks (i->j) for i in range(X.shape[2]): y=Y[:,:,i,...] if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk)", "BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def automatic_optimization(self): return False", "mask to deform (BxCxHxW) field (Tensor): Deformation field (Bx2xHxW) Returns: Tensor: Transformed image", "# field (Tensor): Deformation field from moving to target # \"\"\" # stack_moved=[]", "F import pytorch_lightning as pl import kornia from .voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses", "y_opt.zero_grad() #Sequences of flow fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[]", "enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) 
losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if", "of flow fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up)", "# loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field)", "# loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in losses.values()]).mean()", "# def multi_level_training(self,moving,target,level=3): # \"\"\" # Args: # moving (Tensor): Moving image (BxCxHxW)", "if len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks) for chunk in chunks: y_opt.zero_grad() #Sequences", "VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters 
import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property", "\"\"\" losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI #", "stack_moved.append(moved) # stack_field.append(field) # stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved", "flow fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for", "return the integrated one. Defaults to False. # Returns: # moved (Tensor): Moved", "# moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if", "\"\"\" Args: losses (dict): Dictionary of losses Returns: loss (Tensor): Weighted loss \"\"\"", "Args: moved : Transformed anatomical image target : Target anatomical image moved_mask :", "if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else:", "integrated one. Defaults to False. 
Returns: moved (Tensor): Moved image field (Tensor): Deformation", "self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim'])", "loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict", "BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args: losses (dict): Dictionary of losses Returns: loss (Tensor):", "as F import pytorch_lightning as pl import kornia from .voxelmorph2d import VxmDense,NCC,Grad,Dice from", "\"\"\" Args: moving (Tensor): Moving image (BxCxHxW) target ([type]): Fixed image (BxCxHxW) registration", "field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else:", "torch import nn import torch.nn.functional as F import pytorch_lightning as pl import kornia", "field. Else return the integrated one. Defaults to False. Returns: moved (Tensor): Moved", "self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] 
composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down)", "moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not", "self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss)", "(BxCxHxW) registration (bool, optional): If False, also return non-integrated inverse flow field. Else", "of losses Returns: loss (Tensor): Weighted loss \"\"\" def blend(self,x,y): #For visualization x=self.norm(x)", "\"down\", \"both\". Bet you understood ;) self.by_composition=by_composition self.loss_model = MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition:", "not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] 
composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in", "Target mask field : Velocity field (=non integrated) \"\"\" losses={} if moved!=None: #", "prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']:", "field (Tensor): Deformation field from moving to target # \"\"\" # stack_moved=[] #", "= flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self, moving,target,registration=True): \"\"\" Args: moving", "self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1", "Y : Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[]", "= 0 with torch.set_grad_enabled(True): for k in loss_dict.keys(): if k in self.lw.keys(): loss", "return the integrated one. Defaults to False. Returns: moved (Tensor): Moved image field", "@property def automatic_optimization(self): return False def norm(self, x): if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x)", "optional): If False, also return non-integrated inverse flow field. 
Else return the integrated", "= kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else: x = kornia.enhance.normalize_min_max(x[None,", "deform (BxCxHxW) field (Tensor): Deformation field (Bx2xHxW) Returns: Tensor: Transformed image \"\"\" return", "Deformation field from moving to target \"\"\" return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): #", "If False, also return non-integrated inverse flow field. Else return the integrated one.", "downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved) #", "#Computing flow fields and loss for each hop from chunk[0] to chunk[1] x1=X[:,:,i,...]", "nn.Parameter(torch.ones(1) * dic[k]) def forward(self, loss_dict): loss = 0 with torch.set_grad_enabled(True): for k", "set_dict(self, dic): self.lw = dic for k in dic.keys(): if dic[k] > 0:", "pytorch_lightning as pl import kornia from .voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss", "(=non integrated) \"\"\" losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8", "flow field Args: x (Tensor): Image or mask to deform (BxCxHxW) field (Tensor):", "anatomical image moved_mask : Transformed mask target_mask : Target mask field : Velocity", "def training_step(self, batch, batch_nb): X,Y=batch # X : Full scan (1x1xLxHxW) | Y", "self.by_composition=by_composition self.loss_model = 
MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using composition for training') print('Losses',losses) self.save_hyperparameters()", "if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition:", "([type]): Fixed image (BxCxHxW) registration (bool, optional): If False, also return non-integrated inverse", "losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] composed_fields_down=self.compose_list(fields_down[::-1]) if", "#Used in validation step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning only", "flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self, moving,target,registration=True): \"\"\" Args: moving (Tensor):", "# loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up':", "self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff # if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) #", "you understood ;) self.by_composition=by_composition self.loss_model = 
MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using composition for", "chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks) for chunk in chunks: y_opt.zero_grad()", "from torch import nn import torch.nn.functional as F import pytorch_lightning as pl import", "for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg']", ": Transformed mask target_mask : Target mask field : Velocity field (=non integrated)", "loss = 0 with torch.set_grad_enabled(True): for k in loss_dict.keys(): if k in self.lw.keys():", "max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target)", "self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self):", "loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None: #", 
"VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning only \"forward\" transitions (phi_i->j with j>i). Other choices", "and \"negative\" flows are equal # if self.way=='both': # #This helps # if", "Deformation field from moving to target # \"\"\" # stack_moved=[] # stack_field=[] #", "deformation to x from flow field Args: x (Tensor): Image or mask to", "flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self, moving,target,registration=True): \"\"\" Args:", "torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module): def __init__(self,", "kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...] return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes", "i=1 for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if", "prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss", "field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) 
prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg']", "self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0:", "chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) #", "prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not", "False def norm(self, x): if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:,", "# stack_field.append(field) # stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved :", 
"self.set_dict(self.lw) def set_dict(self, dic): self.lw = dic for k in dic.keys(): if dic[k]", "field : Velocity field (=non integrated) \"\"\" losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum() #", "loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args: losses (dict): Dictionary", "dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down)", "(dict): Dictionary of losses Returns: loss (Tensor): Weighted loss \"\"\" def blend(self,x,y): #For", "contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args: losses (dict):", "return False def norm(self, x): if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x=", "prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal", "else: self.log('val_accuracy',self.current_epoch) return loss def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return 
torch.optim.Adam(self.parameters(),", "# downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved) # stack_field.append(field) # stack_preint.append(preint_field) #", "chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks) for chunk in chunks: y_opt.zero_grad() #Sequences of flow", "...] return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used", "blended def training_step(self, batch, batch_nb): X,Y=batch # X : Full scan (1x1xLxHxW) |", "# moving (Tensor): Moving image (BxCxHxW) # target ([type]): Fixed image (BxCxHxW) #", "in losses: self.lw[k]= start self.set_dict(self.lw) def set_dict(self, dic): self.lw = dic for k", "training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation to x from flow field Args:", "or mask to deform (BxCxHxW) field (Tensor): Deformation field (Bx2xHxW) Returns: Tensor: Transformed", "fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]): #Computing", "self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']:", "reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) 
prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss)", "\"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self, moving,target,registration=True): \"\"\" Args: moving (Tensor): Moving", "prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down in reversed(fields_down):", "Defaults to False. # Returns: # moved (Tensor): Moved image # field (Tensor):", "image \"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo)", "Defaults to False. Returns: moved (Tensor): Moved image field (Tensor): Deformation field from", "also return non-integrated inverse flow field. Else return the integrated one. Defaults to", "#Identifying chunks (i->j) for i in range(X.shape[2]): y=Y[:,:,i,...] if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2:", "self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean()", "x1=X[:,:,i,...] x2=X[:,:,i+1,...] 
if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down)", "for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow", "> 0: self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k]) def forward(self, loss_dict): loss = 0", "target # \"\"\" # stack_moved=[] # stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for", "moving to target \"\"\" return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): # \"\"\" # Args:", "to target # \"\"\" # stack_moved=[] # stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) #", "if self.way=='both': # #This helps # if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks", "field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans}", "inverse flow field. Else return the integrated one. Defaults to False. 
# Returns:", "\"\"\" Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j Returns: [Tensor]: Flow field flow_i_j =", "weighting_loss(self,losses): \"\"\" Args: losses (dict): Dictionary of losses Returns: loss (Tensor): Weighted loss", "if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if self.way=='up': loss=torch.stack(loss_up).mean() elif", "self.by_composition: print('Using composition for training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation to x", "sequence of flow fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up)", "helps # if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff # if self.losses['bidir-cons-reg']:", "mask target_mask : Target mask field : Velocity field (=non integrated) \"\"\" losses={}", "# Computing registration from the sequence of flow fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...]", "# loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc", "flow field. Else return the integrated one. Defaults to False. 
# Returns: #", "not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) #", "Fixed image (BxCxHxW) registration (bool, optional): If False, also return non-integrated inverse flow", "super().__init__() start=1. self.lw={} self.sigmas = nn.ParameterDict() for k in losses: self.lw[k]= start self.set_dict(self.lw)", "i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss", "__init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation step if", "# loss=(loss_up+loss_down) # Computing registration from the sequence of flow fields if not", "loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]): #Computing flow fields", "def set_dict(self, dic): self.lw = dic for k in dic.keys(): if dic[k] >", ";) self.by_composition=by_composition self.loss_model = MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using composition 
for training') print('Losses',losses)", "moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim)", "Args: flow_i_k flow_k_j Returns: [Tensor]: Flow field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j)", "(BxCxHxW) field (Tensor): Deformation field (Bx2xHxW) Returns: Tensor: Transformed image \"\"\" return self.registrator.transformer(x,field)", "(images and masks) generated from \"positive\" and \"negative\" flows are equal # if", "def __init__(self, losses): super().__init__() start=1. self.lw={} self.sigmas = nn.ParameterDict() for k in losses:", "MTL_loss(torch.nn.Module): def __init__(self, losses): super().__init__() start=1. self.lw={} self.sigmas = nn.ParameterDict() for k in", "dim) class MTL_loss(torch.nn.Module): def __init__(self, losses): super().__init__() start=1. self.lw={} self.sigmas = nn.ParameterDict() for", "if moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target)", "list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize ground truth according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks", "(i->j) for i in range(X.shape[2]): y=Y[:,:,i,...] if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i]", "from \"positive\" and \"negative\" flows are equal # if self.way=='both': # #This helps", "Target anatomical image moved_mask : Transformed mask target_mask : Target mask field :", "...] 
else: x = kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...] return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False):", "if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field)", "def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim):", ": \"down\", \"both\". 
Bet you understood ;) self.by_composition=by_composition self.loss_model = MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if", "breaks stuff # if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...])", "# field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) #", "batch, batch_nb): X,Y=batch # X : Full scan (1x1xLxHxW) | Y : Ground", "\"\"\" # stack_moved=[] # stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in", "from .voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient", "# field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth'])", "self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # 
self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else:", "def weighting_loss(self,losses): \"\"\" Args: losses (dict): Dictionary of losses Returns: loss (Tensor): Weighted", "understood ;) self.by_composition=by_composition self.loss_model = MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using composition for training')", "moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if", "# loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return", "mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved) # stack_field.append(field)", "losses: self.lw[k]= start self.set_dict(self.lw) def set_dict(self, dic): self.lw = dic for k in", "if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] 
prop_y_down=Y[:,:,chunk[1],...]", "# stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1),", "# downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved)", "integrated) \"\"\" losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI", "with torch.set_grad_enabled(True): for k in loss_dict.keys(): if k in self.lw.keys(): loss +=0.5 *", "def automatic_optimization(self): return False def norm(self, x): if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif", "return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag)", "class LabelProp(pl.LightningModule): @property def automatic_optimization(self): return False def norm(self, x): if len(x.shape)==4: x", "# if self.way=='both': # #This helps # if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This", "if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if", "self.losses['compo-reg-down']: 
losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to ensure sequences (images", "Velocity field (=non integrated) \"\"\" losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) #", "field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) #", "= n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7)", "self.compute_loss(moved,target,field=field) # stack_moved.append(moved) # stack_field.append(field) # stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\"", "the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for i in range(X.shape[2]): y=Y[:,:,i,...] 
if len(torch.unique(torch.argmax(y,1)))>1:", "and masks) generated from \"positive\" and \"negative\" flows are equal # if self.way=='both':", "n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way", "torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved : Transformed anatomical image target : Target", "y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop)", "def compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j):", "# Returns: # moved (Tensor): Moved image # field (Tensor): Deformation field from", "# if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down)", "and loss for each hop from chunk[0] to chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] 
if not", "Args: x (Tensor): Image or mask to deform (BxCxHxW) field (Tensor): Deformation field", "to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for i in range(X.shape[2]): y=Y[:,:,i,...] if", "one. Defaults to False. # Returns: # moved (Tensor): Moved image # field", "downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved) # stack_field.append(field) # stack_preint.append(preint_field) # return", "losses['smooth']=loss_trans #Return dict of losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute contour loss", "moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI", "self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] 
composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down in", "in chunks: y_opt.zero_grad() #Sequences of flow fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[]", "flow_i_j def forward(self, moving,target,registration=True): \"\"\" Args: moving (Tensor): Moving image (BxCxHxW) target ([type]):", "def blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended def training_step(self, batch, batch_nb): X,Y=batch", "# loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: #", "lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize ground truth according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1)", "for i in range(chunk[0],chunk[1]): #Computing flow fields and loss for each hop from", "# resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) #", "x=self.norm(x) blended=torch.stack([y,x,x]) return blended def training_step(self, batch, batch_nb): X,Y=batch # X : Full", "from chunk[0] to chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] 
if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth'])", "image field (Tensor): Deformation field from moving to target \"\"\" return self.registrator.forward(moving,target,registration=registration) #", "# self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop)", "x2=X[:,:,i+1,...] if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) #", "...])[0, 0, ...] 
return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay", "dic): self.lw = dic for k in dic.keys(): if dic[k] > 0: self.sigmas[k]", "loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]): #Computing flow fields and", "return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo", "register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return", "if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim'])", "chunk[0] to chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] 
if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) #", "return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module): def", "losses Returns: loss (Tensor): Weighted loss \"\"\" def blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x])", "image (BxCxHxW) target ([type]): Fixed image (BxCxHxW) registration (bool, optional): If False, also", "moving to target # \"\"\" # stack_moved=[] # stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True)", "from flow field Args: x (Tensor): Image or mask to deform (BxCxHxW) field", "return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved : Transformed anatomical image target :", "(Tensor): Image or mask to deform (BxCxHxW) field (Tensor): Deformation field (Bx2xHxW) Returns:", "self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up,", "flow_k_j Returns: [Tensor]: Flow field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j", "losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag) def", "\"positive\" and \"negative\" flows 
are equal # if self.way=='both': # #This helps #", "equal # if self.way=='both': # #This helps # if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) #", "in range(chunk[0],chunk[1]): #Computing flow fields and loss for each hop from chunk[0] to", "def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j Returns: [Tensor]: Flow field", "= dic for k in dic.keys(): if dic[k] > 0: self.sigmas[k] = nn.Parameter(torch.ones(1)", "#MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute", "stuff # if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v", "len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else: x", "if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] 
composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for", "forward(self, loss_dict): loss = 0 with torch.set_grad_enabled(True): for k in loss_dict.keys(): if k", "in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss", "self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing registration from the sequence", "[Tensor]: Flow field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self,", "loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if", "len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks) for chunk in chunks:", "self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) 
loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) #", "(BxCxHxW) # registration (bool, optional): If False, also return non-integrated inverse flow field.", "# field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1))", "kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else: x = kornia.enhance.normalize_min_max(x[None, None,", "else: i=1 for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim']", "loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask):", "prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim']", "x def 
__init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation", "x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else: x =", "0 with torch.set_grad_enabled(True): for k in loss_dict.keys(): if k in self.lw.keys(): loss +=0.5", "\"\"\" return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): # \"\"\" # Args: # moving (Tensor):", "loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]): #Computing flow fields and loss for each", "return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1,", "k in dic.keys(): if dic[k] > 0: self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k]) def", "masks) generated from \"positive\" and \"negative\" flows are equal # if self.way=='both': #", "for i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target)", "kornia from .voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import sobel,", "moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved) 
# stack_field.append(field) # stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def", "# \"\"\" # Args: # moving (Tensor): Moving image (BxCxHxW) # target ([type]):", "fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for", "return flow_i_j def forward(self, moving,target,registration=True): \"\"\" Args: moving (Tensor): Moving image (BxCxHxW) target", "weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module): def __init__(self, losses): super().__init__()", "(bool, optional): If False, also return non-integrated inverse flow field. Else return the", "up, learning only \"forward\" transitions (phi_i->j with j>i). Other choices : \"down\", \"both\".", "Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize", "self.way=way #If up, learning only \"forward\" transitions (phi_i->j with j>i). Other choices :", "loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean()", "for training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation to x from flow field", "...])[:,0, ...] 
else: x = kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...] return x def", "(Tensor): Deformation field from moving to target # \"\"\" # stack_moved=[] # stack_field=[]", "each hop from chunk[0] to chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field)", "Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j Returns: [Tensor]: Flow field flow_i_j = flow_k_j(flow_i_k(.))", "batch_nb): X,Y=batch # X : Full scan (1x1xLxHxW) | Y : Ground truth", "field (Bx2xHxW) Returns: Tensor: Transformed image \"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1]", "the sequence of flow fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if self.by_composition:", "to target \"\"\" return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): # \"\"\" # Args: #", "integrated one. Defaults to False. 
# Returns: # moved (Tensor): Moved image #", "dices_prop.append(dice_loss) #Additionnal loss to ensure sequences (images and masks) generated from \"positive\" and", "composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down)", "def norm(self, x): if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None,", "X,Y=batch # X : Full scan (1x1xLxHxW) | Y : Ground truth (1xCxLxHxW)", "if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to ensure sequences", "#Binarize ground truth according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for i", "print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation to x from flow field Args: x", "(Bx2xHxW) Returns: Tensor: Transformed image \"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1] for", "loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing registration from", "of losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): 
#Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img)", "from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def", "__init__(self, losses): super().__init__() start=1. self.lw={} self.sigmas = nn.ParameterDict() for k in losses: self.lw[k]=", "LabelProp(pl.LightningModule): @property def automatic_optimization(self): return False def norm(self, x): if len(x.shape)==4: x =", "if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of losses return losses#{'sim':", "loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field)", "stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving)", "loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in losses.values()]).mean() #", "# stack_moved.append(moved) # stack_field.append(field) # stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def 
compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args:", "flows are equal # if self.way=='both': # #This helps # if self.losses['bidir-cons-dice']: #", "x from flow field Args: x (Tensor): Image or mask to deform (BxCxHxW)", "prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if", "self.selected_slices=selected_slices #Used in validation step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning", "flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j Returns: [Tensor]: Flow field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\"", "Returns: [Tensor]: Flow field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def", "in loss_dict.keys(): if k in self.lw.keys(): loss +=0.5 * loss_dict[k] / (self.sigmas[k])**2 +", "else: x = kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...] 
return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__()", "Returns: # moved (Tensor): Moved image # field (Tensor): Deformation field from moving", "range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) #", "losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...]", "the integrated one. Defaults to False. 
# Returns: # moved (Tensor): Moved image", "dic.keys(): if dic[k] > 0: self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k]) def forward(self, loss_dict):", "# max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI #", "kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if", "len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim'])", "y=Y[:,:,i,...] 
if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks) for chunk", "if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v", "(Tensor): Moved image field (Tensor): Deformation field from moving to target \"\"\" return", "elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else: x = kornia.enhance.normalize_min_max(x[None, None, ...])[0,", "blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended def training_step(self, batch, batch_nb): X,Y=batch #", "for k in losses: self.lw[k]= start self.set_dict(self.lw) def set_dict(self, dic): self.lw = dic", "composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up)", "#Additionnal loss to ensure sequences (images and masks) generated from \"positive\" and \"negative\"", "loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses):", "self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # 
self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return", "loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff # if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...])", "one. Defaults to False. Returns: moved (Tensor): Moved image field (Tensor): Deformation field", "self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in", "Returns: loss (Tensor): Weighted loss \"\"\" def blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return", "field. Else return the integrated one. Defaults to False. # Returns: # moved", "moved_mask : Transformed mask target_mask : Target mask field : Velocity field (=non", "truth according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for i in range(X.shape[2]):", "0, ...] 
return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices", ": Transformed anatomical image target : Target anatomical image moved_mask : Transformed mask", ": Velocity field (=non integrated) \"\"\" losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target)", "print('Using composition for training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation to x from", "flow field. Else return the integrated one. Defaults to False. Returns: moved (Tensor):", "moving (Tensor): Moving image (BxCxHxW) target ([type]): Fixed image (BxCxHxW) registration (bool, optional):", "# edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args: losses (dict): Dictionary of losses", "dic for k in dic.keys(): if dic[k] > 0: self.sigmas[k] = nn.Parameter(torch.ones(1) *", "Dictionary of losses Returns: loss (Tensor): Weighted loss \"\"\" def blend(self,x,y): #For visualization", "in validation step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning only \"forward\"", "monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def automatic_optimization(self):", "range(chunk[0],chunk[1]): #Computing flow fields and loss for each hop from chunk[0] to chunk[1]", "return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module): def __init__(self, losses): 
super().__init__() start=1. self.lw={} self.sigmas", "stack_moved=[] # stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in range(level): #", "self.sigmas = nn.ParameterDict() for k in losses: self.lw[k]= start self.set_dict(self.lw) def set_dict(self, dic):", "for v in losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...])", "loss (Tensor): Weighted loss \"\"\" def blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended", "prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up)", "loss def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def", "loss \"\"\" def blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended def training_step(self, batch,", "in dic.keys(): if dic[k] > 0: self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k]) def forward(self,", "* dic[k]) def forward(self, loss_dict): loss = 0 with torch.set_grad_enabled(True): for k in", "losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # 
self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) #", "if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up", "flow Args: flow_i_k flow_k_j Returns: [Tensor]: Flow field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j=", "def forward(self, loss_dict): loss = 0 with torch.set_grad_enabled(True): for k in loss_dict.keys(): if", "return loss def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True)", "Moved image # field (Tensor): Deformation field from moving to target # \"\"\"", "stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True)", "scan (1x1xLxHxW) | Y : Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab", "moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) #", "prop_y_up=Y[:,:,chunk[0],...] 
composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up)", "x = kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...] return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes", "# loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in losses.values()]).mean() # loss=self.loss_model(losses)", "losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to ensure sequences (images and", "dict of losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) #", "according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for i in range(X.shape[2]): y=Y[:,:,i,...]", "return non-integrated inverse flow field. Else return the integrated one. 
Defaults to False.", "else: for i,field_up in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']:", "self.current_epoch==0: print(lab,chunks) for chunk in chunks: y_opt.zero_grad() #Sequences of flow fields (field_up=forward, field_down=backward)", "Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for i in range(X.shape[2]): y=Y[:,:,i,...] if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if", "# moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved) # stack_field.append(field) # stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0)", "# self.compute_loss(moved,target,field=field) # stack_moved.append(moved) # stack_field.append(field) # stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None):", "if self.by_composition: print('Using composition for training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation to", "sequences (images and masks) generated from \"positive\" and \"negative\" flows are equal #", "self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss def register_images(self,moving,target,moving_mask): 
moved,field=self.forward(moving,target,registration=True)", "field from moving to target # \"\"\" # stack_moved=[] # stack_field=[] # stack_preint=[]", "def compute_contour_loss(self,img,moved_mask): #Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\"", "(Tensor): Deformation field (Bx2xHxW) Returns: Tensor: Transformed image \"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows):", "dic[k] > 0: self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k]) def forward(self, loss_dict): loss =", "# return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved : Transformed anatomical image target", "# field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean()", "image (BxCxHxW) # target ([type]): Fixed image (BxCxHxW) # registration (bool, optional): If", "only \"forward\" transitions (phi_i->j with j>i). Other choices : \"down\", \"both\". Bet you", "Args: # moving (Tensor): Moving image (BxCxHxW) # target ([type]): Fixed image (BxCxHxW)", "(BxCxHxW) # target ([type]): Fixed image (BxCxHxW) # registration (bool, optional): If False,", "norm(self, x): if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0,", "def multi_level_training(self,moving,target,level=3): # \"\"\" # Args: # moving (Tensor): Moving image (BxCxHxW) #", "registration from the sequence of flow fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] 
composed_fields_up=self.compose_list(fields_up)", "self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): # \"\"\" # Args: # moving (Tensor): Moving image", "choices : \"down\", \"both\". Bet you understood ;) self.by_composition=by_composition self.loss_model = MTL_loss(['sim','seg','comp','smooth']) self.losses=losses", "#MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None:", "to x from flow field Args: x (Tensor): Image or mask to deform", "prop_y_down=Y[:,:,chunk[1],...] composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down)", ": Target anatomical image moved_mask : Transformed mask target_mask : Target mask field", "losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing registration from the sequence of flow fields if", "# moved (Tensor): Moved image # field (Tensor): Deformation field from moving to", "# stack_moved=[] # stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in range(level):", "fields and loss for each hop from chunk[0] to chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] 
if", "torch from torch import nn import torch.nn.functional as F import pytorch_lightning as pl", "if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning only \"forward\" transitions (phi_i->j with", "prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up':", "loss to ensure sequences (images and masks) generated from \"positive\" and \"negative\" flows", "moved (Tensor): Moved image # field (Tensor): Deformation field from moving to target", "# #This breaks stuff # if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) #", "# moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with", "def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module): def __init__(self, losses): super().__init__() start=1.", "mask field : Velocity field (=non integrated) \"\"\" losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum()", "self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in 
losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...])", "-1, dim) class MTL_loss(torch.nn.Module): def __init__(self, losses): super().__init__() start=1. self.lw={} self.sigmas = nn.ParameterDict()", "torch.set_grad_enabled(True): for k in loss_dict.keys(): if k in self.lw.keys(): loss +=0.5 * loss_dict[k]", "compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j Returns:", "False. # Returns: # moved (Tensor): Moved image # field (Tensor): Deformation field", "#This helps # if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff # if", "of flow fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None}", "self.lw={} self.sigmas = nn.ParameterDict() for k in losses: self.lw[k]= start self.set_dict(self.lw) def set_dict(self,", "\"\"\" Args: moved : Transformed anatomical image target : Target anatomical image moved_mask", "#This breaks stuff # if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...],", "losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return", "truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] 
chunk=[] #Binarize ground", "import pytorch_lightning as pl import kornia from .voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses import", "super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation step if isinstance(shape,int):shape=[shape,shape]", "image # field (Tensor): Deformation field from moving to target # \"\"\" #", "# loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if", "self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def", "loss_dict): loss = 0 with torch.set_grad_enabled(True): for k in loss_dict.keys(): if k in", "loss_dict.keys(): if k in self.lw.keys(): loss +=0.5 * loss_dict[k] / (self.sigmas[k])**2 + torch.log(self.sigmas[k])", "else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing registration from the sequence of flow", "field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better", "flow fields and loss for each hop from chunk[0] to chunk[1] x1=X[:,:,i,...] 
x2=X[:,:,i+1,...]", "def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class", "lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module): def __init__(self, losses):", "loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of losses", "Image or mask to deform (BxCxHxW) field (Tensor): Deformation field (Bx2xHxW) Returns: Tensor:", "with mean if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down)", "# loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff # if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...],", "print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return", "# Args: # moving (Tensor): Moving image (BxCxHxW) # target ([type]): Fixed image", "self.losses=losses if self.by_composition: print('Using composition for training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation", "Moving image 
(BxCxHxW) # target ([type]): Fixed image (BxCxHxW) # registration (bool, optional):", "generated from \"positive\" and \"negative\" flows are equal # if self.way=='both': # #This", "= nn.Parameter(torch.ones(1) * dic[k]) def forward(self, loss_dict): loss = 0 with torch.set_grad_enabled(True): for", "# if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff # if self.losses['bidir-cons-reg']: #", "# registration (bool, optional): If False, also return non-integrated inverse flow field. Else", "loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0: #", "Bet you understood ;) self.by_composition=by_composition self.loss_model = MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using composition", "field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2))", "k in self.lw.keys(): loss +=0.5 * loss_dict[k] / (self.sigmas[k])**2 + torch.log(self.sigmas[k]) return loss", "losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to ensure sequences (images and masks) generated from \"positive\"", "training_step(self, batch, batch_nb): X,Y=batch # X : Full scan (1x1xLxHxW) | Y :", "loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # 
loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)#", "hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module): def __init__(self, losses): super().__init__() start=1. self.lw={}", "Fixed image (BxCxHxW) # registration (bool, optional): If False, also return non-integrated inverse", "def apply_deform(self,x,field): \"\"\"Apply deformation to x from flow field Args: x (Tensor): Image", "#If up, learning only \"forward\" transitions (phi_i->j with j>i). Other choices : \"down\",", "(phi_i->j with j>i). Other choices : \"down\", \"both\". Bet you understood ;) self.by_composition=by_composition", "target : Target anatomical image moved_mask : Transformed mask target_mask : Target mask", "pl import kornia from .voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters", "image (BxCxHxW) # registration (bool, optional): If False, also return non-integrated inverse flow", "# loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step()", "compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j Returns: [Tensor]: Flow field flow_i_j", "if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss def register_images(self,moving,target,moving_mask): 
moved,field=self.forward(moving,target,registration=True) return", "len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down':", "dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to ensure sequences (images and masks) generated from", "step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning only \"forward\" transitions (phi_i->j", "k in loss_dict.keys(): if k in self.lw.keys(): loss +=0.5 * loss_dict[k] / (self.sigmas[k])**2", "(1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize ground truth", "field (Tensor): Deformation field from moving to target \"\"\" return self.registrator.forward(moving,target,registration=registration) # def", "y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize ground truth according", "def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved : Transformed anatomical image target : Target anatomical", "# stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) #", "i in range(chunk[0],chunk[1]): #Computing flow fields and loss for each hop from chunk[0]", "self.way=='both': # #This helps # if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff", 
"blended=torch.stack([y,x,x]) return blended def training_step(self, batch, batch_nb): X,Y=batch # X : Full scan", "automatic_optimization(self): return False def norm(self, x): if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3:", "Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize ground truth according to the", "self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] composed_fields_down=self.compose_list(fields_down[::-1])", "sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def automatic_optimization(self): return False def norm(self, x): if", "mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args: losses (dict): Dictionary of", "if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down)", "False. 
Returns: moved (Tensor): Moved image field (Tensor): Deformation field from moving to", "if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up)", "0: self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k]) def forward(self, loss_dict): loss = 0 with", "inverse flow field. Else return the integrated one. Defaults to False. Returns: moved", "if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to ensure sequences (images and masks)", "dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def", "flow fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else:", "x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else: x = kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...]", "Args: losses (dict): Dictionary of losses Returns: loss (Tensor): Weighted loss \"\"\" def", "\"both\". 
Bet you understood ;) self.by_composition=by_composition self.loss_model = MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using", "ground truth according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for i in", "\"\"\" # Args: # moving (Tensor): Moving image (BxCxHxW) # target ([type]): Fixed", "anatomical image target : Target anatomical image moved_mask : Transformed mask target_mask :", "kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else: x = kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...] return", "torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module): def __init__(self, losses): super().__init__() start=1. self.lw={} self.sigmas =", "start=1. self.lw={} self.sigmas = nn.ParameterDict() for k in losses: self.lw[k]= start self.set_dict(self.lw) def", "#MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask)", "image (BxCxHxW) registration (bool, optional): If False, also return non-integrated inverse flow field.", "import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def automatic_optimization(self): return False def norm(self, x):", "self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation step if isinstance(shape,int):shape=[shape,shape] self.registrator=", "for each hop from chunk[0] to chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] 
if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False)", "Transformed mask target_mask : Target mask field : Velocity field (=non integrated) \"\"\"", "if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing", "multi_level_training(self,moving,target,level=3): # \"\"\" # Args: # moving (Tensor): Moving image (BxCxHxW) # target", "target ([type]): Fixed image (BxCxHxW) # registration (bool, optional): If False, also return", "return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): # \"\"\" # Args: # moving (Tensor): Moving", "Computing registration from the sequence of flow fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...]", "are equal # if self.way=='both': # #This helps # if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up)", "self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If", "dic[k]) def forward(self, loss_dict): loss = 0 with torch.set_grad_enabled(True): for k in loss_dict.keys():", "as pl import kornia from .voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from", "gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def automatic_optimization(self): return False def norm(self, x): if len(x.shape)==4:", "to chunk[1] x1=X[:,:,i,...] 
x2=X[:,:,i+1,...] if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field)", "i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to ensure", "for k in loss_dict.keys(): if k in self.lw.keys(): loss +=0.5 * loss_dict[k] /", "losses): super().__init__() start=1. self.lw={} self.sigmas = nn.ParameterDict() for k in losses: self.lw[k]= start", "in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize ground truth according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying", "#Return dict of losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2])", "in range(X.shape[2]): y=Y[:,:,i,...] 
if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks)", "in enumerate(fields_up): prop_x_up=self.apply_deform(prop_x_up,field_up) prop_y_up=self.apply_deform(prop_y_up,field_up) losses['contours']=self.compute_contour_loss(X[:,:,chunk[0]+i+1],prop_y_up) if self.losses['compo-reg-up']: losses['comp']=self.compute_loss(prop_x_up,X[:,:,chunk[1],...])['sim'] if self.losses['compo-dice-up']: dice_loss=self.compute_loss(moved_mask=prop_y_up,target_mask=Y[:,:,chunk[1],...])['seg'] losses['seg']=dice_loss dices_prop.append(dice_loss)", "validation step if isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning only \"forward\" transitions", "# target ([type]): Fixed image (BxCxHxW) # registration (bool, optional): If False, also", "i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) #", "cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: #", "from moving to target \"\"\" return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): # \"\"\" #", "loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None: # loss_seg=", "= 
MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using composition for training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field):", "field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self, moving,target,registration=True): \"\"\"", "compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j Returns: [Tensor]: Flow", "len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks) for chunk in chunks: y_opt.zero_grad() #Sequences of", "Tensor: Transformed image \"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow in", "target ([type]): Fixed image (BxCxHxW) registration (bool, optional): If False, also return non-integrated", "field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]):", "self.lw = dic for k in dic.keys(): if dic[k] > 0: self.sigmas[k] =", "moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean", "reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j", "chunks=[] chunk=[] #Binarize ground truth according to the label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j)", "loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], 
self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True)", "Returns: Tensor: Transformed image \"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow", "prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else: i=1 for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if", "# self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch)", "# if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[0],...], self.compose_deformation(composed_fields_up,composed_fields_down)),X[:,:,chunk[0],...]) # loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for", "Transformed anatomical image target : Target anatomical image moved_mask : Transformed mask target_mask", "torch.nn.functional as F import pytorch_lightning as pl import kornia 
from .voxelmorph2d import VxmDense,NCC,Grad,Dice", "hop from chunk[0] to chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] if not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim'])", "#For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended def training_step(self, batch, batch_nb): X,Y=batch # X", "losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2,", "loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) #", "#Sequences of flow fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0", "Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of", "\"\"\" def blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended def training_step(self, batch, batch_nb):", "self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing registration", "x): 
if len(x.shape)==4: x = kornia.enhance.normalize_min_max(x) elif len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...]", "isinstance(shape,int):shape=[shape,shape] self.registrator= VxmDense(shape,bidir=False,int_downsize=1,int_steps=7) self.way=way #If up, learning only \"forward\" transitions (phi_i->j with j>i).", "# self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss", ": Target mask field : Velocity field (=non integrated) \"\"\" losses={} if moved!=None:", "dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize ground truth according to", "import torch.nn.functional as F import pytorch_lightning as pl import kornia from .voxelmorph2d import", "moved (Tensor): Moved image field (Tensor): Deformation field from moving to target \"\"\"", "image target : Target anatomical image moved_mask : Transformed mask target_mask : Target", "chunk in chunks: y_opt.zero_grad() #Sequences of flow fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[]", "losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to", "# self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss def 
register_images(self,moving,target,moving_mask):", "# loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1", "chunks: y_opt.zero_grad() #Sequences of flow fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[]", "stack_field.append(field) # stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved : Transformed", "if dic[k] > 0: self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k]) def forward(self, loss_dict): loss", "moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans", "losses['seg']=dice_loss dices_prop.append(dice_loss) if not self.way=='up': prop_x_down=X[:,:,chunk[1],...] prop_y_down=Y[:,:,chunk[1],...] 
composed_fields_down=self.compose_list(fields_down[::-1]) if self.by_composition: prop_x_down=self.apply_deform(prop_x_down,composed_fields_down) prop_y_down=self.apply_deform(prop_y_down,composed_fields_down) else:", "def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in validation step", "target_mask : Target mask field : Velocity field (=non integrated) \"\"\" losses={} if", "loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]): #Computing flow fields and loss", "len(x.shape)==3: x= kornia.enhance.normalize_min_max(x[:, None, ...])[:,0, ...] else: x = kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0,", "(Tensor): Weighted loss \"\"\" def blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended def", "to deform (BxCxHxW) field (Tensor): Deformation field (Bx2xHxW) Returns: Tensor: Transformed image \"\"\"", "k in losses: self.lw[k]= start self.set_dict(self.lw) def set_dict(self, dic): self.lw = dic for", "Full scan (1x1xLxHxW) | Y : Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for", "return blended def training_step(self, batch, batch_nb): X,Y=batch # X : Full scan (1x1xLxHxW)", "# loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean()", "# loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of losses return losses#{'sim': 
loss_ncc,'seg':loss_seg,'smooth':loss_trans} def", "Args: moving (Tensor): Moving image (BxCxHxW) target ([type]): Fixed image (BxCxHxW) registration (bool,", "loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of losses return losses#{'sim': loss_ncc,'seg':loss_seg,'smooth':loss_trans} def compute_contour_loss(self,img,moved_mask): #Compute contour", "#Better with mean if self.way=='up': loss=torch.stack(loss_up).mean() elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() #", "(Tensor): Moving image (BxCxHxW) # target ([type]): Fixed image (BxCxHxW) # registration (bool,", "(1x1xLxHxW) | Y : Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in", "from the sequence of flow fields if not self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if", "for chunk in chunks: y_opt.zero_grad() #Sequences of flow fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[]", "#Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args: losses", "X : Full scan (1x1xLxHxW) | Y : Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[]", "fields (field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i", "field from moving to target \"\"\" return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): # \"\"\"", "registration (bool, optional): If False, also return non-integrated inverse flow field. 
Else return", "loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down)", "(Tensor): Deformation field from moving to target \"\"\" return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3):", "self.way=='down': prop_x_up=X[:,:,chunk[0],...] prop_y_up=Y[:,:,chunk[0],...] composed_fields_up=self.compose_list(fields_up) if self.by_composition: prop_x_up=self.apply_deform(prop_x_up,composed_fields_up) prop_y_up=self.apply_deform(prop_y_up,composed_fields_up) else: for i,field_up in enumerate(fields_up):", "in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k", "flow_i_k flow_k_j Returns: [Tensor]: Flow field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return", "in losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...])", "(field_up=forward, field_down=backward) fields_up=[] fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in", "# stack_preint.append(preint_field) # return torch.stack(stack_moved,0).mean(0),torch.stack(stack_field,0).mean(0),torch.stack(stack_preint,0).mean(0) def 
compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved : Transformed anatomical", "x (Tensor): Image or mask to deform (BxCxHxW) field (Tensor): Deformation field (Bx2xHxW)", "loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down) moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) #", "self.loss_model = MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using composition for training') print('Losses',losses) self.save_hyperparameters() def", "compute_contour_loss(self,img,moved_mask): #Compute contour loss mag,mask_contour=canny(moved_mask[:,1:2]) # edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args:", "= kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...] return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes =", "flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self, moving,target,registration=True): \"\"\" Args: moving (Tensor): Moving image (BxCxHxW)", "loss for each hop from chunk[0] to chunk[1] x1=X[:,:,i,...] x2=X[:,:,i+1,...] 
if not self.way=='down':", "def forward(self, moving,target,registration=True): \"\"\" Args: moving (Tensor): Moving image (BxCxHxW) target ([type]): Fixed", "losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]): #Computing flow fields and loss for each hop", "return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate self.weight_decay=weight_decay self.selected_slices=selected_slices #Used in", "([type]): Fixed image (BxCxHxW) # registration (bool, optional): If False, also return non-integrated", "downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field) # stack_moved.append(moved) # stack_field.append(field) # stack_preint.append(preint_field)", "target \"\"\" return self.registrator.forward(moving,target,registration=registration) # def multi_level_training(self,moving,target,level=3): # \"\"\" # Args: # moving", ": Full scan (1x1xLxHxW) | Y : Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y)", "nn import torch.nn.functional as F import pytorch_lightning as pl import kornia from .voxelmorph2d", "import VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule):", "class MTL_loss(torch.nn.Module): def __init__(self, losses): super().__init__() start=1. 
self.lw={} self.sigmas = nn.ParameterDict() for k", "\"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return", "if k in self.lw.keys(): loss +=0.5 * loss_dict[k] / (self.sigmas[k])**2 + torch.log(self.sigmas[k]) return", "resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target)", "for i in range(X.shape[2]): y=Y[:,:,i,...] if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i] if", "loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]): #Computing flow fields and loss for", "Other choices : \"down\", \"both\". Bet you understood ;) self.by_composition=by_composition self.loss_model = MTL_loss(['sim','seg','comp','smooth'])", "flows=list(flows) compo=flows[-1] for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns", "moved : Transformed anatomical image target : Target anatomical image moved_mask : Transformed", "forward(self, moving,target,registration=True): \"\"\" Args: moving (Tensor): Moving image (BxCxHxW) target ([type]): Fixed image", "image moved_mask : Transformed mask target_mask : Target mask field : Velocity field", "non-integrated inverse flow field. Else return the integrated one. Defaults to False. 
#", "fields_down=[] loss_up_sim=[] loss_up_smooth=[] loss_down_sim=[] loss_down_smooth=[] loss=0 losses={'sim':None,'seg':None,'comp':None,'smooth':None} for i in range(chunk[0],chunk[1]): #Computing flow", "self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k]) def forward(self, loss_dict): loss = 0 with torch.set_grad_enabled(True):", "import nn import torch.nn.functional as F import pytorch_lightning as pl import kornia from", "edges,mag=canny(img) return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args: losses (dict): Dictionary of losses Returns:", "moved_x2=self.registrator.transformer(x2,field_down) cur_loss=self.compute_loss(moved_x2,x1,field=preint_field) loss_down_sim.append(cur_loss['sim']) loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0:", "\"negative\" flows are equal # if self.way=='both': # #This helps # if self.losses['bidir-cons-dice']:", "composition for training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply deformation to x from flow", "# loss_ncc=NCC().loss(moved,target) loss_ncc=GlobalMutualInformationLoss()(moved,target)*0.8 #MONAI # loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) losses['sim']=loss_ncc if moved_mask!=None:", "moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes),", "for field_down in reversed(fields_down): prop_x_down=self.apply_deform(prop_x_down,field_down) 
prop_y_down=self.apply_deform(prop_y_down,field_down) losses['contours']+=self.compute_contour_loss(X[:,:,chunk[1]-i],prop_y_down) i+=1 if self.losses['compo-reg-down']: losses['comp']+=self.compute_loss(prop_x_down,X[:,:,chunk[0],...])['sim'] if self.losses['compo-dice-down']:", "import torch from torch import nn import torch.nn.functional as F import pytorch_lightning as", "import kornia from .voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import", "not self.way=='down': moved_x1,field_up,preint_field=self.forward(x1,x2,registration=False) cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up)", "Returns: moved (Tensor): Moved image field (Tensor): Deformation field from moving to target", "self.lw[k]= start self.set_dict(self.lw) def set_dict(self, dic): self.lw = dic for k in dic.keys():", "print(lab,chunks) for chunk in chunks: y_opt.zero_grad() #Sequences of flow fields (field_up=forward, field_down=backward) fields_up=[]", "ensure sequences (images and masks) generated from \"positive\" and \"negative\" flows are equal", "compo=flows[-1] for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.))", "non-integrated inverse flow field. Else return the integrated one. Defaults to False. 
Returns:", "loss_down_smooth.append(cur_loss['smooth']) # field_up=self.registrator.integrate(-preint_field) # moved_x1=self.registrator.transformer(x1,field_up) # loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) #", "to ensure sequences (images and masks) generated from \"positive\" and \"negative\" flows are", "loss_down_sim.append(self.compute_loss(moved_x1,x2)['sim']) # if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if self.way=='up':", "\"forward\" transitions (phi_i->j with j>i). Other choices : \"down\", \"both\". Bet you understood", "apply_deform(self,x,field): \"\"\"Apply deformation to x from flow field Args: x (Tensor): Image or", "loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up)", "compose_list(self,flows): flows=list(flows) compo=flows[-1] for flow in reversed(flows[:-1]): compo=self.compose_deformation(flow,compo) return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\"", ": Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[]", "if self.current_epoch==0: print(lab,chunks) for chunk in chunks: y_opt.zero_grad() #Sequences of flow fields (field_up=forward,", "for lab in list(range(Y_multi_lab.shape[1]))[1:]: chunks=[] chunk=[] #Binarize ground truth according to the label", "loss_ncc=LocalNormalizedCrossCorrelationLoss(spatial_dims=2, kernel_size=99)(moved,target) #MONAI # loss_ncc=nn.MSELoss()(moved,target) 
losses['sim']=loss_ncc if moved_mask!=None: # loss_seg= Dice().loss(moved_mask,target_mask) loss_seg=DiceLoss(include_background=False)(moved_mask,target_mask)-1 losses['seg']=loss_seg", "loss=(loss_up+loss_down) # Computing registration from the sequence of flow fields if not self.way=='down':", "visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended def training_step(self, batch, batch_nb): X,Y=batch # X :", "import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def automatic_optimization(self): return", "from kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient class LabelProp(pl.LightningModule): @property def automatic_optimization(self): return False def", "(Tensor): Moved image # field (Tensor): Deformation field from moving to target #", "field (=non integrated) \"\"\" losses={} if moved!=None: # max_peak=F.conv2d(target,target).sum() # loss_ncc=-F.conv2d(moved,target).sum()/max_peak#+NCC().loss(moved,target) # loss_ncc=NCC().loss(moved,target)", "cur_loss=self.compute_loss(moved_x1,x2,field=preint_field) loss_up_sim.append(cur_loss['sim']) loss_up_smooth.append(cur_loss['smooth']) # field_down=self.registrator.integrate(-preint_field) # moved_x2=self.registrator.transformer(x2,field_down) # loss_up_sim.append(self.compute_loss(moved_x2,x1)['sim']) fields_up.append(field_up) # if len(fields_up)>0:", "self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss def", "# #This helps # if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff #", 
"for k in dic.keys(): if dic[k] > 0: self.sigmas[k] = nn.Parameter(torch.ones(1) * dic[k])", "in range(level): # downsampling=nn.Upsample(scale_factor=1/(i+1), mode='bilinear',align_corners=True) # downsampled_moving=downsampling(moving) # downsampled_target=downsampling(target) # moved,field,preint_field=self.forward(downsampled_moving,downsampled_target) # self.compute_loss(moved,target,field=field)", "# X : Full scan (1x1xLxHxW) | Y : Ground truth (1xCxLxHxW) y_opt=self.optimizers()", "moving (Tensor): Moving image (BxCxHxW) # target ([type]): Fixed image (BxCxHxW) # registration", "None, ...])[:,0, ...] else: x = kornia.enhance.normalize_min_max(x[None, None, ...])[0, 0, ...] return x", "flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self, moving,target,registration=True): \"\"\" Args: moving (Tensor): Moving image", "MTL_loss(['sim','seg','comp','smooth']) self.losses=losses if self.by_composition: print('Using composition for training') print('Losses',losses) self.save_hyperparameters() def apply_deform(self,x,field): \"\"\"Apply", "losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing registration from the sequence of flow fields", "label Y=torch.stack([1-Y_multi_lab[:,lab],Y_multi_lab[:,lab]],dim=1) #Identifying chunks (i->j) for i in range(X.shape[2]): y=Y[:,:,i,...] if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i)", "j>i). Other choices : \"down\", \"both\". Bet you understood ;) self.by_composition=by_composition self.loss_model =", "# \"\"\" # stack_moved=[] # stack_field=[] # stack_preint=[] # resampling=torch.nn.Upsample(size=self.shape,mode='bilinear',align_corners=True) # for i", "transitions (phi_i->j with j>i). Other choices : \"down\", \"both\". 
Bet you understood ;)", "if self.losses['bidir-cons-dice']: # loss+=self.compute_loss(moved_mask=prop_y_down,target_mask=prop_y_up) # #This breaks stuff # if self.losses['bidir-cons-reg']: # loss+=self.compute_loss(prop_x_up,prop_x_down)", "nn.ParameterDict() for k in losses: self.lw[k]= start self.set_dict(self.lw) def set_dict(self, dic): self.lw =", "i in range(X.shape[2]): y=Y[:,:,i,...] if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0:", "# if len(fields_down)>0: # field_down_2=self.compose_deformation(fields_down[-1],field_down) # loss_down.append(self.compute_loss(self.apply_deform(X[:,:,i+1],field_down_2),x1)) #Better with mean if self.way=='up': loss=torch.stack(loss_up).mean()", "return compo def compose_deformation(self,flow_i_k,flow_k_j): \"\"\" Returns flow_k_j(flow_i_k(.)) flow Args: flow_i_k flow_k_j Returns: [Tensor]:", "loss+=nn.L1Loss()(self.apply_deform(X[:,:,chunk[1],...], self.compose_deformation(composed_fields_down,composed_fields_up)),X[:,:,chunk[1],...]) loss=losses['seg']+losses['sim']+losses['contours']#+losses['smooth']#torch.stack([v for v in losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() #", "Weighted loss \"\"\" def blend(self,x,y): #For visualization x=self.norm(x) blended=torch.stack([y,x,x]) return blended def training_step(self,", "Deformation field (Bx2xHxW) Returns: Tensor: Transformed image \"\"\" return self.registrator.transformer(x,field) def compose_list(self,flows): flows=list(flows)", "return BendingEnergyLoss()(mag) def weighting_loss(self,losses): \"\"\" Args: losses (dict): Dictionary of losses Returns: loss", "self.losses['compo-dice-down']: dice_loss=self.compute_loss(moved_mask=prop_y_down,target_mask=Y[:,:,chunk[0],...])['seg'] losses['seg']+=dice_loss dices_prop.append(dice_loss) #Additionnal loss to ensure sequences (images and masks) 
generated", "to False. Returns: moved (Tensor): Moved image field (Tensor): Deformation field from moving", "len(dices_prop)>0: dices_prop=-torch.stack(dices_prop).mean() self.log('val_accuracy',dices_prop) print(dices_prop) else: self.log('val_accuracy',self.current_epoch) return loss def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field", "with j>i). Other choices : \"down\", \"both\". Bet you understood ;) self.by_composition=by_composition self.loss_model", "Flow field flow_i_j = flow_k_j(flow_i_k(.)) \"\"\" flow_i_j= flow_k_j+self.apply_deform(flow_i_k,flow_k_j) return flow_i_j def forward(self, moving,target,registration=True):", "learning only \"forward\" transitions (phi_i->j with j>i). Other choices : \"down\", \"both\". Bet", "Else return the integrated one. Defaults to False. Returns: moved (Tensor): Moved image", "None, ...])[0, 0, ...] return x def __init__(self,n_channels=1,n_classes=2,learning_rate=5e-3,weight_decay=1e-8,way='up',shape=256,selected_slices=None,losses={},by_composition=False): super().__init__() self.n_classes = n_classes self.learning_rate=learning_rate", "compute_loss(self,moved=None,target=None,moved_mask=None,target_mask=None,field=None): \"\"\" Args: moved : Transformed anatomical image target : Target anatomical image", "field Args: x (Tensor): Image or mask to deform (BxCxHxW) field (Tensor): Deformation", "elif self.way=='down': loss=torch.stack(loss_down).mean() else: losses['sim']=torch.stack(loss_up_sim).mean()+torch.stack(loss_down_sim).mean() losses['smooth']=torch.stack(loss_up_smooth).mean()+torch.stack(loss_down_smooth).mean() # loss=(loss_up+loss_down) # Computing registration from the", "range(X.shape[2]): y=Y[:,:,i,...] 
if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks) for", "losses['seg']=loss_seg if field!=None: # loss_trans=BendingEnergyLoss()(field) #MONAI loss_trans=Grad().loss(field,field) losses['smooth']=loss_trans #Return dict of losses return", "| Y : Ground truth (1xCxLxHxW) y_opt=self.optimizers() dices_prop=[] Y_multi_lab=torch.clone(Y) for lab in list(range(Y_multi_lab.shape[1]))[1:]:", "Moving image (BxCxHxW) target ([type]): Fixed image (BxCxHxW) registration (bool, optional): If False,", "configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay,amsgrad=True) def hardmax(self,Y,dim): return torch.moveaxis(F.one_hot(torch.argmax(Y,dim),self.n_classes), -1, dim) class MTL_loss(torch.nn.Module):", "chunk=[i] if self.current_epoch==0: print(lab,chunks) for chunk in chunks: y_opt.zero_grad() #Sequences of flow fields", "self.log('val_accuracy',self.current_epoch) return loss def register_images(self,moving,target,moving_mask): moved,field=self.forward(moving,target,registration=True) return moved,self.apply_deform(moving_mask,field),field def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=self.learning_rate,", "if len(torch.unique(torch.argmax(y,1)))>1: chunk.append(i) if len(chunk)==2: chunks.append(chunk) chunk=[i] if self.current_epoch==0: print(lab,chunks) for chunk in", "v in losses.values()]).mean() # loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) #", "loss=self.loss_model(losses) self.log_dict({'loss':loss},prog_bar=True) self.manual_backward(loss) y_opt.step() # self.logger.experiment.add_image('x_true',X[0,:,chunk[0],...]) # 
self.logger.experiment.add_image('prop_x_down',prop_x_down[0,:,0,...]) # self.logger.experiment.add_image('x_true_f',X[0,:,chunk[1],...]) # self.logger.experiment.add_image('prop_x_up',prop_x_up[0,:,-1,...]) if", "\"\"\"Apply deformation to x from flow field Args: x (Tensor): Image or mask", "fields_up.append(field_up) # if len(fields_up)>0: # field_up_2=self.compose_deformation(fields_up[-1],field_up) # loss_up.append(self.compute_loss(self.apply_deform(X[:,:,i-1],field_up_2),x2)) if not self.way=='up': moved_x2,field_down,preint_field=self.forward(x2,x1,registration=False)# fields_down.append(field_down)", "(Tensor): Moving image (BxCxHxW) target ([type]): Fixed image (BxCxHxW) registration (bool, optional): If", ".voxelmorph2d import VxmDense,NCC,Grad,Dice from monai.losses import BendingEnergyLoss,GlobalMutualInformationLoss,DiceLoss,LocalNormalizedCrossCorrelationLoss from kornia.filters import sobel, gaussian_blur2d,canny,spatial_gradient class" ]
[ "for v in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs = [v for v in individual.routed_graph.nodes()", "EvalBase import EvalBase import networkx as nx import os import signal import math", "1 if \"quit_minheight\" in info.keys(): if info[\"quit_minheight\"] is True: input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(),", "GenMap and released under the MIT License, see LICENSE. # Author: <NAME> from", "if info[\"quit_minheight\"] is True: input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(),", "individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs = [v for v in individual.routed_graph.nodes() if CGRA.isALU(v)] width,", "app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if CGRA.isIOShared(): min_maph = max(math.ceil((input_count", "output_count) / 2),\\ minh_op) else: min_maph = max(input_count, output_count, minh_op) if min_maph ==", "An individual to be evaluated Returns: int: mapping height \"\"\" y_coords = []", "networkx as nx import os import signal import math main_pid = os.getpid() class", "signal import math main_pid = os.getpid() class MapHeightEval(EvalBase): def __init__(self): pass @staticmethod def", "\\ node == rsc[\"ALU\"]: y_coords.append(y) break map_height = max(y_coords) + 1 if \"quit_minheight\"", "minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if CGRA.isIOShared(): min_maph = max(math.ceil((input_count + output_count)", "EvalBase import networkx as nx import os import signal import math main_pid =", "else: min_maph = max(input_count, output_count, minh_op) if min_maph == map_height and individual.isValid(): os.kill(main_pid,", "break map_height = max(y_coords) + 1 if \"quit_minheight\" in info.keys(): if info[\"quit_minheight\"] is", "v in se_set ] or \\ node == rsc[\"ALU\"]: y_coords.append(y) break map_height =", "+ ALUs: 
for x in range(width): for y in range(height): rsc = CGRA.get_PE_resources((x,", "[] SEs = [v for v in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs = [v", "CGRA app (Application): An application to be optimized sim_params (SimParameters): parameters for some", "len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if CGRA.isIOShared(): min_maph =", "in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs = [v for v in individual.routed_graph.nodes() if CGRA.isALU(v)]", "= max(math.ceil((input_count + output_count) / 2),\\ minh_op) else: min_maph = max(input_count, output_count, minh_op)", "= max(input_count, output_count, minh_op) if min_maph == map_height and individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return", "info.keys(): if info[\"quit_minheight\"] is True: input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\", "for some simulations individual (Individual): An individual to be evaluated Returns: int: mapping", "Returns: int: mapping height \"\"\" y_coords = [] SEs = [v for v", "width, height = CGRA.getSize() for node in SEs + ALUs: for x in", "the CGRA app (Application): An application to be optimized sim_params (SimParameters): parameters for", "\"\"\"Return mapping height. Args: CGRA (PEArrayModel): A model of the CGRA app (Application):", "simulations individual (Individual): An individual to be evaluated Returns: int: mapping height \"\"\"", "pass @staticmethod def eval(CGRA, app, sim_params, individual, **info): \"\"\"Return mapping height. Args: CGRA", "SEs = [v for v in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs = [v for", "CGRA.getSize() for node in SEs + ALUs: for x in range(width): for y", "Author: <NAME> from EvalBase import EvalBase import networkx as nx import os import", "of GenMap and released under the MIT License, see LICENSE. 
# Author: <NAME>", "model of the CGRA app (Application): An application to be optimized sim_params (SimParameters):", "= [v for v in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs = [v for v", "CGRA.isSE(v)] ALUs = [v for v in individual.routed_graph.nodes() if CGRA.isALU(v)] width, height =", "os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod def isMinimize(): return True @staticmethod def name(): return", "is part of GenMap and released under the MIT License, see LICENSE. #", "parameters for some simulations individual (Individual): An individual to be evaluated Returns: int:", "app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ / width)", "# Author: <NAME> from EvalBase import EvalBase import networkx as nx import os", "if node in [v for se_set in rsc[\"SE\"].values() for v in se_set ]", "__init__(self): pass @staticmethod def eval(CGRA, app, sim_params, individual, **info): \"\"\"Return mapping height. 
Args:", "v in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs = [v for v in individual.routed_graph.nodes() if", "some simulations individual (Individual): An individual to be evaluated Returns: int: mapping height", "app (Application): An application to be optimized sim_params (SimParameters): parameters for some simulations", "be optimized sim_params (SimParameters): parameters for some simulations individual (Individual): An individual to", "in rsc[\"SE\"].values() for v in se_set ] or \\ node == rsc[\"ALU\"]: y_coords.append(y)", "\\ / width) if CGRA.isIOShared(): min_maph = max(math.ceil((input_count + output_count) / 2),\\ minh_op)", "in info.keys(): if info[\"quit_minheight\"] is True: input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count =", "min_maph == map_height and individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod def isMinimize(): return", "(Application): An application to be optimized sim_params (SimParameters): parameters for some simulations individual", "int: mapping height \"\"\" y_coords = [] SEs = [v for v in", "ALUs: for x in range(width): for y in range(height): rsc = CGRA.get_PE_resources((x, y))", "(PEArrayModel): A model of the CGRA app (Application): An application to be optimized", "class MapHeightEval(EvalBase): def __init__(self): pass @staticmethod def eval(CGRA, app, sim_params, individual, **info): \"\"\"Return", "= [v for v in individual.routed_graph.nodes() if CGRA.isALU(v)] width, height = CGRA.getSize() for", "as nx import os import signal import math main_pid = os.getpid() class MapHeightEval(EvalBase):", "A model of the CGRA app (Application): An application to be optimized sim_params", "see LICENSE. 
# Author: <NAME> from EvalBase import EvalBase import networkx as nx", "math main_pid = os.getpid() class MapHeightEval(EvalBase): def __init__(self): pass @staticmethod def eval(CGRA, app,", "os.getpid() class MapHeightEval(EvalBase): def __init__(self): pass @staticmethod def eval(CGRA, app, sim_params, individual, **info):", "if min_maph == map_height and individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod def isMinimize():", "This file is part of GenMap and released under the MIT License, see", "= math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if CGRA.isIOShared(): min_maph = max(math.ceil((input_count + output_count) /", "in SEs + ALUs: for x in range(width): for y in range(height): rsc", "if CGRA.isALU(v)] width, height = CGRA.getSize() for node in SEs + ALUs: for", "rsc = CGRA.get_PE_resources((x, y)) if node in [v for se_set in rsc[\"SE\"].values() for", "\"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if", "= os.getpid() class MapHeightEval(EvalBase): def __init__(self): pass @staticmethod def eval(CGRA, app, sim_params, individual,", "map_height = max(y_coords) + 1 if \"quit_minheight\" in info.keys(): if info[\"quit_minheight\"] is True:", "**info): \"\"\"Return mapping height. Args: CGRA (PEArrayModel): A model of the CGRA app", "application to be optimized sim_params (SimParameters): parameters for some simulations individual (Individual): An", "in [v for se_set in rsc[\"SE\"].values() for v in se_set ] or \\", "in range(height): rsc = CGRA.get_PE_resources((x, y)) if node in [v for se_set in", "from EvalBase import EvalBase import networkx as nx import os import signal import", "the MIT License, see LICENSE. 
# Author: <NAME> from EvalBase import EvalBase import", "individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod def isMinimize(): return True @staticmethod def name():", "An application to be optimized sim_params (SimParameters): parameters for some simulations individual (Individual):", "= len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if CGRA.isIOShared(): min_maph", "Args: CGRA (PEArrayModel): A model of the CGRA app (Application): An application to", "<NAME> from EvalBase import EvalBase import networkx as nx import os import signal", "max(math.ceil((input_count + output_count) / 2),\\ minh_op) else: min_maph = max(input_count, output_count, minh_op) if", "/ width) if CGRA.isIOShared(): min_maph = max(math.ceil((input_count + output_count) / 2),\\ minh_op) else:", "max(y_coords) + 1 if \"quit_minheight\" in info.keys(): if info[\"quit_minheight\"] is True: input_count =", "= max(y_coords) + 1 if \"quit_minheight\" in info.keys(): if info[\"quit_minheight\"] is True: input_count", "node in [v for se_set in rsc[\"SE\"].values() for v in se_set ] or", "y in range(height): rsc = CGRA.get_PE_resources((x, y)) if node in [v for se_set", "\"quit_minheight\" in info.keys(): if info[\"quit_minheight\"] is True: input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count", "CGRA.isALU(v)] width, height = CGRA.getSize() for node in SEs + ALUs: for x", "is True: input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op", "in individual.routed_graph.nodes() if CGRA.isALU(v)] width, height = CGRA.getSize() for node in SEs +", "if \"quit_minheight\" in info.keys(): if info[\"quit_minheight\"] is True: input_count = len(set(nx.get_node_attributes(\\ 
app.getInputSubGraph(), \"input\").keys()))", "to be optimized sim_params (SimParameters): parameters for some simulations individual (Individual): An individual", "CGRA (PEArrayModel): A model of the CGRA app (Application): An application to be", "[v for v in individual.routed_graph.nodes() if CGRA.isALU(v)] width, height = CGRA.getSize() for node", "= CGRA.get_PE_resources((x, y)) if node in [v for se_set in rsc[\"SE\"].values() for v", "se_set ] or \\ node == rsc[\"ALU\"]: y_coords.append(y) break map_height = max(y_coords) +", "True: input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op =", "math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if CGRA.isIOShared(): min_maph = max(math.ceil((input_count + output_count) / 2),\\", "mapping height \"\"\" y_coords = [] SEs = [v for v in individual.routed_graph.nodes()", "] or \\ node == rsc[\"ALU\"]: y_coords.append(y) break map_height = max(y_coords) + 1", "== map_height and individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod def isMinimize(): return True", "(SimParameters): parameters for some simulations individual (Individual): An individual to be evaluated Returns:", "ALUs = [v for v in individual.routed_graph.nodes() if CGRA.isALU(v)] width, height = CGRA.getSize()", "/ 2),\\ minh_op) else: min_maph = max(input_count, output_count, minh_op) if min_maph == map_height", "under the MIT License, see LICENSE. 
# Author: <NAME> from EvalBase import EvalBase", "# This file is part of GenMap and released under the MIT License,", "MapHeightEval(EvalBase): def __init__(self): pass @staticmethod def eval(CGRA, app, sim_params, individual, **info): \"\"\"Return mapping", "import EvalBase import networkx as nx import os import signal import math main_pid", "and individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod def isMinimize(): return True @staticmethod def", "x in range(width): for y in range(height): rsc = CGRA.get_PE_resources((x, y)) if node", "+ 1 if \"quit_minheight\" in info.keys(): if info[\"quit_minheight\"] is True: input_count = len(set(nx.get_node_attributes(\\", "if CGRA.isIOShared(): min_maph = max(math.ceil((input_count + output_count) / 2),\\ minh_op) else: min_maph =", "in se_set ] or \\ node == rsc[\"ALU\"]: y_coords.append(y) break map_height = max(y_coords)", "input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes())", "= len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\", "nx import os import signal import math main_pid = os.getpid() class MapHeightEval(EvalBase): def", "max(input_count, output_count, minh_op) if min_maph == map_height and individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height", "v in individual.routed_graph.nodes() if CGRA.isALU(v)] width, height = CGRA.getSize() for node in SEs", "+ output_count) / 2),\\ minh_op) else: min_maph = max(input_count, output_count, minh_op) if min_maph", "to be evaluated Returns: int: mapping height \"\"\" y_coords = [] SEs =", "output_count, minh_op) if min_maph == map_height and 
individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod", "individual to be evaluated Returns: int: mapping height \"\"\" y_coords = [] SEs", "rsc[\"ALU\"]: y_coords.append(y) break map_height = max(y_coords) + 1 if \"quit_minheight\" in info.keys(): if", "width) if CGRA.isIOShared(): min_maph = max(math.ceil((input_count + output_count) / 2),\\ minh_op) else: min_maph", "in range(width): for y in range(height): rsc = CGRA.get_PE_resources((x, y)) if node in", "minh_op) if min_maph == map_height and individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod def", "CGRA.get_PE_resources((x, y)) if node in [v for se_set in rsc[\"SE\"].values() for v in", "SEs + ALUs: for x in range(width): for y in range(height): rsc =", "2),\\ minh_op) else: min_maph = max(input_count, output_count, minh_op) if min_maph == map_height and", "def eval(CGRA, app, sim_params, individual, **info): \"\"\"Return mapping height. Args: CGRA (PEArrayModel): A", "of the CGRA app (Application): An application to be optimized sim_params (SimParameters): parameters", "map_height and individual.isValid(): os.kill(main_pid, signal.SIGUSR1) return map_height @staticmethod def isMinimize(): return True @staticmethod", "min_maph = max(input_count, output_count, minh_op) if min_maph == map_height and individual.isValid(): os.kill(main_pid, signal.SIGUSR1)", "sim_params (SimParameters): parameters for some simulations individual (Individual): An individual to be evaluated", "= CGRA.getSize() for node in SEs + ALUs: for x in range(width): for", "eval(CGRA, app, sim_params, individual, **info): \"\"\"Return mapping height. Args: CGRA (PEArrayModel): A model", "if CGRA.isSE(v)] ALUs = [v for v in individual.routed_graph.nodes() if CGRA.isALU(v)] width, height", "for se_set in rsc[\"SE\"].values() for v in se_set ] or \\ node ==", "(Individual): An individual to be evaluated Returns: int: mapping height \"\"\" y_coords =", "License, see LICENSE. 
# Author: <NAME> from EvalBase import EvalBase import networkx as", "individual, **info): \"\"\"Return mapping height. Args: CGRA (PEArrayModel): A model of the CGRA", "signal.SIGUSR1) return map_height @staticmethod def isMinimize(): return True @staticmethod def name(): return \"Mapping_Height\"", "and released under the MIT License, see LICENSE. # Author: <NAME> from EvalBase", "height = CGRA.getSize() for node in SEs + ALUs: for x in range(width):", "height \"\"\" y_coords = [] SEs = [v for v in individual.routed_graph.nodes() if", "MIT License, see LICENSE. # Author: <NAME> from EvalBase import EvalBase import networkx", "or \\ node == rsc[\"ALU\"]: y_coords.append(y) break map_height = max(y_coords) + 1 if", "app, sim_params, individual, **info): \"\"\"Return mapping height. Args: CGRA (PEArrayModel): A model of", "node == rsc[\"ALU\"]: y_coords.append(y) break map_height = max(y_coords) + 1 if \"quit_minheight\" in", "for v in se_set ] or \\ node == rsc[\"ALU\"]: y_coords.append(y) break map_height", "info[\"quit_minheight\"] is True: input_count = len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys()))", "optimized sim_params (SimParameters): parameters for some simulations individual (Individual): An individual to be", "[v for v in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs = [v for v in", "released under the MIT License, see LICENSE. 
# Author: <NAME> from EvalBase import", "for node in SEs + ALUs: for x in range(width): for y in", "y)) if node in [v for se_set in rsc[\"SE\"].values() for v in se_set", "CGRA.isIOShared(): min_maph = max(math.ceil((input_count + output_count) / 2),\\ minh_op) else: min_maph = max(input_count,", "import os import signal import math main_pid = os.getpid() class MapHeightEval(EvalBase): def __init__(self):", "main_pid = os.getpid() class MapHeightEval(EvalBase): def __init__(self): pass @staticmethod def eval(CGRA, app, sim_params,", "[v for se_set in rsc[\"SE\"].values() for v in se_set ] or \\ node", "\"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if CGRA.isIOShared(): min_maph = max(math.ceil((input_count +", "evaluated Returns: int: mapping height \"\"\" y_coords = [] SEs = [v for", "import signal import math main_pid = os.getpid() class MapHeightEval(EvalBase): def __init__(self): pass @staticmethod", "individual (Individual): An individual to be evaluated Returns: int: mapping height \"\"\" y_coords", "individual.routed_graph.nodes() if CGRA.isALU(v)] width, height = CGRA.getSize() for node in SEs + ALUs:", "= [] SEs = [v for v in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs =", "y_coords = [] SEs = [v for v in individual.routed_graph.nodes() if CGRA.isSE(v)] ALUs", "y_coords.append(y) break map_height = max(y_coords) + 1 if \"quit_minheight\" in info.keys(): if info[\"quit_minheight\"]", "range(width): for y in range(height): rsc = CGRA.get_PE_resources((x, y)) if node in [v", "minh_op) else: min_maph = max(input_count, output_count, minh_op) if min_maph == map_height and individual.isValid():", "file is part of GenMap and released under the MIT License, see LICENSE.", "== rsc[\"ALU\"]: y_coords.append(y) break map_height = max(y_coords) + 1 if \"quit_minheight\" in info.keys():", "LICENSE. 
# Author: <NAME> from EvalBase import EvalBase import networkx as nx import", "os import signal import math main_pid = os.getpid() class MapHeightEval(EvalBase): def __init__(self): pass", "def __init__(self): pass @staticmethod def eval(CGRA, app, sim_params, individual, **info): \"\"\"Return mapping height.", "mapping height. Args: CGRA (PEArrayModel): A model of the CGRA app (Application): An", "height. Args: CGRA (PEArrayModel): A model of the CGRA app (Application): An application", "for y in range(height): rsc = CGRA.get_PE_resources((x, y)) if node in [v for", "range(height): rsc = CGRA.get_PE_resources((x, y)) if node in [v for se_set in rsc[\"SE\"].values()", "len(set(nx.get_node_attributes(\\ app.getInputSubGraph(), \"input\").keys())) output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ /", "output_count = len(set(nx.get_node_attributes(\\ app.getOutputSubGraph(), \"output\").keys())) minh_op = math.ceil(len(app.getCompSubGraph().nodes()) \\ / width) if CGRA.isIOShared():", "min_maph = max(math.ceil((input_count + output_count) / 2),\\ minh_op) else: min_maph = max(input_count, output_count,", "be evaluated Returns: int: mapping height \"\"\" y_coords = [] SEs = [v", "@staticmethod def eval(CGRA, app, sim_params, individual, **info): \"\"\"Return mapping height. 
Args: CGRA (PEArrayModel):", "for v in individual.routed_graph.nodes() if CGRA.isALU(v)] width, height = CGRA.getSize() for node in", "for x in range(width): for y in range(height): rsc = CGRA.get_PE_resources((x, y)) if", "import math main_pid = os.getpid() class MapHeightEval(EvalBase): def __init__(self): pass @staticmethod def eval(CGRA,", "rsc[\"SE\"].values() for v in se_set ] or \\ node == rsc[\"ALU\"]: y_coords.append(y) break", "node in SEs + ALUs: for x in range(width): for y in range(height):", "se_set in rsc[\"SE\"].values() for v in se_set ] or \\ node == rsc[\"ALU\"]:", "import networkx as nx import os import signal import math main_pid = os.getpid()", "\"\"\" y_coords = [] SEs = [v for v in individual.routed_graph.nodes() if CGRA.isSE(v)]", "part of GenMap and released under the MIT License, see LICENSE. # Author:", "sim_params, individual, **info): \"\"\"Return mapping height. Args: CGRA (PEArrayModel): A model of the" ]
[ "cloud_update_time is not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time is not None: pulumi.set(__self__,", "None, gateway_type: Optional[str] = None, last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time: Optional[str] = None):", "pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]: return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter", "from .. import _utilities, _tables from . import outputs __all__ = [ 'DeviceConfig',", "Optional[float] = None): if details is not None: pulumi.set(__self__, \"details\", details) if message", "public key used to verify the signature of JSON Web Tokens (JWTs). Structure", "not edit by hand unless you're certain you know what you are doing!", "Union from .. import _utilities, _tables from . import outputs __all__ = [", "not None: pulumi.set(__self__, \"message\", message) if number is not None: pulumi.set(__self__, \"number\", number)", "data. \"\"\" return pulumi.get(self, \"key\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "Dict, List, Mapping, Optional, Tuple, Union from .. import _utilities, _tables from .", ":param str expiration_time: The time at which this credential becomes invalid. 
\"\"\" pulumi.set(__self__,", "Optional[str] = None, last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time: Optional[str] = None): \"\"\" :param", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__, *, format:", "RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state: str): \"\"\" :param str http_enabled_state: The field allows", "Optional[str] = None, number: Optional[float] = None): if details is not None: pulumi.set(__self__,", "List, Mapping, Optional, Tuple, Union from .. import _utilities, _tables from . import", "def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]:", "certificate(self) -> str: \"\"\" The certificate data. \"\"\" return pulumi.get(self, \"certificate\") @property @pulumi.getter", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state: str): \"\"\"", "or prop @pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method: Optional[str] = None, gateway_type:", "value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. :param str last_accessed_gateway_id: -", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate: str,", "return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]: return pulumi.get(self, \"update_time\") def", "are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. :param str key: The key data. 
\"\"\"", "\"version\", version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\")", "is not None: pulumi.set(__self__, \"details\", details) if message is not None: pulumi.set(__self__, \"message\",", "@pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state: str): \"\"\" :param str mqtt_enabled_state: The", "DeviceCredentialPublicKey(dict): def __init__(__self__, *, format: str, key: str): \"\"\" :param str format: The", "@property @pulumi.getter def version(self) -> Optional[str]: return pulumi.get(self, \"version\") def _translate_property(self, prop): return", "subfolder_matches(self) -> Optional[str]: \"\"\" If the subfolder name matches this string exactly, this", "last_accessed_gateway_id: - The ID of the gateway the device accessed most recently. :param", "the device is a gateway. Default value is `NON_GATEWAY`. Possible values are `GATEWAY`", "number is not None: pulumi.set(__self__, \"number\", number) @property @pulumi.getter def details(self) -> Optional[List[Mapping[str,", "None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if update_time is not", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state: str): \"\"\" :param", "pulumi.get(self, \"expiration_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def", "pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\", key) @property @pulumi.getter def format(self) -> str: \"\"\"", "\"public_key\", public_key) if expiration_time is not None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def", "`ES256_X509_PEM`. 
:param str key: The key data. \"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\",", "specified in last_accessed_gateway. \"\"\" if gateway_auth_method is not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if", "is not None: pulumi.set(__self__, \"message\", message) if number is not None: pulumi.set(__self__, \"number\",", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__, *, format: str, key:", "The key data. \"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\", key) @property @pulumi.getter def", "key) @property @pulumi.getter def format(self) -> str: \"\"\" The format of the key.", "\"\"\" - The most recent time at which the device accessed the gateway", "str http_enabled_state: The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property", "gateway. Default value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. :param str", "__init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key certificate format", ":param str http_enabled_state: The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state)", "= None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time is", "to publish device events. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) ->", "-> str: \"\"\" PubSub topic name to publish device events. 
\"\"\" return pulumi.get(self,", "pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]: \"\"\" - The ID of", "*** # *** Do not edit by hand unless you're certain you know", "@property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates whether the device is a", "DeviceCredential(dict): def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None): \"\"\" :param 'DeviceCredentialPublicKeyArgs'", "str: \"\"\" The field allows only `X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\") def _translate_property(self,", "subfolder_matches: If the subfolder name matches this string exactly, this configuration will be", "`GATEWAY` and `NON_GATEWAY`. \"\"\" return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]:", "def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name to publish device events. \"\"\"", "publish device events. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str:", "class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate: str, format: str): \"\"\" :param str certificate:", "\"\"\" return pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "gateway_type(self) -> Optional[str]: \"\"\" Indicates whether the device is a gateway. Default value", "allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "The time at which this credential becomes invalid. 
\"\"\" return pulumi.get(self, \"expiration_time\") def", "pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceLastErrorStatus(dict): def", "details) if message is not None: pulumi.set(__self__, \"message\", message) if number is not", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method: Optional[str] = None,", ":param str subfolder_matches: If the subfolder name matches this string exactly, this configuration", "empty, all strings are matched. Empty value can only be used for the", "the device is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param", "prop @pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state: str): \"\"\" :param str mqtt_enabled_state:", "\"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def", "@pulumi.getter def version(self) -> Optional[str]: return pulumi.get(self, \"version\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "device_ack_time) if version is not None: pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self)", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredential(dict): def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\"", "mqtt_enabled_state(self) -> str: \"\"\" The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. 
\"\"\" return pulumi.get(self,", "None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time is not", "@property @pulumi.getter def format(self) -> str: \"\"\" The format of the key. Possible", "binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]: return", "'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict): def __init__(__self__, *, binary_data: Optional[str] =", "*, pubsub_topic_name: str, subfolder_matches: Optional[str] = None): \"\"\" :param str pubsub_topic_name: PubSub topic", "the last `event_notification_configs` item. \"\"\" return pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state: str):", "publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "str, format: str): \"\"\" :param str certificate: The certificate data. 
:param str format:", "@property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]: \"\"\" If the subfolder name matches this", "@pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate: str, format: str): \"\"\" :param str", "'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict): def __init__(__self__, *, binary_data:", "prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name: str): \"\"\" :param str pubsub_topic_name:", "None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates whether", "@pulumi.output_type class DeviceConfig(dict): def __init__(__self__, *, binary_data: Optional[str] = None, cloud_update_time: Optional[str] =", "return pulumi.get(self, \"update_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredential(dict):", "'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict): def __init__(__self__, *,", "\"format\", format) pulumi.set(__self__, \"key\", key) @property @pulumi.getter def format(self) -> str: \"\"\" The", "certificate format and data. \"\"\" return pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "\"\"\" :param str pubsub_topic_name: PubSub topic name to publish device events. :param str", "key used to verify the signature of JSON Web Tokens (JWTs). 
Structure is", "'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key certificate format and data. \"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate)", "time at which this credential becomes invalid. \"\"\" pulumi.set(__self__, \"public_key\", public_key) if expiration_time", "Indicates whether the device is a gateway. Default value is `NON_GATEWAY`. Possible values", "recent time at which the device accessed the gateway specified in last_accessed_gateway. \"\"\"", "def __init__(__self__, *, pubsub_topic_name: str, subfolder_matches: Optional[str] = None): \"\"\" :param str pubsub_topic_name:", "@property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name to publish device", "\"format\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__,", "None): \"\"\" :param str pubsub_topic_name: PubSub topic name to publish device events. :param", "`ES256_X509_PEM`. \"\"\" return pulumi.get(self, \"format\") @property @pulumi.getter def key(self) -> str: \"\"\" The", "warnings import pulumi import pulumi.runtime from typing import Any, Dict, List, Mapping, Optional,", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredential(dict): def __init__(__self__, *, public_key_certificate:", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredential(dict): def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'):", "Optional[str]: return pulumi.get(self, \"version\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", ":param str pubsub_topic_name: PubSub topic name to publish device events. 
:param str subfolder_matches:", "(tfgen) Tool. *** # *** Do not edit by hand unless you're certain", "format: str): \"\"\" :param str certificate: The certificate data. :param str format: The", "if message is not None: pulumi.set(__self__, \"message\", message) if number is not None:", "the gateway specified in last_accessed_gateway. \"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop): return", "__init__(__self__, *, binary_data: Optional[str] = None, update_time: Optional[str] = None): if binary_data is", "format(self) -> str: \"\"\" The field allows only `X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\")", "\"device_ack_time\", device_ack_time) if version is not None: pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\") def", "-> Optional[str]: \"\"\" - The most recent time at which the device accessed", "http_enabled_state(self) -> str: \"\"\" The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return pulumi.get(self,", "# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen)", "subfolder name matches this string exactly, this configuration will be used. The string", "__init__(__self__, *, pubsub_topic_name: str, subfolder_matches: Optional[str] = None): \"\"\" :param str pubsub_topic_name: PubSub", "\"cloud_update_time\", cloud_update_time) if device_ack_time is not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version is", "used to verify the signature of JSON Web Tokens (JWTs). 
Structure is documented", "if last_accessed_gateway_time is not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) ->", "class DeviceLastErrorStatus(dict): def __init__(__self__, *, details: Optional[List[Mapping[str, Any]]] = None, message: Optional[str] =", "\"\"\" If the subfolder name matches this string exactly, this configuration will be", "'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict): def __init__(__self__,", "expiration_time: Optional[str] = None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A public key used to", "format of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. :param", "this configuration will be used. The string must not include the leading '/'", "public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A public key", "The ID of the gateway the device accessed most recently. 
:param str last_accessed_gateway_time:", "return pulumi.get(self, \"key\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceGatewayConfig(dict):", "@pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates whether the device is a gateway.", "class DeviceConfig(dict): def __init__(__self__, *, binary_data: Optional[str] = None, cloud_update_time: Optional[str] = None,", "message(self) -> Optional[str]: return pulumi.get(self, \"message\") @property @pulumi.getter def number(self) -> Optional[float]: return", "\"key\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__,", "for the last `event_notification_configs` item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is not", "recently. :param str last_accessed_gateway_time: - The most recent time at which the device", "\"number\", number) @property @pulumi.getter def details(self) -> Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\") @property", "import pulumi import pulumi.runtime from typing import Any, Dict, List, Mapping, Optional, Tuple,", "public key certificate format and data. \"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def", "-> Optional[str]: \"\"\" The time at which this credential becomes invalid. \"\"\" return", "the device is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. 
\"\"\"", "@property @pulumi.getter def number(self) -> Optional[float]: return pulumi.get(self, \"number\") def _translate_property(self, prop): return", "pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryMqttConfig(dict): def", "binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if update_time is not None: pulumi.set(__self__,", "\"version\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredential(dict): def __init__(__self__,", "*, mqtt_enabled_state: str): \"\"\" :param str mqtt_enabled_state: The field allows `MQTT_ENABLED` or `MQTT_DISABLED`.", "`ES256_PEM`, and `ES256_X509_PEM`. :param str key: The key data. \"\"\" pulumi.set(__self__, \"format\", format)", "\"format\") @property @pulumi.getter def key(self) -> str: \"\"\" The key data. \"\"\" return", "of the gateway the device accessed most recently. \"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property", "'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key certificate format and data. \"\"\" return pulumi.get(self, \"public_key_certificate\")", "@pulumi.getter def details(self) -> Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\") @property @pulumi.getter def message(self)", "\"\"\" return pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "def last_accessed_gateway_id(self) -> Optional[str]: \"\"\" - The ID of the gateway the device", "`RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. 
\"\"\" return pulumi.get(self, \"format\") @property @pulumi.getter def key(self)", "= None): \"\"\" :param str gateway_auth_method: Indicates whether the device is a gateway.", "\"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str: \"\"\" The field", "key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. \"\"\" return pulumi.get(self, \"format\")", "prop @pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method: Optional[str] = None, gateway_type: Optional[str]", "pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type is not None: pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id", "def gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates whether the device is a gateway. Possible", "string must not include the leading '/' character. If empty, all strings are", "topic name to publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def", "The ID of the gateway the device accessed most recently. \"\"\" return pulumi.get(self,", ". import outputs __all__ = [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential',", "name to publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self)", "JSON Web Tokens (JWTs). Structure is documented below. 
:param str expiration_time: The time", "is not None: pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id is not None: pulumi.set(__self__, \"last_accessed_gateway_id\",", "return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]: \"\"\" Indicates whether the", "or prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name: str): \"\"\" :param str", "strings are matched. Empty value can only be used for the last `event_notification_configs`", "Indicates whether the device is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and", "pulumi.get(self, \"version\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredential(dict): def", "@pulumi.getter def number(self) -> Optional[float]: return pulumi.get(self, \"number\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "@pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key certificate format and data.", "'/' character. If empty, all strings are matched. Empty value can only be", "is not None: pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time is not None: pulumi.set(__self__, \"cloud_update_time\",", "public_key) if expiration_time is not None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self)", "pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]: \"\"\" The time at which", "`DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. 
\"\"\" return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]:", "Default value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. \"\"\" return pulumi.get(self,", "\"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\", key) @property @pulumi.getter def format(self) -> str:", "events. :param str subfolder_matches: If the subfolder name matches this string exactly, this", "documented below. \"\"\" return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]: \"\"\"", "__init__(__self__, *, certificate: str, format: str): \"\"\" :param str certificate: The certificate data.", "for the last `event_notification_configs` item. \"\"\" return pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop): return", "Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. \"\"\" return pulumi.get(self, \"format\") @property", "def __init__(__self__, *, format: str, key: str): \"\"\" :param str format: The format", "not None: pulumi.set(__self__, \"number\", number) @property @pulumi.getter def details(self) -> Optional[List[Mapping[str, Any]]]: return", "str): \"\"\" :param str format: The format of the key. 
Possible values are", "'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type", "is not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type is not None: pulumi.set(__self__, \"gateway_type\",", "\"http_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__,", "format and data. \"\"\" return pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "`ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. \"\"\" return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) ->", "not include the leading '/' character. If empty, all strings are matched. Empty", "`NON_GATEWAY`. \"\"\" return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]: \"\"\" -", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__, *, details: Optional[List[Mapping[str, Any]]] =", "RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate: str, format: str): \"\"\" :param str certificate: The", "Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless", "\"\"\" PubSub topic name to publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property", "field allows only `X509_CERTIFICATE_PEM`. 
\"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\", format) @property @pulumi.getter", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceState(dict): def __init__(__self__, *, binary_data: Optional[str]", "if number is not None: pulumi.set(__self__, \"number\", number) @property @pulumi.getter def details(self) ->", "@pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name to publish device events.", "allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__, *, format: str, key: str): \"\"\" :param", "whether the device is a gateway. Default value is `NON_GATEWAY`. Possible values are", "-> str: \"\"\" The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\")", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *,", "not None: pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time is not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time)", "or prop @pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state: str): \"\"\" :param str", "None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property", "@pulumi.getter def format(self) -> str: \"\"\" The field allows only `X509_CERTIFICATE_PEM`. 
\"\"\" return", "*, format: str, key: str): \"\"\" :param str format: The format of the", ":param str format: The format of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`,", "\"\"\" The key data. \"\"\" return pulumi.get(self, \"key\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time is not None: pulumi.set(__self__,", "Optional[str] = None, update_time: Optional[str] = None): if binary_data is not None: pulumi.set(__self__,", "is not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]: \"\"\"", "return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]: \"\"\" - The most", "this string exactly, this configuration will be used. The string must not include", "certificate format and data. \"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) ->", "-> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]: return pulumi.get(self,", "\"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key certificate format and data. \"\"\" pulumi.set(__self__,", "@pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name: str, subfolder_matches: Optional[str] = None): \"\"\"", "`NON_GATEWAY`. :param str last_accessed_gateway_id: - The ID of the gateway the device accessed", "pubsub_topic_name: PubSub topic name to publish device events. 
\"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property", "you're certain you know what you are doing! *** import warnings import pulumi", "unless you're certain you know what you are doing! *** import warnings import", "to publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "None, last_accessed_gateway_time: Optional[str] = None): \"\"\" :param str gateway_auth_method: Indicates whether the device", "\"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name to", "public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A public key used to verify the signature of", "`MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str:", "number(self) -> Optional[float]: return pulumi.get(self, \"number\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "= [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig',", "class DeviceCredential(dict): def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None): \"\"\" :param", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredential(dict): def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str]", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method: 
Optional[str] =", "if details is not None: pulumi.set(__self__, \"details\", details) if message is not None:", "def message(self) -> Optional[str]: return pulumi.get(self, \"message\") @property @pulumi.getter def number(self) -> Optional[float]:", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name:", "documented below. :param str expiration_time: The time at which this credential becomes invalid.", "class DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method: Optional[str] = None, gateway_type: Optional[str] = None,", "or prop @pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__, *, details: Optional[List[Mapping[str, Any]]] = None,", "Optional[str]: \"\"\" Indicates whether the device is a gateway. Possible values are `ASSOCIATION_ONLY`,", "pulumi.get(self, \"update_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredential(dict): def", "pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version is not None: pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\")", "key data. \"\"\" return pulumi.get(self, \"key\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "Optional[str]: \"\"\" - The most recent time at which the device accessed the", "Tokens (JWTs). Structure is documented below. \"\"\" return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def", "topic name to publish device events. 
\"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def", "message) if number is not None: pulumi.set(__self__, \"number\", number) @property @pulumi.getter def details(self)", "public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key certificate format", "@pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self) ->", "typing import Any, Dict, List, Mapping, Optional, Tuple, Union from .. import _utilities,", "at which this credential becomes invalid. \"\"\" pulumi.set(__self__, \"public_key\", public_key) if expiration_time is", "the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by", "return pulumi.get(self, \"version\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredential(dict):", "'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict): def", "to verify the signature of JSON Web Tokens (JWTs). Structure is documented below.", "of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. 
:param str", "if update_time is not None: pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) ->", "last_accessed_gateway_id(self) -> Optional[str]: \"\"\" - The ID of the gateway the device accessed", "format) pulumi.set(__self__, \"key\", key) @property @pulumi.getter def format(self) -> str: \"\"\" The format", ":param str pubsub_topic_name: PubSub topic name to publish device events. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\",", "@property @pulumi.getter def format(self) -> str: \"\"\" The field allows only `X509_CERTIFICATE_PEM`. \"\"\"", "whether the device is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`.", "def __init__(__self__, *, details: Optional[List[Mapping[str, Any]]] = None, message: Optional[str] = None, number:", "gateway the device accessed most recently. \"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def", "pubsub_topic_name) if subfolder_matches is not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self)", "] @pulumi.output_type class DeviceConfig(dict): def __init__(__self__, *, binary_data: Optional[str] = None, cloud_update_time: Optional[str]", "\"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A public key used to verify the signature of", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name: str): \"\"\"", "\"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str: \"\"\" The field allows `HTTP_ENABLED`", "pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> 
Optional[str]: return pulumi.get(self, \"binary_data\") @property", "@pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__, *, details: Optional[List[Mapping[str, Any]]] = None, message: Optional[str]", "\"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A public key used", "is not None: pulumi.set(__self__, \"number\", number) @property @pulumi.getter def details(self) -> Optional[List[Mapping[str, Any]]]:", "Optional[str]: \"\"\" - The ID of the gateway the device accessed most recently.", "'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A public key used", ":param 'DeviceCredentialPublicKeyArgs' public_key: A public key used to verify the signature of JSON", "@property @pulumi.getter def details(self) -> Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\") @property @pulumi.getter def", "device events. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\"", "A public key certificate format and data. \"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\")", "specified in last_accessed_gateway. \"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "values are `GATEWAY` and `NON_GATEWAY`. :param str last_accessed_gateway_id: - The ID of the", "to publish device events. 
\"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) ->", "pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A public key", "not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates", "return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryStateNotificationConfig(dict):", "binary_data: Optional[str] = None, update_time: Optional[str] = None): if binary_data is not None:", "'DeviceCredentialPublicKeyArgs' public_key: A public key used to verify the signature of JSON Web", "by hand unless you're certain you know what you are doing! *** import", "format: The field allows only `X509_CERTIFICATE_PEM`. \"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\", format)", "@pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str: \"\"\" The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\"", "return pulumi.get(self, \"expiration_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredentialPublicKey(dict):", "gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates whether the device is a gateway. Possible values", "and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. 
\"\"\" return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]: \"\"\"", "\"\"\" return pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "-> str: \"\"\" The key data. \"\"\" return pulumi.get(self, \"key\") def _translate_property(self, prop):", "from . import outputs __all__ = [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState',", "import outputs __all__ = [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate',", "you are doing! *** import warnings import pulumi import pulumi.runtime from typing import", "allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) ->", "If empty, all strings are matched. Empty value can only be used for", "the device accessed the gateway specified in last_accessed_gateway. \"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def", "-> str: \"\"\" The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\")", "can only be used for the last `event_notification_configs` item. \"\"\" return pulumi.get(self, \"subfolder_matches\")", "-> Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\") @property @pulumi.getter def message(self) -> Optional[str]: return", ".. import _utilities, _tables from . import outputs __all__ = [ 'DeviceConfig', 'DeviceCredential',", "\"\"\" PubSub topic name to publish device events. 
\"\"\" return pulumi.get(self, \"pubsub_topic_name\") def", "pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\")", "credential becomes invalid. \"\"\" pulumi.set(__self__, \"public_key\", public_key) if expiration_time is not None: pulumi.set(__self__,", "\"\"\" - The ID of the gateway the device accessed most recently. \"\"\"", "Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're", "None: pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id is not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if", "device is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. \"\"\" return", "and data. \"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\"", "data. \"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\", key) @property @pulumi.getter def format(self) ->", "None): \"\"\" :param str gateway_auth_method: Indicates whether the device is a gateway. Possible", "if gateway_type is not None: pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id is not None:", "is a gateway. Default value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`.", "the signature of JSON Web Tokens (JWTs). Structure is documented below. \"\"\" return", "are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str gateway_type: Indicates whether the device is", "-> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key certificate format and data. 
\"\"\" return pulumi.get(self,", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__, *,", "Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str gateway_type: Indicates whether the", "import Any, Dict, List, Mapping, Optional, Tuple, Union from .. import _utilities, _tables", "item. \"\"\" return pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self)", "*, details: Optional[List[Mapping[str, Any]]] = None, message: Optional[str] = None, number: Optional[float] =", "of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. \"\"\" return", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *,", "`RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. :param str key: The key data. 
\"\"\" pulumi.set(__self__,", "\"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]: \"\"\" - The", "not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type is not None: pulumi.set(__self__, \"gateway_type\", gateway_type)", "@property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]: \"\"\" The time at which this credential", "<reponame>dimpu47/pulumi-gcp # coding=utf-8 # *** WARNING: this file was generated by the Pulumi", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name: str): \"\"\" :param", "Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]: return pulumi.get(self, \"device_ack_time\")", "JSON Web Tokens (JWTs). Structure is documented below. \"\"\" return pulumi.get(self, \"public_key\") @property", "str gateway_auth_method: Indicates whether the device is a gateway. Possible values are `ASSOCIATION_ONLY`,", "are `GATEWAY` and `NON_GATEWAY`. :param str last_accessed_gateway_id: - The ID of the gateway", "`HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str: \"\"\" The", "\"gateway_auth_method\", gateway_auth_method) if gateway_type is not None: pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id is", "pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\")", "Mapping, Optional, Tuple, Union from .. 
import _utilities, _tables from . import outputs", "Structure is documented below. :param str expiration_time: The time at which this credential", "# *** Do not edit by hand unless you're certain you know what", "Structure is documented below. \"\"\" return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) ->", "\"\"\" Indicates whether the device is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`,", "gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. \"\"\" return pulumi.get(self, \"gateway_auth_method\") @property", "gateway_auth_method: Indicates whether the device is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`,", "Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\") @property @pulumi.getter def message(self) -> Optional[str]: return pulumi.get(self,", "str certificate: The certificate data. :param str format: The field allows only `X509_CERTIFICATE_PEM`.", "if last_accessed_gateway_id is not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is not None:", "None: pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\")", "coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge", "is documented below. 
\"\"\" return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]:", "-> Optional[str]: \"\"\" - The ID of the gateway the device accessed most", "device_ack_time(self) -> Optional[str]: return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def version(self) -> Optional[str]: return", "@pulumi.output_type class DeviceCredential(dict): def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None): \"\"\"", "def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key certificate format and data. \"\"\"", "if device_ack_time is not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version is not None:", "key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. :param str key: The", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceState(dict): def __init__(__self__, *, binary_data: Optional[str] = None,", "Tokens (JWTs). Structure is documented below. :param str expiration_time: The time at which", "*** Do not edit by hand unless you're certain you know what you", "`event_notification_configs` item. 
\"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is not None: pulumi.set(__self__, \"subfolder_matches\",", "subfolder_matches is not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str:", "@pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]: \"\"\" - The most recent time at which", "pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def", "'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict):", "key certificate format and data. 
\"\"\" return pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop): return", "details: Optional[List[Mapping[str, Any]]] = None, message: Optional[str] = None, number: Optional[float] = None):", "return pulumi.get(self, \"format\") @property @pulumi.getter def key(self) -> str: \"\"\" The key data.", "-> Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]: return pulumi.get(self,", "details(self) -> Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\") @property @pulumi.getter def message(self) -> Optional[str]:", "if version is not None: pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) ->", "update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def", "string exactly, this configuration will be used. The string must not include the", "@property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self)", "gateway. Default value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. \"\"\" return", "= None, number: Optional[float] = None): if details is not None: pulumi.set(__self__, \"details\",", "must not include the leading '/' character. If empty, all strings are matched.", "Optional[str]: \"\"\" If the subfolder name matches this string exactly, this configuration will", "Possible values are `GATEWAY` and `NON_GATEWAY`. :param str last_accessed_gateway_id: - The ID of", "is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. \"\"\" return pulumi.get(self,", "accessed most recently. 
\"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]:", "The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop):", "\"mqtt_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__,", "`ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. \"\"\" return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]: \"\"\" Indicates", "events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]: \"\"\" If", "class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name: str, subfolder_matches: Optional[str] = None): \"\"\" :param", "None, message: Optional[str] = None, number: Optional[float] = None): if details is not", "the gateway specified in last_accessed_gateway. \"\"\" if gateway_auth_method is not None: pulumi.set(__self__, \"gateway_auth_method\",", "of JSON Web Tokens (JWTs). Structure is documented below. \"\"\" return pulumi.get(self, \"public_key\")", "this credential becomes invalid. 
\"\"\" return pulumi.get(self, \"expiration_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "return pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict):", "or prop @pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state: str): \"\"\" :param str", "'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict): def __init__(__self__, *, binary_data: Optional[str]", "outputs __all__ = [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem',", "format) @property @pulumi.getter def certificate(self) -> str: \"\"\" The certificate data. \"\"\" return", "value can only be used for the last `event_notification_configs` item. \"\"\" return pulumi.get(self,", "events. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub", "Optional[str]: \"\"\" The time at which this credential becomes invalid. 
\"\"\" return pulumi.get(self,", "pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__, *,", "If the subfolder name matches this string exactly, this configuration will be used.", "cloud_update_time) if device_ack_time is not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version is not", "time at which the device accessed the gateway specified in last_accessed_gateway. \"\"\" if", "gateway specified in last_accessed_gateway. \"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "@property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str: \"\"\" The field allows `HTTP_ENABLED` or `HTTP_DISABLED`.", "and `ES256_X509_PEM`. \"\"\" return pulumi.get(self, \"format\") @property @pulumi.getter def key(self) -> str: \"\"\"", "Optional[str] = None, last_accessed_gateway_time: Optional[str] = None): \"\"\" :param str gateway_auth_method: Indicates whether", "device accessed the gateway specified in last_accessed_gateway. \"\"\" if gateway_auth_method is not None:", "*, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key certificate format and", "a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. \"\"\" return pulumi.get(self, \"gateway_auth_method\")", "str): \"\"\" :param str mqtt_enabled_state: The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" pulumi.set(__self__,", "and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. 
:param str gateway_type: Indicates whether the device is a gateway. Default", "*, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A public", "pulumi.get(self, \"key\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceGatewayConfig(dict): def", "a gateway. Default value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. \"\"\"", "used for the last `event_notification_configs` item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is", "pulumi.set(__self__, \"public_key\", public_key) if expiration_time is not None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\")", "format and data. \"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate':", "def __init__(__self__, *, certificate: str, format: str): \"\"\" :param str certificate: The certificate", "topic name to publish device events. 
\"\"\" return pulumi.get(self, \"pubsub_topic_name\") def _translate_property(self, prop):", "Optional[str] = None, device_ack_time: Optional[str] = None, version: Optional[str] = None): if binary_data", "*, http_enabled_state: str): \"\"\" :param str http_enabled_state: The field allows `HTTP_ENABLED` or `HTTP_DISABLED`.", "return pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryMqttConfig(dict):", "def subfolder_matches(self) -> Optional[str]: \"\"\" If the subfolder name matches this string exactly,", "None: pulumi.set(__self__, \"binary_data\", binary_data) if update_time is not None: pulumi.set(__self__, \"update_time\", update_time) @property", "\"\"\" The format of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and", "last_accessed_gateway_time is not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]:", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state:", "field allows only `X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "Tool. 
*** # *** Do not edit by hand unless you're certain you", "None: pulumi.set(__self__, \"message\", message) if number is not None: pulumi.set(__self__, \"number\", number) @property", "not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version is not None: pulumi.set(__self__, \"version\", version)", "\"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\", format) @property @pulumi.getter def certificate(self) -> str:", "prop @pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state: str): \"\"\" :param str http_enabled_state:", "pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name", "value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. \"\"\" return pulumi.get(self, \"gateway_type\")", "\"certificate\") @property @pulumi.getter def format(self) -> str: \"\"\" The field allows only `X509_CERTIFICATE_PEM`.", "are matched. Empty value can only be used for the last `event_notification_configs` item.", "public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key certificate format and data.", "data. \"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A", "pulumi.get(self, \"format\") @property @pulumi.getter def key(self) -> str: \"\"\" The key data. \"\"\"", "str gateway_type: Indicates whether the device is a gateway. 
Default value is `NON_GATEWAY`.", "binary_data) if update_time is not None: pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self)", "\"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]: \"\"\" The time at which this", "gateway_auth_method is not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type is not None: pulumi.set(__self__,", "\"expiration_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__,", "cloud_update_time: Optional[str] = None, device_ack_time: Optional[str] = None, version: Optional[str] = None): if", "public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key certificate format and data. \"\"\" return", "last `event_notification_configs` item. \"\"\" return pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__, *,", "RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name: str): \"\"\" :param str pubsub_topic_name: PubSub topic name", "edit by hand unless you're certain you know what you are doing! ***", "return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]: return pulumi.get(self, \"device_ack_time\") @property", "becomes invalid. 
\"\"\" pulumi.set(__self__, \"public_key\", public_key) if expiration_time is not None: pulumi.set(__self__, \"expiration_time\",", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state: str):", "\"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "prop @pulumi.output_type class RegistryCredential(dict): def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate:", "gateway the device accessed most recently. :param str last_accessed_gateway_time: - The most recent", "Optional[str] = None): \"\"\" :param str pubsub_topic_name: PubSub topic name to publish device", "field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop): return", "the device accessed most recently. :param str last_accessed_gateway_time: - The most recent time", "`DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. 
:param str gateway_type: Indicates whether the device is a gateway.", "DeviceState(dict): def __init__(__self__, *, binary_data: Optional[str] = None, update_time: Optional[str] = None): if", "pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]: \"\"\" If the subfolder name", "cloud_update_time(self) -> Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]: return", "\"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredential(dict): def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey',", "public key certificate format and data. 
\"\"\" return pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop):", "-> Optional[float]: return pulumi.get(self, \"number\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type is not None: pulumi.set(__self__, \"gateway_type\", gateway_type) if", "Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]: return pulumi.get(self, \"update_time\")", "@property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]: \"\"\" - The ID of the gateway", "return pulumi.get(self, \"details\") @property @pulumi.getter def message(self) -> Optional[str]: return pulumi.get(self, \"message\") @property", "values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str gateway_type: Indicates whether the device", "device events. :param str subfolder_matches: If the subfolder name matches this string exactly,", "the leading '/' character. If empty, all strings are matched. Empty value can", "@pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name: str): \"\"\" :param str pubsub_topic_name: PubSub", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate: str, format: str):", "pulumi.get(self, \"certificate\") @property @pulumi.getter def format(self) -> str: \"\"\" The field allows only", "class RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state: str): \"\"\" :param str mqtt_enabled_state: The field", "= None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if update_time is", "str: \"\"\" The format of the key. 
Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`,", "__init__(__self__, *, pubsub_topic_name: str): \"\"\" :param str pubsub_topic_name: PubSub topic name to publish", "\"message\", message) if number is not None: pulumi.set(__self__, \"number\", number) @property @pulumi.getter def", "or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str: \"\"\"", "@pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str: \"\"\" The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\"", "http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str: \"\"\" The field allows `HTTP_ENABLED` or", "def key(self) -> str: \"\"\" The key data. \"\"\" return pulumi.get(self, \"key\") def", "Optional[List[Mapping[str, Any]]] = None, message: Optional[str] = None, number: Optional[float] = None): if", "\"\"\" if gateway_auth_method is not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type is not", "\"\"\" return pulumi.get(self, \"key\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "`ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str gateway_type: Indicates whether the device is a gateway. Default value", "*, certificate: str, format: str): \"\"\" :param str certificate: The certificate data. :param", "-> Optional[str]: \"\"\" Indicates whether the device is a gateway. Possible values are", "of the gateway the device accessed most recently. :param str last_accessed_gateway_time: - The", "\"subfolder_matches\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__,", "(JWTs). Structure is documented below. 
:param str expiration_time: The time at which this", "are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. \"\"\" return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self)", ":param str last_accessed_gateway_time: - The most recent time at which the device accessed", "\"\"\" return pulumi.get(self, \"certificate\") @property @pulumi.getter def format(self) -> str: \"\"\" The field", "\"\"\" return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]: \"\"\" Indicates whether", "`GATEWAY` and `NON_GATEWAY`. :param str last_accessed_gateway_id: - The ID of the gateway the", "None, number: Optional[float] = None): if details is not None: pulumi.set(__self__, \"details\", details)", "not None: pulumi.set(__self__, \"binary_data\", binary_data) if update_time is not None: pulumi.set(__self__, \"update_time\", update_time)", "str): \"\"\" :param str http_enabled_state: The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__,", "__init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A", "or `MQTT_DISABLED`. 
\"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str: \"\"\"", "import pulumi.runtime from typing import Any, Dict, List, Mapping, Optional, Tuple, Union from", "def __init__(__self__, *, pubsub_topic_name: str): \"\"\" :param str pubsub_topic_name: PubSub topic name to", "gateway_type: Optional[str] = None, last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time: Optional[str] = None): \"\"\"", "= None, cloud_update_time: Optional[str] = None, device_ack_time: Optional[str] = None, version: Optional[str] =", "update_time(self) -> Optional[str]: return pulumi.get(self, \"update_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "-> Optional[str]: return pulumi.get(self, \"version\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A public key used to", "ID of the gateway the device accessed most recently. \"\"\" return pulumi.get(self, \"last_accessed_gateway_id\")", "value can only be used for the last `event_notification_configs` item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\",", "DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method: Optional[str] = None, gateway_type: Optional[str] = None, last_accessed_gateway_id:", "Optional[str] = None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time", "at which the device accessed the gateway specified in last_accessed_gateway. \"\"\" if gateway_auth_method", "A public key certificate format and data. 
\"\"\" return pulumi.get(self, \"public_key_certificate\") def _translate_property(self,", "\"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", ":param str certificate: The certificate data. :param str format: The field allows only", "return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceLastErrorStatus(dict):", "if cloud_update_time is not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time is not None:", "class DeviceCredentialPublicKey(dict): def __init__(__self__, *, format: str, key: str): \"\"\" :param str format:", "pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time is not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time", "\"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates whether the device", "pubsub_topic_name: PubSub topic name to publish device events. :param str subfolder_matches: If the", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate: str, format:", "\"format\", format) @property @pulumi.getter def certificate(self) -> str: \"\"\" The certificate data. \"\"\"", "is not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version is not None: pulumi.set(__self__, \"version\",", "\"\"\" :param str format: The format of the key. Possible values are `RSA_PEM`,", "key: The key data. \"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\", key) @property @pulumi.getter", "character. If empty, all strings are matched. 
Empty value can only be used", "pulumi.get(self, \"details\") @property @pulumi.getter def message(self) -> Optional[str]: return pulumi.get(self, \"message\") @property @pulumi.getter", "topic name to publish device events. :param str subfolder_matches: If the subfolder name", "pulumi.runtime from typing import Any, Dict, List, Mapping, Optional, Tuple, Union from ..", "pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name", "this credential becomes invalid. \"\"\" pulumi.set(__self__, \"public_key\", public_key) if expiration_time is not None:", "- The most recent time at which the device accessed the gateway specified", "\"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]: \"\"\" - The ID of the", "\"last_accessed_gateway_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__,", "version(self) -> Optional[str]: return pulumi.get(self, \"version\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "(JWTs). Structure is documented below. \"\"\" return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self)", "- The ID of the gateway the device accessed most recently. :param str", "http_enabled_state: The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\")", "`HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "def format(self) -> str: \"\"\" The format of the key. 
Possible values are", "Possible values are `GATEWAY` and `NON_GATEWAY`. \"\"\" return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def", "-> Optional[str]: return pulumi.get(self, \"update_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict): def __init__(__self__, *, binary_data: Optional[str] = None, cloud_update_time:", "Optional[str]: return pulumi.get(self, \"message\") @property @pulumi.getter def number(self) -> Optional[float]: return pulumi.get(self, \"number\")", "@pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]: \"\"\" Indicates whether the device is a gateway.", "str, subfolder_matches: Optional[str] = None): \"\"\" :param str pubsub_topic_name: PubSub topic name to", "`X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]:", "class RegistryCredential(dict): def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public", "\"\"\" :param str certificate: The certificate data. :param str format: The field allows", "`ES256_PEM`, and `ES256_X509_PEM`. 
\"\"\" return pulumi.get(self, \"format\") @property @pulumi.getter def key(self) -> str:", "None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time is not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if", "Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]: return pulumi.get(self, \"cloud_update_time\")", "def __init__(__self__, *, binary_data: Optional[str] = None, update_time: Optional[str] = None): if binary_data", "\"\"\" The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\") def _translate_property(self,", "str format: The field allows only `X509_CERTIFICATE_PEM`. \"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\",", "Optional[str] = None, version: Optional[str] = None): if binary_data is not None: pulumi.set(__self__,", "__init__(__self__, *, gateway_auth_method: Optional[str] = None, gateway_type: Optional[str] = None, last_accessed_gateway_id: Optional[str] =", "most recent time at which the device accessed the gateway specified in last_accessed_gateway.", "last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates whether the device is", "recently. \"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]: \"\"\" -", "The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. 
\"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def", "*** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool.", "@property @pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A public key used to verify", "the last `event_notification_configs` item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is not None:", "below. :param str expiration_time: The time at which this credential becomes invalid. \"\"\"", "is not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time is not None: pulumi.set(__self__, \"device_ack_time\",", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredential(dict): def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param", "gateway_type is not None: pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id is not None: pulumi.set(__self__,", "key data. \"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\", key) @property @pulumi.getter def format(self)", "\"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\")", "Optional[str]: return pulumi.get(self, \"update_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "str): \"\"\" :param str certificate: The certificate data. 
:param str format: The field", "message: Optional[str] = None, number: Optional[float] = None): if details is not None:", "= None, version: Optional[str] = None): if binary_data is not None: pulumi.set(__self__, \"binary_data\",", "message is not None: pulumi.set(__self__, \"message\", message) if number is not None: pulumi.set(__self__,", "is not None: pulumi.set(__self__, \"binary_data\", binary_data) if update_time is not None: pulumi.set(__self__, \"update_time\",", "by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit", "@pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A public key used to verify the", "last_accessed_gateway_time: Optional[str] = None): \"\"\" :param str gateway_auth_method: Indicates whether the device is", "which the device accessed the gateway specified in last_accessed_gateway. \"\"\" return pulumi.get(self, \"last_accessed_gateway_time\")", "`MQTT_DISABLED`. \"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str: \"\"\" The", "def mqtt_enabled_state(self) -> str: \"\"\" The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" return", ":param str format: The field allows only `X509_CERTIFICATE_PEM`. \"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__,", "= None, update_time: Optional[str] = None): if binary_data is not None: pulumi.set(__self__, \"binary_data\",", "a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str gateway_type: Indicates", "def device_ack_time(self) -> Optional[str]: return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def version(self) -> Optional[str]:", "from typing import Any, Dict, List, Mapping, Optional, Tuple, Union from .. import", "public_key_certificate: A public key certificate format and data. 
\"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property", "\"\"\" The field allows only `X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\") def _translate_property(self, prop):", "used. The string must not include the leading '/' character. If empty, all", "'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class", "@pulumi.output_type class DeviceState(dict): def __init__(__self__, *, binary_data: Optional[str] = None, update_time: Optional[str] =", "*, binary_data: Optional[str] = None, update_time: Optional[str] = None): if binary_data is not", "format of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. \"\"\"", "str mqtt_enabled_state: The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. 
\"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property", "public_key: A public key used to verify the signature of JSON Web Tokens", "@pulumi.output_type class RegistryCredential(dict): def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A", "@property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]: \"\"\" Indicates whether the device is a", "return pulumi.get(self, \"number\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceState(dict):", "pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]: return pulumi.get(self, \"update_time\") def _translate_property(self,", "None: pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time is not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if", "\"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]: return pulumi.get(self, \"update_time\") def _translate_property(self, prop):", "class RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name: str): \"\"\" :param str pubsub_topic_name: PubSub topic", "pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key", "pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str: \"\"\" The field allows", "expiration_time(self) -> Optional[str]: \"\"\" The time at which this credential becomes invalid. 
\"\"\"", "@pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]: \"\"\" The time at which this credential becomes", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__, *, format: str, key: str):", "be used for the last `event_notification_configs` item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches", "= None, message: Optional[str] = None, number: Optional[float] = None): if details is", "- The ID of the gateway the device accessed most recently. \"\"\" return", "expiration_time is not None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey':", "publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]:", "Optional[str]: return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def version(self) -> Optional[str]: return pulumi.get(self, \"version\")", "Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand", "format(self) -> str: \"\"\" The format of the key. Possible values are `RSA_PEM`,", "The most recent time at which the device accessed the gateway specified in", "Web Tokens (JWTs). Structure is documented below. :param str expiration_time: The time at", ":param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key certificate format and data. \"\"\" pulumi.set(__self__, \"public_key_certificate\",", "`HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "@property @pulumi.getter def certificate(self) -> str: \"\"\" The certificate data. \"\"\" return pulumi.get(self,", "certificate data. 
\"\"\" return pulumi.get(self, \"certificate\") @property @pulumi.getter def format(self) -> str: \"\"\"", "ID of the gateway the device accessed most recently. :param str last_accessed_gateway_time: -", "the signature of JSON Web Tokens (JWTs). Structure is documented below. :param str", "None, version: Optional[str] = None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data)", "leading '/' character. If empty, all strings are matched. Empty value can only", "def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key:", "PubSub topic name to publish device events. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\")", "at which the device accessed the gateway specified in last_accessed_gateway. \"\"\" return pulumi.get(self,", "most recently. \"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]: \"\"\"", "str: \"\"\" The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\") def", "return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def version(self) -> Optional[str]: return pulumi.get(self, \"version\") def", "def version(self) -> Optional[str]: return pulumi.get(self, \"version\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "\"\"\" The time at which this credential becomes invalid. 
\"\"\" return pulumi.get(self, \"expiration_time\")", "= None): if details is not None: pulumi.set(__self__, \"details\", details) if message is", "# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name: str, subfolder_matches:", "device_ack_time: Optional[str] = None, version: Optional[str] = None): if binary_data is not None:", "The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop):", "the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. \"\"\" return pulumi.get(self,", "Optional[str] = None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if update_time", "expiration_time: The time at which this credential becomes invalid. \"\"\" pulumi.set(__self__, \"public_key\", public_key)", "str last_accessed_gateway_id: - The ID of the gateway the device accessed most recently.", "\"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key certificate", "data. \"\"\" return pulumi.get(self, \"certificate\") @property @pulumi.getter def format(self) -> str: \"\"\" The", "str: \"\"\" PubSub topic name to publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\")", "Optional[str] = None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A public key used to verify", "def gateway_type(self) -> Optional[str]: \"\"\" Indicates whether the device is a gateway. Default", "certain you know what you are doing! *** import warnings import pulumi import", "which the device accessed the gateway specified in last_accessed_gateway. 
\"\"\" if gateway_auth_method is", "name to publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") def _translate_property(self, prop): return", "accessed the gateway specified in last_accessed_gateway. \"\"\" if gateway_auth_method is not None: pulumi.set(__self__,", "allows only `X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "def cloud_update_time(self) -> Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]:", "\"\"\" return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]: \"\"\" The time", "None: pulumi.set(__self__, \"details\", details) if message is not None: pulumi.set(__self__, \"message\", message) if", "mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str: \"\"\" The field allows `MQTT_ENABLED` or", "exactly, this configuration will be used. The string must not include the leading", "be used. The string must not include the leading '/' character. If empty,", "import _utilities, _tables from . import outputs __all__ = [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey',", "_tables from . 
import outputs __all__ = [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus',", "@pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) ->", "prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate: str, format: str): \"\"\" :param", "DeviceLastErrorStatus(dict): def __init__(__self__, *, details: Optional[List[Mapping[str, Any]]] = None, message: Optional[str] = None,", "pulumi.get(self, \"number\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceState(dict): def", "gateway_auth_method: Optional[str] = None, gateway_type: Optional[str] = None, last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time:", "the device accessed most recently. \"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self)", "pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\", format) @property @pulumi.getter def certificate(self) -> str: \"\"\"", "\"\"\" The certificate data. \"\"\" return pulumi.get(self, \"certificate\") @property @pulumi.getter def format(self) ->", "key certificate format and data. 
\"\"\" pulumi.set(__self__, \"public_key_certificate\", public_key_certificate) @property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self)", "return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]: \"\"\" - The ID", "\"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]: \"\"\" If the", "\"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic", "and data. \"\"\" return pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "pubsub_topic_name: str): \"\"\" :param str pubsub_topic_name: PubSub topic name to publish device events.", "name to publish device events. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self)", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate:", "def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A public key used to verify the signature", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__, *, details: Optional[List[Mapping[str, Any]]]", "certificate: The certificate data. :param str format: The field allows only `X509_CERTIFICATE_PEM`. \"\"\"", "accessed the gateway specified in last_accessed_gateway. 
\"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop):", "\"device_ack_time\") @property @pulumi.getter def version(self) -> Optional[str]: return pulumi.get(self, \"version\") def _translate_property(self, prop):", "or prop @pulumi.output_type class RegistryCredential(dict): def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs'", "RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name: str, subfolder_matches: Optional[str] = None): \"\"\" :param str", "subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name to publish", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__, *,", "pulumi.set(__self__, \"details\", details) if message is not None: pulumi.set(__self__, \"message\", message) if number", "None, update_time: Optional[str] = None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data)", "`RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. \"\"\" return pulumi.get(self, \"format\") @property @pulumi.getter def key(self) ->", "data. :param str format: The field allows only `X509_CERTIFICATE_PEM`. \"\"\" pulumi.set(__self__, \"certificate\", certificate)", "pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\")", "verify the signature of JSON Web Tokens (JWTs). Structure is documented below. \"\"\"", "PubSub topic name to publish device events. :param str subfolder_matches: If the subfolder", "matched. Empty value can only be used for the last `event_notification_configs` item. 
\"\"\"", "\"\"\" pulumi.set(__self__, \"public_key\", public_key) if expiration_time is not None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property", "in last_accessed_gateway. \"\"\" if gateway_auth_method is not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type", "gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str gateway_type: Indicates whether", "= None): \"\"\" :param str pubsub_topic_name: PubSub topic name to publish device events.", "@pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]: \"\"\" If the subfolder name matches this string", "\"details\") @property @pulumi.getter def message(self) -> Optional[str]: return pulumi.get(self, \"message\") @property @pulumi.getter def", "\"certificate\", certificate) pulumi.set(__self__, \"format\", format) @property @pulumi.getter def certificate(self) -> str: \"\"\" The", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__, *,", "-> 'outputs.DeviceCredentialPublicKey': \"\"\" A public key used to verify the signature of JSON", "Optional, Tuple, Union from .. import _utilities, _tables from . 
import outputs __all__", "number: Optional[float] = None): if details is not None: pulumi.set(__self__, \"details\", details) if", "@pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__, *, format: str, key: str): \"\"\" :param str", "pulumi.set(__self__, \"binary_data\", binary_data) if update_time is not None: pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\")", "@pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method: Optional[str] = None, gateway_type: Optional[str] =", "= None, last_accessed_gateway_time: Optional[str] = None): \"\"\" :param str gateway_auth_method: Indicates whether the", ":param str gateway_auth_method: Indicates whether the device is a gateway. Possible values are", "pubsub_topic_name: str, subfolder_matches: Optional[str] = None): \"\"\" :param str pubsub_topic_name: PubSub topic name", "None: pulumi.set(__self__, \"number\", number) @property @pulumi.getter def details(self) -> Optional[List[Mapping[str, Any]]]: return pulumi.get(self,", "__all__ = [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig',", "return pulumi.get(self, \"format\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict):", "@property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str: \"\"\" The field allows `MQTT_ENABLED` or `MQTT_DISABLED`.", "know what you are doing! *** import warnings import pulumi import pulumi.runtime from", "str last_accessed_gateway_time: - The most recent time at which the device accessed the", "`event_notification_configs` item. 
\"\"\" return pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "or `HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time is not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version", "\"pubsub_topic_name\", pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name to", "\"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryStateNotificationConfig(dict): def __init__(__self__, *, pubsub_topic_name: str):", "*** import warnings import pulumi import pulumi.runtime from typing import Any, Dict, List,", "if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if update_time is not None:", "\"binary_data\", binary_data) if cloud_update_time is not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time is", "pulumi.set(__self__, \"key\", key) @property @pulumi.getter def format(self) -> str: \"\"\" The format of", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method: Optional[str]", "The string must not include the leading '/' character. 
If empty, all strings", "\"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property", "the subfolder name matches this string exactly, this configuration will be used. The", "pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def version(self) -> Optional[str]: return pulumi.get(self, \"version\") def _translate_property(self,", "PubSub topic name to publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") def _translate_property(self,", "key(self) -> str: \"\"\" The key data. \"\"\" return pulumi.get(self, \"key\") def _translate_property(self,", "pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryHttpConfig(dict): def", "def __init__(__self__, *, mqtt_enabled_state: str): \"\"\" :param str mqtt_enabled_state: The field allows `MQTT_ENABLED`", "signature of JSON Web Tokens (JWTs). Structure is documented below. \"\"\" return pulumi.get(self,", "return pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryHttpConfig(dict):", "Empty value can only be used for the last `event_notification_configs` item. 
\"\"\" return", "@pulumi.getter def message(self) -> Optional[str]: return pulumi.get(self, \"message\") @property @pulumi.getter def number(self) ->", "pulumi.set(__self__, \"number\", number) @property @pulumi.getter def details(self) -> Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\")", "return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]: \"\"\" If the subfolder", "*, pubsub_topic_name: str): \"\"\" :param str pubsub_topic_name: PubSub topic name to publish device", "def __init__(__self__, *, binary_data: Optional[str] = None, cloud_update_time: Optional[str] = None, device_ack_time: Optional[str]", "= None, gateway_type: Optional[str] = None, last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time: Optional[str] =", "or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__, *, certificate: str, format: str): \"\"\"", "`RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. :param str key: The key data. 
\"\"\" pulumi.set(__self__, \"format\",", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name:", "def number(self) -> Optional[float]: return pulumi.get(self, \"number\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "'RegistryMqttConfig', 'RegistryStateNotificationConfig', ] @pulumi.output_type class DeviceConfig(dict): def __init__(__self__, *, binary_data: Optional[str] = None,", "Optional[float]: return pulumi.get(self, \"number\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version is not None: pulumi.set(__self__, \"version\", version) @property", "the device accessed the gateway specified in last_accessed_gateway. \"\"\" if gateway_auth_method is not", "accessed most recently. :param str last_accessed_gateway_time: - The most recent time at which", "Optional[str] = None, gateway_type: Optional[str] = None, last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time: Optional[str]", "is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str gateway_type:", "Web Tokens (JWTs). Structure is documented below. \"\"\" return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\")", "return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property", "= None, last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time: Optional[str] = None): \"\"\" :param str", "gateway_type: Indicates whether the device is a gateway. Default value is `NON_GATEWAY`. Possible", "data. 
\"\"\" return pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "details is not None: pulumi.set(__self__, \"details\", details) if message is not None: pulumi.set(__self__,", "not None: pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self,", "of JSON Web Tokens (JWTs). Structure is documented below. :param str expiration_time: The", "device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "*, binary_data: Optional[str] = None, cloud_update_time: Optional[str] = None, device_ack_time: Optional[str] = None,", "__init__(__self__, *, mqtt_enabled_state: str): \"\"\" :param str mqtt_enabled_state: The field allows `MQTT_ENABLED` or", "configuration will be used. The string must not include the leading '/' character.", "binary_data) if cloud_update_time is not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time is not", "last_accessed_gateway_time: - The most recent time at which the device accessed the gateway", "@property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self)", "are `GATEWAY` and `NON_GATEWAY`. \"\"\" return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) ->", "def format(self) -> str: \"\"\" The field allows only `X509_CERTIFICATE_PEM`. 
\"\"\" return pulumi.get(self,", "def __init__(__self__, *, gateway_auth_method: Optional[str] = None, gateway_type: Optional[str] = None, last_accessed_gateway_id: Optional[str]", "pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]: \"\"\" - The most recent", "-> str: \"\"\" The certificate data. \"\"\" return pulumi.get(self, \"certificate\") @property @pulumi.getter def", "The certificate data. :param str format: The field allows only `X509_CERTIFICATE_PEM`. \"\"\" pulumi.set(__self__,", "The format of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`.", "-> str: \"\"\" The field allows only `X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\") def", "@pulumi.getter def key(self) -> str: \"\"\" The key data. \"\"\" return pulumi.get(self, \"key\")", "`NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. \"\"\" return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\")", "None): if details is not None: pulumi.set(__self__, \"details\", details) if message is not", "None: pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\")", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredential(dict): def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time:", "@property @pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]: return pulumi.get(self, \"update_time\") def _translate_property(self, prop): return", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceState(dict): def __init__(__self__, *,", "str key: The key data. 
\"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\", key) @property", "is not None: pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return", "Do not edit by hand unless you're certain you know what you are", "import warnings import pulumi import pulumi.runtime from typing import Any, Dict, List, Mapping,", "str, key: str): \"\"\" :param str format: The format of the key. Possible", "Optional[str] = None): \"\"\" :param str gateway_auth_method: Indicates whether the device is a", "prop @pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__, *, details: Optional[List[Mapping[str, Any]]] = None, message:", "pubsub_topic_name(self) -> str: \"\"\" PubSub topic name to publish device events. \"\"\" return", "last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time: Optional[str] = None): \"\"\" :param str gateway_auth_method: Indicates", "http_enabled_state: str): \"\"\" :param str http_enabled_state: The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\"", "if expiration_time is not None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self) ->", "only be used for the last `event_notification_configs` item. \"\"\" return pulumi.get(self, \"subfolder_matches\") def", "The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceState(dict): def __init__(__self__, *, binary_data:", "is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. :param str last_accessed_gateway_id: - The", "a gateway. Default value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. 
:param", "def last_accessed_gateway_time(self) -> Optional[str]: \"\"\" - The most recent time at which the", "is not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\"", "are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any,", "`MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not", "WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceGatewayConfig(dict): def __init__(__self__, *, gateway_auth_method:", "key: str): \"\"\" :param str format: The format of the key. Possible values", "and `NON_GATEWAY`. :param str last_accessed_gateway_id: - The ID of the gateway the device", "version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def", "last_accessed_gateway_id) if last_accessed_gateway_time is not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self)", "values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. 
\"\"\" return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name: str,", "@pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]: return pulumi.get(self, \"update_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "def http_enabled_state(self) -> str: \"\"\" The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return", "The field allows only `X509_CERTIFICATE_PEM`. \"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\", format) @property", "\"\"\" return pulumi.get(self, \"format\") @property @pulumi.getter def key(self) -> str: \"\"\" The key", "`NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. :param str last_accessed_gateway_id: - The ID", "invalid. \"\"\" pulumi.set(__self__, \"public_key\", public_key) if expiration_time is not None: pulumi.set(__self__, \"expiration_time\", expiration_time)", "@pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state: str): \"\"\" :param str http_enabled_state: The", "gateway_type) if last_accessed_gateway_id is not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is not", "pulumi.get(self, \"public_key_certificate\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def", "values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. 
:param str key: The key data.", "if subfolder_matches is not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) ->", "values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. \"\"\" return pulumi.get(self, \"format\") @property @pulumi.getter", "\"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]: \"\"\" - The most recent time", "prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name: str, subfolder_matches: Optional[str] = None):", "\"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]: \"\"\" If the subfolder name matches", "not None: pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id is not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id)", "format: The format of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and", "@property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]: return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def version(self)", "\"message\") @property @pulumi.getter def number(self) -> Optional[float]: return pulumi.get(self, \"number\") def _translate_property(self, prop):", "certificate data. :param str format: The field allows only `X509_CERTIFICATE_PEM`. 
\"\"\" pulumi.set(__self__, \"certificate\",", "'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig', ]", "return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]: \"\"\" The time at", "None, cloud_update_time: Optional[str] = None, device_ack_time: Optional[str] = None, version: Optional[str] = None):", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name: str, subfolder_matches: Optional[str]", "str subfolder_matches: If the subfolder name matches this string exactly, this configuration will", "'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key certificate format and data. \"\"\"", "= None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A public key used to verify the", "RegistryCredential(dict): def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key", "def certificate(self) -> str: \"\"\" The certificate data. 
\"\"\" return pulumi.get(self, \"certificate\") @property", "or prop @pulumi.output_type class DeviceState(dict): def __init__(__self__, *, binary_data: Optional[str] = None, update_time:", "None, device_ack_time: Optional[str] = None, version: Optional[str] = None): if binary_data is not", "Optional[str] = None, cloud_update_time: Optional[str] = None, device_ack_time: Optional[str] = None, version: Optional[str]", "def expiration_time(self) -> Optional[str]: \"\"\" The time at which this credential becomes invalid.", "-> str: \"\"\" The format of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`,", "device is a gateway. Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str", "\"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]: \"\"\" Indicates whether the device is", "_utilities, _tables from . import outputs __all__ = [ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig',", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__, *, format: str,", "@property @pulumi.getter def message(self) -> Optional[str]: return pulumi.get(self, \"message\") @property @pulumi.getter def number(self)", "-> Optional[str]: return pulumi.get(self, \"message\") @property @pulumi.getter def number(self) -> Optional[float]: return pulumi.get(self,", "DeviceConfig(dict): def __init__(__self__, *, binary_data: Optional[str] = None, cloud_update_time: Optional[str] = None, device_ack_time:", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredential(dict): def __init__(__self__, *,", "device accessed most recently. 
\"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) ->", "return pulumi.get(self, \"certificate\") @property @pulumi.getter def format(self) -> str: \"\"\" The field allows", "item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches)", "device_ack_time is not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time) if version is not None: pulumi.set(__self__,", "publish device events. :param str subfolder_matches: If the subfolder name matches this string", "used for the last `event_notification_configs` item. \"\"\" return pulumi.get(self, \"subfolder_matches\") def _translate_property(self, prop):", "time at which this credential becomes invalid. \"\"\" return pulumi.get(self, \"expiration_time\") def _translate_property(self,", "mqtt_enabled_state: The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. 
\"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\")", "\"\"\" return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]: \"\"\" - The", "\"number\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceState(dict): def __init__(__self__,", "None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A public", "number) @property @pulumi.getter def details(self) -> Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\") @property @pulumi.getter", "or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def __init__(__self__, *, pubsub_topic_name: str, subfolder_matches: Optional[str] =", "or `MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "gateway_auth_method) if gateway_type is not None: pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id is not", "str): \"\"\" :param str pubsub_topic_name: PubSub topic name to publish device events. 
\"\"\"", "certificate) pulumi.set(__self__, \"format\", format) @property @pulumi.getter def certificate(self) -> str: \"\"\" The certificate", "None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic", "None): \"\"\" :param 'DeviceCredentialPublicKeyArgs' public_key: A public key used to verify the signature", "is not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is not None: pulumi.set(__self__, \"last_accessed_gateway_time\",", "is not None: pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return", "pubsub_topic_name) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub topic name to publish", "\"gateway_type\", gateway_type) if last_accessed_gateway_id is not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is", "\"\"\" :param str pubsub_topic_name: PubSub topic name to publish device events. \"\"\" pulumi.set(__self__,", "doing! 
*** import warnings import pulumi import pulumi.runtime from typing import Any, Dict,", "[ 'DeviceConfig', 'DeviceCredential', 'DeviceCredentialPublicKey', 'DeviceGatewayConfig', 'DeviceLastErrorStatus', 'DeviceState', 'RegistryCredential', 'RegistryCredentialPublicKeyCertificate', 'RegistryEventNotificationConfigItem', 'RegistryHttpConfig', 'RegistryMqttConfig', 'RegistryStateNotificationConfig',", "version is not None: pulumi.set(__self__, \"version\", version) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]:", "__init__(__self__, *, details: Optional[List[Mapping[str, Any]]] = None, message: Optional[str] = None, number: Optional[float]", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__, *,", "only `X509_CERTIFICATE_PEM`. \"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\", format) @property @pulumi.getter def certificate(self)", "class DeviceState(dict): def __init__(__self__, *, binary_data: Optional[str] = None, update_time: Optional[str] = None):", "last_accessed_gateway_id is not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is not None: pulumi.set(__self__,", ":param str mqtt_enabled_state: The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. 
\"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state)", "subfolder_matches: Optional[str] = None): \"\"\" :param str pubsub_topic_name: PubSub topic name to publish", ":param str last_accessed_gateway_id: - The ID of the gateway the device accessed most", "prop @pulumi.output_type class DeviceCredential(dict): def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] = None):", "def details(self) -> Optional[List[Mapping[str, Any]]]: return pulumi.get(self, \"details\") @property @pulumi.getter def message(self) ->", "can only be used for the last `event_notification_configs` item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name)", "or prop @pulumi.output_type class DeviceCredentialPublicKey(dict): def __init__(__self__, *, format: str, key: str): \"\"\"", "below. \"\"\" return pulumi.get(self, \"public_key\") @property @pulumi.getter(name=\"expirationTime\") def expiration_time(self) -> Optional[str]: \"\"\" The", "str format: The format of the key. Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`,", "str: \"\"\" The key data. \"\"\" return pulumi.get(self, \"key\") def _translate_property(self, prop): return", "str: \"\"\" The certificate data. \"\"\" return pulumi.get(self, \"certificate\") @property @pulumi.getter def format(self)", "and `NON_GATEWAY`. 
\"\"\" return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]: \"\"\"", "\"binary_data\", binary_data) if update_time is not None: pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def", "format: str, key: str): \"\"\" :param str format: The format of the key.", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state: str): \"\"\"", "\"\"\" Indicates whether the device is a gateway. Default value is `NON_GATEWAY`. Possible", "The key data. \"\"\" return pulumi.get(self, \"key\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "*, gateway_auth_method: Optional[str] = None, gateway_type: Optional[str] = None, last_accessed_gateway_id: Optional[str] = None,", "not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time is not None: pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time)", "hand unless you're certain you know what you are doing! *** import warnings", "str expiration_time: The time at which this credential becomes invalid. \"\"\" pulumi.set(__self__, \"public_key\",", "-> Optional[str]: \"\"\" Indicates whether the device is a gateway. Default value is", "None, last_accessed_gateway_id: Optional[str] = None, last_accessed_gateway_time: Optional[str] = None): \"\"\" :param str gateway_auth_method:", "PubSub topic name to publish device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\")", "this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. 
*** #", "\"details\", details) if message is not None: pulumi.set(__self__, \"message\", message) if number is", "\"\"\" :param str gateway_auth_method: Indicates whether the device is a gateway. Possible values", "A public key used to verify the signature of JSON Web Tokens (JWTs).", "class RegistryHttpConfig(dict): def __init__(__self__, *, http_enabled_state: str): \"\"\" :param str http_enabled_state: The field", "at which this credential becomes invalid. \"\"\" return pulumi.get(self, \"expiration_time\") def _translate_property(self, prop):", "\"\"\" :param str http_enabled_state: The field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\",", "only be used for the last `event_notification_configs` item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if", "_tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state: str): \"\"\" :param", "binary_data: Optional[str] = None, cloud_update_time: Optional[str] = None, device_ack_time: Optional[str] = None, version:", "binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self) -> Optional[str]: return", "Empty value can only be used for the last `event_notification_configs` item. \"\"\" pulumi.set(__self__,", "the gateway the device accessed most recently. \"\"\" return pulumi.get(self, \"last_accessed_gateway_id\") @property @pulumi.getter(name=\"lastAccessedGatewayTime\")", "certificate: str, format: str): \"\"\" :param str certificate: The certificate data. :param str", "to publish device events. :param str subfolder_matches: If the subfolder name matches this", "name matches this string exactly, this configuration will be used. 
The string must", "__init__(__self__, *, format: str, key: str): \"\"\" :param str format: The format of", "@pulumi.getter def format(self) -> str: \"\"\" The format of the key. Possible values", "update_time: Optional[str] = None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if", "pulumi.get(self, \"message\") @property @pulumi.getter def number(self) -> Optional[float]: return pulumi.get(self, \"number\") def _translate_property(self,", "`ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. :param str gateway_type: Indicates whether the device is a", "signature of JSON Web Tokens (JWTs). Structure is documented below. :param str expiration_time:", "Any]]]: return pulumi.get(self, \"details\") @property @pulumi.getter def message(self) -> Optional[str]: return pulumi.get(self, \"message\")", "not None: pulumi.set(__self__, \"subfolder_matches\", subfolder_matches) @property @pulumi.getter(name=\"pubsubTopicName\") def pubsub_topic_name(self) -> str: \"\"\" PubSub", "will be used. The string must not include the leading '/' character. If", "__init__(__self__, *, http_enabled_state: str): \"\"\" :param str http_enabled_state: The field allows `HTTP_ENABLED` or", "\"\"\" The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self,", "pulumi.set(__self__, \"last_accessed_gateway_time\", last_accessed_gateway_time) @property @pulumi.getter(name=\"gatewayAuthMethod\") def gateway_auth_method(self) -> Optional[str]: \"\"\" Indicates whether the", "@pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self) -> Optional[str]: \"\"\" - The ID of the gateway the", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__, *, details:", "invalid. 
\"\"\" return pulumi.get(self, \"expiration_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. \"\"\" return pulumi.get(self, \"format\") @property @pulumi.getter def", "\"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]: return pulumi.get(self, \"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def", "prop @pulumi.output_type class DeviceState(dict): def __init__(__self__, *, binary_data: Optional[str] = None, update_time: Optional[str]", "only `X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceState(dict): def __init__(__self__, *, binary_data: Optional[str] =", "pulumi.get(self, \"format\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryEventNotificationConfigItem(dict): def", "becomes invalid. \"\"\" return pulumi.get(self, \"expiration_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "Optional[str]: \"\"\" Indicates whether the device is a gateway. 
Default value is `NON_GATEWAY`.", "\"cloud_update_time\") @property @pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]: return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def", "\"public_key_certificate\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredentialPublicKeyCertificate(dict): def __init__(__self__,", "Any]]] = None, message: Optional[str] = None, number: Optional[float] = None): if details", "device accessed most recently. :param str last_accessed_gateway_time: - The most recent time at", "@property @pulumi.getter(name=\"lastAccessedGatewayTime\") def last_accessed_gateway_time(self) -> Optional[str]: \"\"\" - The most recent time at", "-> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) -> Optional[str]: return pulumi.get(self,", "matches this string exactly, this configuration will be used. The string must not", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredential(dict): def __init__(__self__, *, public_key:", ":param str gateway_type: Indicates whether the device is a gateway. Default value is", "def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceCredential(dict): def __init__(__self__, *,", "= None, device_ack_time: Optional[str] = None, version: Optional[str] = None): if binary_data is", "which this credential becomes invalid. \"\"\" return pulumi.get(self, \"expiration_time\") def _translate_property(self, prop): return", "Tuple, Union from .. import _utilities, _tables from . import outputs __all__ =", "'outputs.DeviceCredentialPublicKey': \"\"\" A public key used to verify the signature of JSON Web", "The time at which this credential becomes invalid. 
\"\"\" pulumi.set(__self__, \"public_key\", public_key) if", "what you are doing! *** import warnings import pulumi import pulumi.runtime from typing", "last_accessed_gateway_time(self) -> Optional[str]: \"\"\" - The most recent time at which the device", "device accessed the gateway specified in last_accessed_gateway. \"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self,", "@property @pulumi.getter(name=\"publicKeyCertificate\") def public_key_certificate(self) -> 'outputs.RegistryCredentialPublicKeyCertificate': \"\"\" A public key certificate format and", "pulumi.set(__self__, \"format\", format) @property @pulumi.getter def certificate(self) -> str: \"\"\" The certificate data.", "`HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str:", "be used for the last `event_notification_configs` item. \"\"\" return pulumi.get(self, \"subfolder_matches\") def _translate_property(self,", "allows only `X509_CERTIFICATE_PEM`. \"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\", format) @property @pulumi.getter def", "Default value is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. :param str last_accessed_gateway_id:", "@pulumi.getter def certificate(self) -> str: \"\"\" The certificate data. \"\"\" return pulumi.get(self, \"certificate\")", "field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" return pulumi.get(self, \"http_enabled_state\") def _translate_property(self, prop): return", "\"\"\" :param str mqtt_enabled_state: The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. 
\"\"\" pulumi.set(__self__, \"mqtt_enabled_state\",", "update_time is not None: pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]:", "mqtt_enabled_state: str): \"\"\" :param str mqtt_enabled_state: The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\"", "you know what you are doing! *** import warnings import pulumi import pulumi.runtime", "Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. :param str key: The key", "not None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\" A", "@pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"cloudUpdateTime\") def cloud_update_time(self) ->", "last_accessed_gateway. \"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type", "last `event_notification_configs` item. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name) if subfolder_matches is not None: pulumi.set(__self__,", "def update_time(self) -> Optional[str]: return pulumi.get(self, \"update_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "The field allows only `X509_CERTIFICATE_PEM`. \"\"\" return pulumi.get(self, \"format\") def _translate_property(self, prop): return", "and `ES256_X509_PEM`. :param str key: The key data. \"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__,", "all strings are matched. Empty value can only be used for the last", "which this credential becomes invalid. 
\"\"\" pulumi.set(__self__, \"public_key\", public_key) if expiration_time is not", "\"\"\" A public key used to verify the signature of JSON Web Tokens", "version: Optional[str] = None): if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if", "str pubsub_topic_name: PubSub topic name to publish device events. \"\"\" pulumi.set(__self__, \"pubsub_topic_name\", pubsub_topic_name)", "return pulumi.get(self, \"message\") @property @pulumi.getter def number(self) -> Optional[float]: return pulumi.get(self, \"number\") def", "Any, Dict, List, Mapping, Optional, Tuple, Union from .. import _utilities, _tables from", "__init__(__self__, *, binary_data: Optional[str] = None, cloud_update_time: Optional[str] = None, device_ack_time: Optional[str] =", "@pulumi.getter(name=\"deviceAckTime\") def device_ack_time(self) -> Optional[str]: return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def version(self) ->", "\"key\", key) @property @pulumi.getter def format(self) -> str: \"\"\" The format of the", "if gateway_auth_method is not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type is not None:", "Possible values are `ASSOCIATION_ONLY`, `DEVICE_AUTH_TOKEN_ONLY`, and `ASSOCIATION_AND_DEVICE_AUTH_TOKEN`. \"\"\" return pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\")", "`X509_CERTIFICATE_PEM`. 
\"\"\" pulumi.set(__self__, \"certificate\", certificate) pulumi.set(__self__, \"format\", format) @property @pulumi.getter def certificate(self) ->", "RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state: str): \"\"\" :param str mqtt_enabled_state: The field allows", "-> Optional[str]: \"\"\" If the subfolder name matches this string exactly, this configuration", "-> Optional[str]: return pulumi.get(self, \"device_ack_time\") @property @pulumi.getter def version(self) -> Optional[str]: return pulumi.get(self,", "time at which the device accessed the gateway specified in last_accessed_gateway. \"\"\" return", "is not None: pulumi.set(__self__, \"expiration_time\", expiration_time) @property @pulumi.getter(name=\"publicKey\") def public_key(self) -> 'outputs.DeviceCredentialPublicKey': \"\"\"", "\"\"\" A public key certificate format and data. \"\"\" return pulumi.get(self, \"public_key_certificate\") def", "def __init__(__self__, *, http_enabled_state: str): \"\"\" :param str http_enabled_state: The field allows `HTTP_ENABLED`", "\"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str: \"\"\" The field allows `MQTT_ENABLED`", "\"\"\" return pulumi.get(self, \"format\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) -> str: \"\"\" The field allows", "most recently. :param str last_accessed_gateway_time: - The most recent time at which the", "is `NON_GATEWAY`. Possible values are `GATEWAY` and `NON_GATEWAY`. \"\"\" return pulumi.get(self, \"gateway_type\") @property", "is documented below. :param str expiration_time: The time at which this credential becomes", "file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # ***", "the key. 
Possible values are `RSA_PEM`, `RSA_X509_PEM`, `ES256_PEM`, and `ES256_X509_PEM`. :param str key:", "include the leading '/' character. If empty, all strings are matched. Empty value", ":param str key: The key data. \"\"\" pulumi.set(__self__, \"format\", format) pulumi.set(__self__, \"key\", key)", "verify the signature of JSON Web Tokens (JWTs). Structure is documented below. :param", "if binary_data is not None: pulumi.set(__self__, \"binary_data\", binary_data) if cloud_update_time is not None:", "name to publish device events. :param str subfolder_matches: If the subfolder name matches", "gateway specified in last_accessed_gateway. \"\"\" if gateway_auth_method is not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method)", "str pubsub_topic_name: PubSub topic name to publish device events. :param str subfolder_matches: If", "@property @pulumi.getter def key(self) -> str: \"\"\" The key data. \"\"\" return pulumi.get(self,", "pulumi.get(self, \"gateway_auth_method\") @property @pulumi.getter(name=\"gatewayType\") def gateway_type(self) -> Optional[str]: \"\"\" Indicates whether the device", "allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self) ->", "pulumi import pulumi.runtime from typing import Any, Dict, List, Mapping, Optional, Tuple, Union", "\"\"\" pulumi.set(__self__, \"mqtt_enabled_state\", mqtt_enabled_state) @property @pulumi.getter(name=\"mqttEnabledState\") def mqtt_enabled_state(self) -> str: \"\"\" The field", "in last_accessed_gateway. 
\"\"\" return pulumi.get(self, \"last_accessed_gateway_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "or prop @pulumi.output_type class DeviceCredential(dict): def __init__(__self__, *, public_key: 'outputs.DeviceCredentialPublicKey', expiration_time: Optional[str] =", "_translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryMqttConfig(dict): def __init__(__self__, *, mqtt_enabled_state:", "`MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do", "prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class DeviceLastErrorStatus(dict): def __init__(__self__, *, details: Optional[List[Mapping[str,", "not None: pulumi.set(__self__, \"details\", details) if message is not None: pulumi.set(__self__, \"message\", message)", "device events. \"\"\" return pulumi.get(self, \"pubsub_topic_name\") @property @pulumi.getter(name=\"subfolderMatches\") def subfolder_matches(self) -> Optional[str]: \"\"\"", "pulumi.set(__self__, \"message\", message) if number is not None: pulumi.set(__self__, \"number\", number) @property @pulumi.getter", "device is a gateway. Default value is `NON_GATEWAY`. Possible values are `GATEWAY` and", "field allows `HTTP_ENABLED` or `HTTP_DISABLED`. \"\"\" pulumi.set(__self__, \"http_enabled_state\", http_enabled_state) @property @pulumi.getter(name=\"httpEnabledState\") def http_enabled_state(self)", "the gateway the device accessed most recently. 
:param str last_accessed_gateway_time: - The most", "\"update_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class RegistryCredential(dict): def __init__(__self__,", "def __init__(__self__, *, public_key_certificate: 'outputs.RegistryCredentialPublicKeyCertificate'): \"\"\" :param 'RegistryCredentialPublicKeyCertificateArgs' public_key_certificate: A public key certificate", "not None: pulumi.set(__self__, \"cloud_update_time\", cloud_update_time) if device_ack_time is not None: pulumi.set(__self__, \"device_ack_time\", device_ack_time)", "str: \"\"\" The field allows `MQTT_ENABLED` or `MQTT_DISABLED`. \"\"\" return pulumi.get(self, \"mqtt_enabled_state\") def", "pulumi.set(__self__, \"gateway_type\", gateway_type) if last_accessed_gateway_id is not None: pulumi.set(__self__, \"last_accessed_gateway_id\", last_accessed_gateway_id) if last_accessed_gateway_time", "not None: pulumi.set(__self__, \"update_time\", update_time) @property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self,", "values are `GATEWAY` and `NON_GATEWAY`. \"\"\" return pulumi.get(self, \"gateway_type\") @property @pulumi.getter(name=\"lastAccessedGatewayId\") def last_accessed_gateway_id(self)", "credential becomes invalid. \"\"\" return pulumi.get(self, \"expiration_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "last_accessed_gateway. 
\"\"\" if gateway_auth_method is not None: pulumi.set(__self__, \"gateway_auth_method\", gateway_auth_method) if gateway_type is", "\"\"\" return pulumi.get(self, \"expiration_time\") def _translate_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop @pulumi.output_type class", "@property @pulumi.getter(name=\"binaryData\") def binary_data(self) -> Optional[str]: return pulumi.get(self, \"binary_data\") @property @pulumi.getter(name=\"updateTime\") def update_time(self)", "The certificate data. \"\"\" return pulumi.get(self, \"certificate\") @property @pulumi.getter def format(self) -> str:" ]
[ "List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled = Counter(), [], SortedList(range(k)), 0 for requestNumber, (requestArrivalTime,", "freeServerIndex = 0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName)) requestCounter[serverName] +=", "SortedList(range(k)), 0 for requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)): while priorityQueue and priorityQueue[0][0]", "Most Number of Requests.py from sortedcontainers import SortedList class Solution: def busiestServers(self, k:", "+ requestLoad, serverName)) requestCounter[serverName] += 1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return [serverName for", "the request as no server is busy continue freeServerIndex = serverList.bisect_left(requestNumber % k)", "of Requests.py from sortedcontainers import SortedList class Solution: def busiestServers(self, k: int, arrival:", "k: int, arrival: List[int], load: List[int]) -> List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled =", "maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return [serverName for serverName, count in requestCounter.items() if count", "and priorityQueue[0][0] <= requestArrivalTime: # Free up the server if request is completed", "(requestArrivalTime + requestLoad, serverName)) requestCounter[serverName] += 1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return [serverName", "priorityQueue and priorityQueue[0][0] <= requestArrivalTime: # Free up the server if request is", "Contests/Biweekly Contest 36/Find Servers That Handled Most Number of Requests.py from sortedcontainers import", "len(serverList): freeServerIndex = 0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName)) requestCounter[serverName]", "freeServerIndex = serverList.bisect_left(requestNumber % k) if freeServerIndex 
>= len(serverList): freeServerIndex = 0 serverName", ">= len(serverList): freeServerIndex = 0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName))", "server is busy continue freeServerIndex = serverList.bisect_left(requestNumber % k) if freeServerIndex >= len(serverList):", "priorityQueue[0][0] <= requestArrivalTime: # Free up the server if request is completed serverList.add(heappop(priorityQueue)[1])", "for requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)): while priorityQueue and priorityQueue[0][0] <= requestArrivalTime:", "Handled Most Number of Requests.py from sortedcontainers import SortedList class Solution: def busiestServers(self,", "load: List[int]) -> List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled = Counter(), [], SortedList(range(k)), 0", "serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName)) requestCounter[serverName] += 1 maxRequestHandled =", "0 for requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)): while priorityQueue and priorityQueue[0][0] <=", "if freeServerIndex >= len(serverList): freeServerIndex = 0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime +", "if not serverList: # Drop the request as no server is busy continue", "requestCounter[serverName] += 1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return [serverName for serverName, count in", "= serverList.bisect_left(requestNumber % k) if freeServerIndex >= len(serverList): freeServerIndex = 0 serverName =", "not serverList: # Drop the request as no server is busy continue freeServerIndex", "# Free up the server if request is completed serverList.add(heappop(priorityQueue)[1]) if not serverList:", "[], SortedList(range(k)), 0 for requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)): 
while priorityQueue and", "if request is completed serverList.add(heappop(priorityQueue)[1]) if not serverList: # Drop the request as", "is completed serverList.add(heappop(priorityQueue)[1]) if not serverList: # Drop the request as no server", "up the server if request is completed serverList.add(heappop(priorityQueue)[1]) if not serverList: # Drop", "1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return [serverName for serverName, count in requestCounter.items() if", "def busiestServers(self, k: int, arrival: List[int], load: List[int]) -> List[int]: requestCounter, priorityQueue, serverList,", "Free up the server if request is completed serverList.add(heappop(priorityQueue)[1]) if not serverList: #", "requestLoad, serverName)) requestCounter[serverName] += 1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return [serverName for serverName,", "<filename>LeetCode/Weekly Contests/Biweekly Contest 36/Find Servers That Handled Most Number of Requests.py from sortedcontainers", "# Drop the request as no server is busy continue freeServerIndex = serverList.bisect_left(requestNumber", "while priorityQueue and priorityQueue[0][0] <= requestArrivalTime: # Free up the server if request", "List[int], load: List[int]) -> List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled = Counter(), [], SortedList(range(k)),", "= Counter(), [], SortedList(range(k)), 0 for requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)): while", "serverList.add(heappop(priorityQueue)[1]) if not serverList: # Drop the request as no server is busy", "max(maxRequestHandled, requestCounter[serverName]) return [serverName for serverName, count in requestCounter.items() if count == maxRequestHandled]", "-> List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled = Counter(), [], SortedList(range(k)), 0 for requestNumber,", "load)): while priorityQueue and priorityQueue[0][0] <= requestArrivalTime: 
# Free up the server if", "SortedList class Solution: def busiestServers(self, k: int, arrival: List[int], load: List[int]) -> List[int]:", "busiestServers(self, k: int, arrival: List[int], load: List[int]) -> List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled", "Number of Requests.py from sortedcontainers import SortedList class Solution: def busiestServers(self, k: int,", "busy continue freeServerIndex = serverList.bisect_left(requestNumber % k) if freeServerIndex >= len(serverList): freeServerIndex =", "serverName)) requestCounter[serverName] += 1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return [serverName for serverName, count", "Contest 36/Find Servers That Handled Most Number of Requests.py from sortedcontainers import SortedList", "Counter(), [], SortedList(range(k)), 0 for requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)): while priorityQueue", "priorityQueue, serverList, maxRequestHandled = Counter(), [], SortedList(range(k)), 0 for requestNumber, (requestArrivalTime, requestLoad) in", "server if request is completed serverList.add(heappop(priorityQueue)[1]) if not serverList: # Drop the request", "= max(maxRequestHandled, requestCounter[serverName]) return [serverName for serverName, count in requestCounter.items() if count ==", "class Solution: def busiestServers(self, k: int, arrival: List[int], load: List[int]) -> List[int]: requestCounter,", "% k) if freeServerIndex >= len(serverList): freeServerIndex = 0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue,", "= 0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName)) requestCounter[serverName] += 1", "36/Find Servers That Handled Most Number of Requests.py from sortedcontainers import SortedList class", "That Handled Most Number of Requests.py from sortedcontainers import SortedList class Solution: def", "serverList: # Drop the request as no 
server is busy continue freeServerIndex =", "requestCounter, priorityQueue, serverList, maxRequestHandled = Counter(), [], SortedList(range(k)), 0 for requestNumber, (requestArrivalTime, requestLoad)", "as no server is busy continue freeServerIndex = serverList.bisect_left(requestNumber % k) if freeServerIndex", "arrival: List[int], load: List[int]) -> List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled = Counter(), [],", "k) if freeServerIndex >= len(serverList): freeServerIndex = 0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime", "requestArrivalTime: # Free up the server if request is completed serverList.add(heappop(priorityQueue)[1]) if not", "heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName)) requestCounter[serverName] += 1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return", "requestLoad) in enumerate(zip(arrival, load)): while priorityQueue and priorityQueue[0][0] <= requestArrivalTime: # Free up", "serverList.bisect_left(requestNumber % k) if freeServerIndex >= len(serverList): freeServerIndex = 0 serverName = serverList.pop(freeServerIndex)", "Solution: def busiestServers(self, k: int, arrival: List[int], load: List[int]) -> List[int]: requestCounter, priorityQueue,", "= serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName)) requestCounter[serverName] += 1 maxRequestHandled = max(maxRequestHandled,", "serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName)) requestCounter[serverName] += 1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName])", "List[int]) -> List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled = Counter(), [], SortedList(range(k)), 0 for", "is busy continue freeServerIndex = serverList.bisect_left(requestNumber % k) if freeServerIndex >= len(serverList): freeServerIndex", "continue freeServerIndex = 
serverList.bisect_left(requestNumber % k) if freeServerIndex >= len(serverList): freeServerIndex = 0", "requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)): while priorityQueue and priorityQueue[0][0] <= requestArrivalTime: #", "completed serverList.add(heappop(priorityQueue)[1]) if not serverList: # Drop the request as no server is", "Drop the request as no server is busy continue freeServerIndex = serverList.bisect_left(requestNumber %", "freeServerIndex >= len(serverList): freeServerIndex = 0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad,", "enumerate(zip(arrival, load)): while priorityQueue and priorityQueue[0][0] <= requestArrivalTime: # Free up the server", "from sortedcontainers import SortedList class Solution: def busiestServers(self, k: int, arrival: List[int], load:", "+= 1 maxRequestHandled = max(maxRequestHandled, requestCounter[serverName]) return [serverName for serverName, count in requestCounter.items()", "int, arrival: List[int], load: List[int]) -> List[int]: requestCounter, priorityQueue, serverList, maxRequestHandled = Counter(),", "(requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)): while priorityQueue and priorityQueue[0][0] <= requestArrivalTime: # Free", "request is completed serverList.add(heappop(priorityQueue)[1]) if not serverList: # Drop the request as no", "in enumerate(zip(arrival, load)): while priorityQueue and priorityQueue[0][0] <= requestArrivalTime: # Free up the", "Servers That Handled Most Number of Requests.py from sortedcontainers import SortedList class Solution:", "0 serverName = serverList.pop(freeServerIndex) heappush(priorityQueue, (requestArrivalTime + requestLoad, serverName)) requestCounter[serverName] += 1 maxRequestHandled", "<= requestArrivalTime: # Free up the server if request is completed serverList.add(heappop(priorityQueue)[1]) if", "Requests.py from sortedcontainers import SortedList class Solution: def 
busiestServers(self, k: int, arrival: List[int],", "no server is busy continue freeServerIndex = serverList.bisect_left(requestNumber % k) if freeServerIndex >=", "sortedcontainers import SortedList class Solution: def busiestServers(self, k: int, arrival: List[int], load: List[int])", "the server if request is completed serverList.add(heappop(priorityQueue)[1]) if not serverList: # Drop the", "import SortedList class Solution: def busiestServers(self, k: int, arrival: List[int], load: List[int]) ->", "maxRequestHandled = Counter(), [], SortedList(range(k)), 0 for requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival, load)):", "request as no server is busy continue freeServerIndex = serverList.bisect_left(requestNumber % k) if", "serverList, maxRequestHandled = Counter(), [], SortedList(range(k)), 0 for requestNumber, (requestArrivalTime, requestLoad) in enumerate(zip(arrival," ]
[ "normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True,", "of classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\") as outfile: for j", "nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples,", "(x - scale) / scale # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True,", "import * import argparse import random from sklearn.ensemble import RandomForestClassifier import torchvision.models as", "np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list =", "\"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa = [] cnn32_ece =", "train_time, test_time = run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa)", "from sklearn.model_selection import ParameterSampler from scipy.stats.distributions import expon import json def run_naive_rf(): naive_rf_kappa", "naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix", "samples_space: totalaccuracy=[] # cohen_kappa vs num training samples (cnn32) for i in range(len(rounded_list)):", "datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs", "data_transforms = 
transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))] ) run_cnn32() run_cnn32_2l()", "(naive_rf) for samples in samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time, test_time", "write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa = [] cnn32_ece = [] cnn32_train_time", "average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of classes']=num_classes", ") cohen_kappa, ece, train_time, test_time = run_dn_image_es( res, train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa)", "resnet18_test_time) if __name__ == \"__main__\": torch.multiprocessing.freeze_support() # Example usage: python cifar_10.py -m 3", "* import argparse import random from sklearn.ensemble import RandomForestClassifier import torchvision.models as models", "cifar_test_images = normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32 * 32 *", "Coauthors: <NAME> <NAME> \"\"\" from toolbox import * import argparse import random from", "num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\",", "valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece,", "[] rng = np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': 
[0.00005,0.0001,0.0005,0.001,0.005]", "transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc =", "run_naive_rf(): naive_rf_kappa = [] naive_rf_ece = [] naive_rf_train_time = [] naive_rf_test_time = []", "cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece = [] cnn32_5l_train_time = [] cnn32_5l_test_time", "vs num training samples (naive_rf) for samples in samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1)", "z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={}", "SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, )", "0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406],", "def run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece = [] cnn32_5l_train_time = [] cnn32_5l_test_time =", "cnn32_train_time = [] cnn32_test_time = [] rng = np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo':", "cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l =", "datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader,", "cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_2l,", "+ \"resnet18_train_time.txt\", resnet18_train_time) 
write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if __name__ == \"__main__\": torch.multiprocessing.freeze_support() #", "'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list = list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list = [dict((k,", "cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels,", "total_train_time=0 for samples in samples_space: totalaccuracy=[] # cohen_kappa vs num training samples (cnn32)", "[] cnn32_5l_train_time = [] cnn32_5l_test_time = [] for classes in classes_space: # cohen_kappa", "cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\",", "for (k, v) in d.items()) for d in param_list] outputlist=[] total_train_time=0 for samples", "print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\",", "= list(combinations_45(nums, n_classes)) # normalize scale = np.mean(np.arange(0, 256)) normalize = lambda x:", "import RandomForestClassifier import torchvision.models as models import torchvision.datasets as datasets import torchvision.transforms as", "[32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list = list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list = [dict((k, round(v,", "classes_space: # cohen_kappa vs num training samples (cnn32_2l) for samples in samples_space: #", "argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args = parser.parse_args() n_classes = int(args.m) prefix = args.m", "= [] cnn32_train_time = [] cnn32_test_time = [] rng = 
np.random.RandomState(0) param_grid =", "create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time =", "train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader", "3) cifar_test_images = cifar_test_images.reshape(-1, 32 * 32 * 3) #run_naive_rf() data_transforms = transforms.Compose(", "[] cnn32_5l_test_time = [] for classes in classes_space: # cohen_kappa vs num training", "data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) res", "torchvision.models as models import torchvision.datasets as datasets import torchvision.transforms as transforms from sklearn.model_selection", "transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False,", "as datasets import torchvision.transforms as transforms from sklearn.model_selection import ParameterSampler from scipy.stats.distributions import", "cnn32_5l, train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix +", "+ \"_class/\" samples_space = np.geomspace(10, 10000, num=8, dtype=int) nums = list(range(10)) random.shuffle(nums) classes_space", "classes in classes_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms", "cohen_kappa vs num training samples (cnn32_2l) for samples in samples_space: # train data", "= create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time", "vs 
num training samples (resnet18) for samples in samples_space: # train data cifar_trainset", "with open(\"parameters.json\", \"w\") as outfile: for j in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32", "write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa = [] resnet18_ece = [] resnet18_train_time", "datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset", "np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32 * 32 * 3) cifar_test_images = cifar_test_images.reshape(-1, 32", "write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa = []", "n_jobs=-1) cohen_kappa, ece, train_time, test_time = run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples,", "cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece)", "= [] naive_rf_train_time = [] naive_rf_test_time = [] for classes in classes_space: #", "in samples_space: totalaccuracy=[] # cohen_kappa vs num training samples (cnn32) for i in", "import torchvision.datasets as datasets import torchvision.transforms as transforms from sklearn.model_selection import ParameterSampler from", "len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, )", "* 32 * 3) #run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5,", "[] resnet18_ece = [] resnet18_train_time = [] resnet18_test_time = [] for classes in", "transforms.Compose( [transforms.ToTensor(), 
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms", "= [] cnn32_5l_test_time = [] for classes in classes_space: # cohen_kappa vs num", "+ \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa = [] resnet18_ece = [] resnet18_train_time =", "+ \"resnet18_ece.txt\", resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if __name__", "d in param_list] outputlist=[] total_train_time=0 for samples in samples_space: totalaccuracy=[] # cohen_kappa vs", "[] naive_rf_train_time = [] naive_rf_test_time = [] for classes in classes_space: # cohen_kappa", "= run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\")", "[] cnn32_ece = [] cnn32_train_time = [] cnn32_test_time = [] rng = np.random.RandomState(0)", "in samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time, test_time = run_rf_image_set( RF,", "32 * 3) #run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5,", "for classes in classes_space: # cohen_kappa vs num training samples (naive_rf) for samples", "6)) for (k, v) in d.items()) for d in param_list] outputlist=[] total_train_time=0 for", "samples (cnn32_5l) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\",", "print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\",", "cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( res, train_loader, valid_loader,", 
"(cnn32) for i in range(len(rounded_list)): average_accuracy=0 for classes in classes_space: # train data", "as transforms from sklearn.model_selection import ParameterSampler from scipy.stats.distributions import expon import json def", "cohen_kappa vs num training samples (resnet18) for samples in samples_space: # train data", "ece, train_time, test_time = run_dn_image_es( res, train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time)", "def run_cnn32(): cnn32_kappa = [] cnn32_ece = [] cnn32_train_time = [] cnn32_test_time =", "normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32 * 32 * 3) cifar_test_images", "outputdic['number of classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\") as outfile: for", "finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\", resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time)", "samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader, )", "classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_2l, train_loader,", "models import torchvision.datasets as datasets import torchvision.transforms as transforms from sklearn.model_selection import ParameterSampler", "train=True, download=True, transform=None ) cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets) # test data", "param_list = list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list = [dict((k, round(v, 6)) for (k, v)", "samples in samples_space: totalaccuracy=[] # cohen_kappa vs num training samples (cnn32) for i", "cifar_train_labels, 
cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es(", "download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr']", "write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa = []", "test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, batch, ) cohen_kappa, ece,", ") total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number", "\"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece = [] cnn32_5l_train_time = []", "wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier", "test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix", "in param_list] outputlist=[] total_train_time=0 for samples in samples_space: totalaccuracy=[] # cohen_kappa vs num", "= models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes)) 
train_loader, valid_loader, test_loader =", "= [] cnn32_test_time = [] rng = np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,],", "cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\")", "= res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels,", "= {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list = list(ParameterSampler(param_grid, n_iter=20,", "cifar_test_images.reshape(-1, 32 * 32 * 3) #run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5,", "= SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples,", "finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time)", "sklearn.model_selection import ParameterSampler from scipy.stats.distributions import expon import json def run_naive_rf(): naive_rf_kappa =", "parser = argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args = parser.parse_args() n_classes = int(args.m) prefix", "= np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes,", "samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader, )", "random from 
sklearn.ensemble import RandomForestClassifier import torchvision.models as models import torchvision.datasets as datasets", "naive_rf_kappa = [] naive_rf_ece = [] naive_rf_train_time = [] naive_rf_test_time = [] for", "+ \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix +", "naive_rf_test_time = [] for classes in classes_space: # cohen_kappa vs num training samples", "\"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa = [] cnn32_ece = [] cnn32_train_time = []", "= np.mean(np.arange(0, 256)) normalize = lambda x: (x - scale) / scale #", "= np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None )", "SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader = create_loaders_es(", "naive_rf_test_time) def run_cnn32(): cnn32_kappa = [] cnn32_ece = [] cnn32_train_time = [] cnn32_test_time", "res, train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix +", "def run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece = [] cnn32_2l_train_time = [] cnn32_2l_test_time =", "cnn32_5l_train_time = [] cnn32_5l_test_time = [] for classes in classes_space: # cohen_kappa vs", "ece, train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader, lr, momentum, wd, )", "cifar_testset, samples, batch, ) cohen_kappa, ece, train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader, valid_loader,", "cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, 
test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset,", "* 32 * 3) cifar_test_images = cifar_test_images.reshape(-1, 32 * 32 * 3) #run_naive_rf()", "scipy.stats.distributions import expon import json def run_naive_rf(): naive_rf_kappa = [] naive_rf_ece = []", "= [] rng = np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd':", "run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix", "cnn32_2l, train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix +", "cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece = []", "write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa = []", "\"__main__\": torch.multiprocessing.freeze_support() # Example usage: python cifar_10.py -m 3 parser = argparse.ArgumentParser() parser.add_argument(\"-m\",", "cifar_test_images = cifar_test_images.reshape(-1, 32 * 32 * 3) #run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(),", "in classes_space: # cohen_kappa vs num training samples (cnn32_5l) for samples in samples_space:", "valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, batch, ) cohen_kappa,", "= datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) 
cnn32 = SimpleCNN32Filter(len(classes))", "+ \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece", "outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece) write_result(prefix +", "data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=None ) cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels", "test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets)", "res.fc = nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset,", "- scale) / scale # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True,", "classes in classes_space: # cohen_kappa vs num training samples (resnet18) for samples in", "naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece)", "[] for classes in classes_space: # cohen_kappa vs num training samples (cnn32_5l) for", "run_cnn32(): cnn32_kappa = [] cnn32_ece = [] cnn32_train_time = [] cnn32_test_time = []", ") resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix +", "scale # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=None ) cifar_train_images", "\"_class/\" samples_space = np.geomspace(10, 10000, num=8, dtype=int) nums = 
list(range(10)) random.shuffle(nums) classes_space =", "cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa =", "[] cnn32_2l_test_time = [] for classes in classes_space: # cohen_kappa vs num training", "\"\"\" Coauthors: <NAME> <NAME> \"\"\" from toolbox import * import argparse import random", "parser.parse_args() n_classes = int(args.m) prefix = args.m + \"_class/\" samples_space = np.geomspace(10, 10000,", "n_classes = int(args.m) prefix = args.m + \"_class/\" samples_space = np.geomspace(10, 10000, num=8,", "= np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes)) train_loader,", ") cifar_test_labels = np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc = nn.Linear(num_ftrs,", "write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if __name__ == \"__main__\": torch.multiprocessing.freeze_support()", "+ \"resnet18_test_time.txt\", resnet18_test_time) if __name__ == \"__main__\": torch.multiprocessing.freeze_support() # Example usage: python cifar_10.py", "normalize scale = np.mean(np.arange(0, 256)) normalize = lambda x: (x - scale) /", "0.5, 0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485, 0.456,", "\"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa = [] resnet18_ece =", "data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32", "write_result(prefix + \"resnet18_ece.txt\", resnet18_ece) write_result(prefix + 
\"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if", "dtype=int) nums = list(range(10)) random.shuffle(nums) classes_space = list(combinations_45(nums, n_classes)) # normalize scale =", "datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader,", "= [] cnn32_5l_ece = [] cnn32_5l_train_time = [] cnn32_5l_test_time = [] for classes", "resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\", resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time)", "train=True, download=True, transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10(", "= transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] ) run_resnet18()", "root=\"./\", train=False, download=True, transform=None ) cifar_test_images = normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images =", "res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes,", "\"resnet18_test_time.txt\", resnet18_test_time) if __name__ == \"__main__\": torch.multiprocessing.freeze_support() # Example usage: python cifar_10.py -m", "help=\"class number\") args = parser.parse_args() n_classes = int(args.m) prefix = args.m + \"_class/\"", "* 3) cifar_test_images = cifar_test_images.reshape(-1, 32 * 32 * 3) #run_naive_rf() data_transforms =", "n_iter=20, random_state=rng)) rounded_list = [dict((k, round(v, 6)) for (k, v) in d.items()) for", "sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier 
outputdic['number of classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\")", "= datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes))", "train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa,", "+ \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix +", "(resnet18) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True,", "cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l =", "valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa)", "root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset =", "= RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time, test_time = run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images,", "__name__ == \"__main__\": torch.multiprocessing.freeze_support() # Example usage: python cifar_10.py -m 3 parser =", "naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix +", "'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list = 
list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list = [dict((k, round(v, 6))", "= [] naive_rf_test_time = [] for classes in classes_space: # cohen_kappa vs num", "lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset,", "cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels,", "cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader", "'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list = list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list", "in range(len(rounded_list)): average_accuracy=0 for classes in classes_space: # train data cifar_trainset = datasets.CIFAR10(", "\"\"\" from toolbox import * import argparse import random from sklearn.ensemble import RandomForestClassifier", "[dict((k, round(v, 6)) for (k, v) in d.items()) for d in param_list] outputlist=[]", "\"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\",", "train_time, test_time = run_dn_image_es( res, train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time)", "run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) 
naive_rf_test_time.append(test_time)", "\"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa", "run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece = [] cnn32_5l_train_time = [] cnn32_5l_test_time = []", "import argparse import random from sklearn.ensemble import RandomForestClassifier import torchvision.models as models import", "download=True, transform=None ) cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset", "# normalize scale = np.mean(np.arange(0, 256)) normalize = lambda x: (x - scale)", "round(v, 6)) for (k, v) in d.items()) for d in param_list] outputlist=[] total_train_time=0", "train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs = res.fc.in_features", "write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece = [] cnn32_5l_train_time", "= datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets) # test data", "write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if __name__ == \"__main__\": torch.multiprocessing.freeze_support() # Example usage: python", "[] for classes in classes_space: # cohen_kappa vs num training samples (resnet18) for", "cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\",", "for samples in samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time, test_time =", "transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) 
total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo']", "resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\",", "test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix", "for i in range(len(rounded_list)): average_accuracy=0 for classes in classes_space: # train data cifar_trainset", "prefix = args.m + \"_class/\" samples_space = np.geomspace(10, 10000, num=8, dtype=int) nums =", "data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None ) cifar_test_images = normalize(cifar_testset.data) cifar_test_labels", "\"cnn32_ece.txt\", cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa", "cnn32_kappa = [] cnn32_ece = [] cnn32_train_time = [] cnn32_test_time = [] rng", "finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time)", "= lambda x: (x - scale) / scale # train data cifar_trainset =", "train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\",", "= [] resnet18_train_time = [] resnet18_test_time = [] for classes in classes_space: #", "= [] resnet18_ece = [] resnet18_train_time = [] resnet18_test_time = [] for classes", 
"RandomForestClassifier import torchvision.models as models import torchvision.datasets as datasets import torchvision.transforms as transforms", "transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es(", "32 * 32 * 3) #run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5),", "training samples (cnn32_2l) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10(", "cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms", "= np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes,", "def run_resnet18(): resnet18_kappa = [] resnet18_ece = [] resnet18_train_time = [] resnet18_test_time =", "cifar_test_images, cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix +", "32 * 32 * 3) cifar_test_images = cifar_test_images.reshape(-1, 32 * 32 * 3)", "classes_space: # cohen_kappa vs num training samples (resnet18) for samples in samples_space: #", "test_time = run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l", "root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs =", "np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 
param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader,", "valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa)", "average_accuracy=0 for classes in classes_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True,", "= datasets.CIFAR10( root=\"./\", train=True, download=True, transform=None ) cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets)", "== \"__main__\": torch.multiprocessing.freeze_support() # Example usage: python cifar_10.py -m 3 parser = argparse.ArgumentParser()", "num training samples (cnn32_2l) for samples in samples_space: # train data cifar_trainset =", "[] for classes in classes_space: # cohen_kappa vs num training samples (cnn32_2l) for", "for classes in classes_space: # cohen_kappa vs num training samples (cnn32_5l) for samples", ") cifar_test_labels = np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd']", "[] resnet18_test_time = [] for classes in classes_space: # cohen_kappa vs num training", "samples (resnet18) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\",", "write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece = [] cnn32_2l_train_time", "python cifar_10.py -m 3 parser = argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args = parser.parse_args()", "write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) 
def", "test_time = run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece)", "cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets) # test", "= [] cnn32_5l_train_time = [] cnn32_5l_test_time = [] for classes in classes_space: #", "cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None", "datasets.CIFAR10( root=\"./\", train=True, download=True, transform=None ) cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets) #", "total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels,", "[] for classes in classes_space: # cohen_kappa vs num training samples (naive_rf) for", "= run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\")", "cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\",", "list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list = [dict((k, round(v, 6)) for (k, v) in d.items())", "yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of classes']=num_classes outputdic['sample 
size']=sample_size outputlist.append(outputdic)", ") cifar_test_images = normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32 * 32", "in d.items()) for d in param_list] outputlist=[] total_train_time=0 for samples in samples_space: totalaccuracy=[]", "vs num training samples (cnn32) for i in range(len(rounded_list)): average_accuracy=0 for classes in", "param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes,", "{'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list = list(ParameterSampler(param_grid, n_iter=20, random_state=rng))", "parser.add_argument(\"-m\", help=\"class number\") args = parser.parse_args() n_classes = int(args.m) prefix = args.m +", "0.5, 0.5), (0.5, 0.5, 0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose( [", "\"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa", "Example usage: python cifar_10.py -m 3 parser = argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args", "cnn32, train_loader, valid_loader, valid_loader, lr, momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy)", "from scipy.stats.distributions import expon import json def run_naive_rf(): naive_rf_kappa = [] naive_rf_ece =", "finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time)", 
"cnn32_2l_kappa = [] cnn32_2l_ece = [] cnn32_2l_train_time = [] cnn32_2l_test_time = [] for", "[0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list = list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list =", "in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix +", "samples in samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time, test_time = run_rf_image_set(", "train_time, test_time = run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time)", "= datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None ) cifar_test_images = normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets)", "cohen_kappa vs num training samples (cnn32) for i in range(len(rounded_list)): average_accuracy=0 for classes", ") cohen_kappa, ece, train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader, lr, momentum,", "+ \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18():", "cohen_kappa, ece, train_time, test_time = run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes,", "= argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args = parser.parse_args() n_classes = int(args.m) prefix =", "np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None ) cifar_test_images", "write_result(prefix + \"cnn32_ece.txt\", 
cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def", "num training samples (cnn32_5l) for samples in samples_space: # train data cifar_trainset =", "resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\", resnet18_ece)", "samples in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms", "(cnn32_2l) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True,", "= np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader,", "resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if __name__ == \"__main__\":", "= list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list = [dict((k, round(v, 6)) for (k, v) in", "momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy()", ") cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa)", "# cohen_kappa vs num training samples (cnn32_5l) for samples in samples_space: # train", "resnet18_test_time = [] for classes in classes_space: # cohen_kappa vs num training samples", "\"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + 
\"resnet18_ece.txt\", resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\",", "outfile: for j in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\",", "download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader =", "in classes_space: # cohen_kappa vs num training samples (resnet18) for samples in samples_space:", "[] resnet18_train_time = [] resnet18_test_time = [] for classes in classes_space: # cohen_kappa", "cifar_train_images.reshape(-1, 32 * 32 * 3) cifar_test_images = cifar_test_images.reshape(-1, 32 * 32 *", "run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece = [] cnn32_2l_train_time = [] cnn32_2l_test_time = []", "outputdic={} with open(\"parameters.json\", \"w\") as outfile: for j in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\")", "naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa =", "n_classes)) # normalize scale = np.mean(np.arange(0, 256)) normalize = lambda x: (x -", "finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time)", "cnn32_5l_ece = [] cnn32_5l_train_time = [] cnn32_5l_test_time = [] for classes in classes_space:", "classes_space: # cohen_kappa vs num training samples (cnn32_5l) for samples in samples_space: #", "* 3) #run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]", "classes_space: # cohen_kappa vs num training samples (naive_rf) for samples in 
samples_space: RF", "(k, v) in d.items()) for d in param_list] outputlist=[] total_train_time=0 for samples in", "cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, batch, ) cohen_kappa, ece, train_time, test_time,accuracy = test_dn_image_es_multiple(", "run_cnn32_5l() data_transforms = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ]", "samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa)", "= run_dn_image_es( res, train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\")", "as models import torchvision.datasets as datasets import torchvision.transforms as transforms from sklearn.model_selection import", "ece, train_time, test_time = run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes, )", "cnn32_2l_train_time = [] cnn32_2l_test_time = [] for classes in classes_space: # cohen_kappa vs", "transform=None ) cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset =", "cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix +", "samples (cnn32_2l) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\",", "num_ftrs = res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels,", "cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, 
download=True, transform=None ) cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels =", "RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time, test_time = run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels,", "cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece)", "cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa =", "transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose(", "train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\",", "write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def", "cifar_test_labels = np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes))", "training samples (cnn32_5l) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10(", "= test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader, lr, momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space)", "resnet18_ece = [] resnet18_train_time = [] resnet18_test_time = [] for classes in classes_space:", "root=\"./\", train=True, download=True, transform=None ) cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels = 
np.array(cifar_trainset.targets) # test", "torchvision.transforms as transforms from sklearn.model_selection import ParameterSampler from scipy.stats.distributions import expon import json", "print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\", resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\",", "\"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if __name__ == \"__main__\": torch.multiprocessing.freeze_support() # Example", "[] cnn32_train_time = [] cnn32_test_time = [] rng = np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025],", "test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix", "json def run_naive_rf(): naive_rf_kappa = [] naive_rf_ece = [] naive_rf_train_time = [] naive_rf_test_time", "+ \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix +", "cifar_trainset, cifar_testset, samples, batch, ) cohen_kappa, ece, train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader,", "in classes_space: # cohen_kappa vs num training samples (naive_rf) for samples in samples_space:", "write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix", "= datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) res = models.resnet18(pretrained=True)", "[] cnn32_2l_ece = [] cnn32_2l_train_time = [] 
cnn32_2l_test_time = [] for classes in", "resnet18_kappa = [] resnet18_ece = [] resnet18_train_time = [] resnet18_test_time = [] for", "datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0", "cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\",", "\"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece =", "train=False, download=True, transform=None ) cifar_test_images = normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1,", "test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader, lr, momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy", "i in range(len(rounded_list)): average_accuracy=0 for classes in classes_space: # train data cifar_trainset =", "training samples (naive_rf) for samples in samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece,", "root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader,", ") run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229,", "+ \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\", resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix +", "from sklearn.ensemble import 
RandomForestClassifier import torchvision.models as models import torchvision.datasets as datasets import", "= int(args.m) prefix = args.m + \"_class/\" samples_space = np.geomspace(10, 10000, num=8, dtype=int)", "train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader, lr, momentum, wd, ) total_train_time+=train_time", "rounded_list = [dict((k, round(v, 6)) for (k, v) in d.items()) for d in", "RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time, test_time = run_rf_image_set( RF, cifar_train_images, cifar_train_labels,", "list(range(10)) random.shuffle(nums) classes_space = list(combinations_45(nums, n_classes)) # normalize scale = np.mean(np.arange(0, 256)) normalize", "random_state=rng)) rounded_list = [dict((k, round(v, 6)) for (k, v) in d.items()) for d", "cohen_kappa, ece, train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader, lr, momentum, wd,", "transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es(", "+ \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece = [] cnn32_5l_train_time =", "write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa = []", ") cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels,", "torch.multiprocessing.freeze_support() # Example usage: python cifar_10.py -m 3 parser = argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class", "expon import json def run_naive_rf(): naive_rf_kappa = [] naive_rf_ece = [] naive_rf_train_time =", "batch, ) cohen_kappa, ece, 
train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader, lr,", "transform=None ) cifar_test_images = normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32 *", "classes in classes_space: # cohen_kappa vs num training samples (cnn32_2l) for samples in", "json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece)", "datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None ) cifar_test_images = normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images", "/ scale # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=None )", "classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\") as outfile: for j in", "import random from sklearn.ensemble import RandomForestClassifier import torchvision.models as models import torchvision.datasets as", "\"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\",", "print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\",", "if __name__ == \"__main__\": torch.multiprocessing.freeze_support() # Example usage: python cifar_10.py -m 3 parser", "resnet18_train_time = [] resnet18_test_time = [] for classes in classes_space: # cohen_kappa vs", "\"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", 
cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\",", "} param_list = list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list = [dict((k, round(v, 6)) for (k,", "= create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, batch, ) cohen_kappa, ece, train_time,", "samples (naive_rf) for samples in samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time,", "cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix", "samples (cnn32) for i in range(len(rounded_list)): average_accuracy=0 for classes in classes_space: # train", ") cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa)", "in classes_space: # cohen_kappa vs num training samples (cnn32_2l) for samples in samples_space:", "cnn32_5l_test_time = [] for classes in classes_space: # cohen_kappa vs num training samples", "classes_space = list(combinations_45(nums, n_classes)) # normalize scale = np.mean(np.arange(0, 256)) normalize = lambda", "download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc", "list(combinations_45(nums, n_classes)) # normalize scale = np.mean(np.arange(0, 256)) normalize = lambda x: (x", "for d in param_list] outputlist=[] total_train_time=0 for samples in samples_space: totalaccuracy=[] # cohen_kappa", "v) in d.items()) for d in param_list] outputlist=[] total_train_time=0 for samples in samples_space:", "num=8, dtype=int) nums = list(range(10)) random.shuffle(nums) classes_space = list(combinations_45(nums, n_classes)) # normalize scale", 
"\"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa", "= [] for classes in classes_space: # cohen_kappa vs num training samples (naive_rf)", "classes in classes_space: # cohen_kappa vs num training samples (naive_rf) for samples in", "outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece) write_result(prefix", "in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms )", "= [] cnn32_ece = [] cnn32_train_time = [] cnn32_test_time = [] rng =", "train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\",", "classes_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels", "cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_5l,", "for j in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa)", "naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa = [] cnn32_ece = []", "cohen_kappa vs num training samples (naive_rf) for samples in samples_space: RF = RandomForestClassifier(n_estimators=100,", "train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets)", "datasets import 
torchvision.transforms as transforms from sklearn.model_selection import ParameterSampler from scipy.stats.distributions import expon", "train_loader, valid_loader, valid_loader, lr, momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy)", "cnn32_5l_test_time) def run_resnet18(): resnet18_kappa = [] resnet18_ece = [] resnet18_train_time = [] resnet18_test_time", "run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,", "naive_rf_train_time = [] naive_rf_test_time = [] for classes in classes_space: # cohen_kappa vs", "np.array(cifar_testset.targets) res = models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader,", "cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece = []", "classes, cifar_trainset, cifar_testset, samples, batch, ) cohen_kappa, ece, train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32,", "for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True,", "# cohen_kappa vs num training samples (resnet18) for samples in samples_space: # train", "totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of classes']=num_classes outputdic['sample size']=sample_size", "x: (x - scale) / scale # train data cifar_trainset = datasets.CIFAR10( root=\"./\",", "scale) / scale # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=None", "= [] cnn32_2l_ece = [] 
cnn32_2l_train_time = [] cnn32_2l_test_time = [] for classes", "cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix", "np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset,", "= normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32 * 32 * 3)", "cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time)", "rng = np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] }", "cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix +", "print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\",", "= parser.parse_args() n_classes = int(args.m) prefix = args.m + \"_class/\" samples_space = np.geomspace(10,", "outputlist=[] total_train_time=0 for samples in samples_space: totalaccuracy=[] # cohen_kappa vs num training samples", "<NAME> \"\"\" from toolbox import * import argparse import random from sklearn.ensemble import", "num training samples (naive_rf) for samples in samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa,", "cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) 
cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece)", "train_time, test_time = run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time)", "write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def", "test_time = run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l", "test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader, lr, momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy)", "= [dict((k, round(v, 6)) for (k, v) in d.items()) for d in param_list]", "= normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False,", "= transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l()", "3) #run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))] )", "resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\", resnet18_ece) write_result(prefix +", "= cifar_test_images.reshape(-1, 32 * 32 * 3) #run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(), 
transforms.Normalize((0.5,", "10000, num=8, dtype=int) nums = list(range(10)) random.shuffle(nums) classes_space = list(combinations_45(nums, n_classes)) # normalize", "size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\") as outfile: for j in range(len(outputlist)): json.dump(outputlist[j],", ") cifar_train_images = normalize(cifar_trainset.data) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10(", "training samples (cnn32) for i in range(len(rounded_list)): average_accuracy=0 for classes in classes_space: #", "vs num training samples (cnn32_5l) for samples in samples_space: # train data cifar_trainset", "(cnn32_5l) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True,", "maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels,", "= datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes))", "# train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels =", "root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0", "args = parser.parse_args() n_classes = int(args.m) prefix = args.m + \"_class/\" samples_space =", "cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32 * 32 * 3) cifar_test_images =", "samples_space = np.geomspace(10, 10000, num=8, dtype=int) nums = list(range(10)) random.shuffle(nums) classes_space = list(combinations_45(nums,", "= list(range(10)) random.shuffle(nums) 
classes_space = list(combinations_45(nums, n_classes)) # normalize scale = np.mean(np.arange(0, 256))", "\"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa = [] resnet18_ece = [] resnet18_train_time = []", "cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time)", "for classes in classes_space: # cohen_kappa vs num training samples (cnn32_2l) for samples", "cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) res =", "param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list = list(ParameterSampler(param_grid,", "training samples (resnet18) for samples in samples_space: # train data cifar_trainset = datasets.CIFAR10(", "vs num training samples (cnn32_2l) for samples in samples_space: # train data cifar_trainset", "cifar_test_labels = np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs']", "average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of classes']=num_classes outputdic['sample", "cohen_kappa vs num training samples (cnn32_5l) for samples in samples_space: # train data", "num training samples (cnn32) for i in range(len(rounded_list)): average_accuracy=0 for classes in classes_space:", "cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None ) cifar_test_images = 
normalize(cifar_testset.data) cifar_test_labels =", "random.shuffle(nums) classes_space = list(combinations_45(nums, n_classes)) # normalize scale = np.mean(np.arange(0, 256)) normalize =", "\"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\",", "d.items()) for d in param_list] outputlist=[] total_train_time=0 for samples in samples_space: totalaccuracy=[] #", "batch=param['bs'] train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, batch,", "scale = np.mean(np.arange(0, 256)) normalize = lambda x: (x - scale) / scale", "for classes in classes_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True,", "cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset,", "cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32 =", "cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_2l, train_loader, valid_loader,", "cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader,", "cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix", "lr, momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) 
z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples)", "usage: python cifar_10.py -m 3 parser = argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args =", "0.5), (0.5, 0.5, 0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose( [ transforms.ToTensor(),", "= [] for classes in classes_space: # cohen_kappa vs num training samples (cnn32_5l)", "nums = list(range(10)) random.shuffle(nums) classes_space = list(combinations_45(nums, n_classes)) # normalize scale = np.mean(np.arange(0,", ") cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix +", "for samples in samples_space: totalaccuracy=[] # cohen_kappa vs num training samples (cnn32) for", "data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l", ") cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True,", "= nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset,", "= [] naive_rf_ece = [] naive_rf_train_time = [] naive_rf_test_time = [] for classes", "= cifar_train_images.reshape(-1, 32 * 32 * 3) cifar_test_images = cifar_test_images.reshape(-1, 32 * 32", "cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa = [] resnet18_ece = []", "valid_loader, valid_loader, lr, momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) 
yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32'", "def run_naive_rf(): naive_rf_kappa = [] naive_rf_ece = [] naive_rf_train_time = [] naive_rf_test_time =", "\"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece =", "(0.5, 0.5, 0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485,", "[0.00005,0.0001,0.0005,0.001,0.005] } param_list = list(ParameterSampler(param_grid, n_iter=20, random_state=rng)) rounded_list = [dict((k, round(v, 6)) for", "= np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256], 'wd': [0.00005,0.0001,0.0005,0.001,0.005] } param_list", ") cnn32_2l_kappa.append(cohen_kappa) cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix +", "run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix", "classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix", "ece, train_time, test_time = run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time)", "torchvision.datasets as datasets import torchvision.transforms as transforms from sklearn.model_selection import ParameterSampler from scipy.stats.distributions", "\"w\") 
as outfile: for j in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix", "for classes in classes_space: # cohen_kappa vs num training samples (resnet18) for samples", "cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_5l, train_loader, valid_loader, test_loader,", "[] naive_rf_test_time = [] for classes in classes_space: # cohen_kappa vs num training", "= [] cnn32_2l_train_time = [] cnn32_2l_test_time = [] for classes in classes_space: #", "naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time)", "samples_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels", "args.m + \"_class/\" samples_space = np.geomspace(10, 10000, num=8, dtype=int) nums = list(range(10)) random.shuffle(nums)", "naive_rf_ece = [] naive_rf_train_time = [] naive_rf_test_time = [] for classes in classes_space:", "write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix", "+ \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa = [] cnn32_ece", "test_time = run_dn_image_es( res, train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18", "[] cnn32_5l_ece = [] cnn32_5l_train_time = [] cnn32_5l_test_time = [] for classes in", "ece, train_time, test_time = run_dn_image_es( cnn32_2l, train_loader, valid_loader, test_loader, ) cnn32_2l_kappa.append(cohen_kappa) 
cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time)", "<NAME> <NAME> \"\"\" from toolbox import * import argparse import random from sklearn.ensemble", "np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset,", "import expon import json def run_naive_rf(): naive_rf_kappa = [] naive_rf_ece = [] naive_rf_train_time", "[] naive_rf_ece = [] naive_rf_train_time = [] naive_rf_test_time = [] for classes in", "+ \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32(): cnn32_kappa = [] cnn32_ece = [] cnn32_train_time =", "cnn32_2l_test_time = [] for classes in classes_space: # cohen_kappa vs num training samples", "write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\", resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix", "# cohen_kappa vs num training samples (naive_rf) for samples in samples_space: RF =", "# test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels =", "import torchvision.transforms as transforms from sklearn.model_selection import ParameterSampler from scipy.stats.distributions import expon import", "range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\",", "write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix", "# train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=None ) cifar_train_images =", "in classes_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, 
download=True, transform=data_transforms )", "naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix + \"naive_rf_ece.txt\",", "[] cnn32_test_time = [] rng = np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs':", "[transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))] ) run_cnn32() run_cnn32_2l() run_cnn32_5l() data_transforms =", "cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_5l, train_loader, valid_loader,", "res = models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader, test_loader", "root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader,", "lambda x: (x - scale) / scale # train data cifar_trainset = datasets.CIFAR10(", "int(args.m) prefix = args.m + \"_class/\" samples_space = np.geomspace(10, 10000, num=8, dtype=int) nums", "total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of", "train data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=None ) cifar_train_images = normalize(cifar_trainset.data)", "samples_space: RF = RandomForestClassifier(n_estimators=100, n_jobs=-1) cohen_kappa, ece, train_time, test_time = run_rf_image_set( RF, cifar_train_images,", "+ \"cnn32_kappa.txt\", 
cnn32_kappa) write_result(prefix + \"cnn32_ece.txt\", cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix +", "cifar_train_images = cifar_train_images.reshape(-1, 32 * 32 * 3) cifar_test_images = cifar_test_images.reshape(-1, 32 *", "import torchvision.models as models import torchvision.datasets as datasets import torchvision.transforms as transforms from", "#run_naive_rf() data_transforms = transforms.Compose( [transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))] ) run_cnn32()", "+ \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l():", "valid_loader, test_loader, ) cnn32_5l_kappa.append(cohen_kappa) cnn32_5l_ece.append(ece) cnn32_5l_train_time.append(train_time) cnn32_5l_test_time.append(test_time) print(\"cnn32_5l finished\") write_result(prefix + \"cnn32_5l_kappa.txt\", cnn32_5l_kappa)", "= SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 param=rounded_list[i] lr=param['lr'] momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader =", "samples, batch, ) cohen_kappa, ece, train_time, test_time,accuracy = test_dn_image_es_multiple( cnn32, train_loader, valid_loader, valid_loader,", "cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( res,", "range(len(rounded_list)): average_accuracy=0 for classes in classes_space: # train data cifar_trainset = datasets.CIFAR10( root=\"./\",", "cnn32_5l_kappa) write_result(prefix + \"cnn32_5l_ece.txt\", cnn32_5l_ece) write_result(prefix + \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time)", "# test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None ) cifar_test_images =", 
"sklearn.ensemble import RandomForestClassifier import torchvision.models as models import torchvision.datasets as datasets import torchvision.transforms", "cnn32_ece = [] cnn32_train_time = [] cnn32_test_time = [] rng = np.random.RandomState(0) param_grid", "data cifar_trainset = datasets.CIFAR10( root=\"./\", train=True, download=True, transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets) #", "cnn32_5l_kappa = [] cnn32_5l_ece = [] cnn32_5l_train_time = [] cnn32_5l_test_time = [] for", "= [] for classes in classes_space: # cohen_kappa vs num training samples (cnn32_2l)", "cnn32_test_time = [] rng = np.random.RandomState(0) param_grid = {'lr':[0.0001,0.001,0.0125,0.025], 'mo': [0.01,0.05,0.1,0.2,], 'bs': [32,64,128,256],", "= [] cnn32_2l_test_time = [] for classes in classes_space: # cohen_kappa vs num", "valid_loader, lr, momentum, wd, ) total_train_time+=train_time average_accuracy+=accuracy average_accuracy=average_accuracy/len(classes_space) totalaccuracy.append(average_accuracy) yy=np.asarray(totalaccuracy) z=np.argmax(yy) classifier='CNN32' num_classes=int(n_classes)", "outputdic['classifier']=classifier outputdic['number of classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\") as outfile:", "data_transforms = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), ] )", "param_list] outputlist=[] total_train_time=0 for samples in samples_space: totalaccuracy=[] # cohen_kappa vs num training", "from toolbox import * import argparse import random from sklearn.ensemble import RandomForestClassifier import", "create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, batch, ) cohen_kappa, ece, train_time, test_time,accuracy", "number\") args = parser.parse_args() n_classes = int(args.m) prefix = args.m + \"_class/\" samples_space", "32 * 3) 
cifar_test_images = cifar_test_images.reshape(-1, 32 * 32 * 3) #run_naive_rf() data_transforms", "SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, )", "outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\") as outfile: for j in range(len(outputlist)): json.dump(outputlist[j], outfile)", "download=True, transform=data_transforms ) cifar_train_labels = np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\",", "+ \"cnn32_5l_train_time.txt\", cnn32_5l_train_time) write_result(prefix + \"cnn32_5l_test_time.txt\", cnn32_5l_test_time) def run_resnet18(): resnet18_kappa = [] resnet18_ece", "classifier='CNN32' num_classes=int(n_classes) sample_size=int(samples) outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with", "= np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms )", "train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader", "+ \"cnn32_ece.txt\", cnn32_ece) write_result(prefix + \"cnn32_train_time.txt\", cnn32_train_time) write_result(prefix + \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l():", "run_dn_image_es( res, train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece) resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix", ") naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) 
print(\"naive_rf finished\") write_result(prefix + \"naive_rf_kappa.txt\", naive_rf_kappa) write_result(prefix +", "# cohen_kappa vs num training samples (cnn32) for i in range(len(rounded_list)): average_accuracy=0 for", "download=True, transform=None ) cifar_test_images = normalize(cifar_testset.data) cifar_test_labels = np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32", "classes in classes_space: # cohen_kappa vs num training samples (cnn32_5l) for samples in", "cifar_10.py -m 3 parser = argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args = parser.parse_args() n_classes", "test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time,", "train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, batch, )", "cnn32_2l_ece.append(ece) cnn32_2l_train_time.append(train_time) cnn32_2l_test_time.append(test_time) print(\"cnn32_2l finished\") write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece)", "256)) normalize = lambda x: (x - scale) / scale # train data", "\"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece = [] cnn32_2l_train_time = []", "outputdic=rounded_list[z].copy() outputdic['classifier']=classifier outputdic['number of classes']=num_classes outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\") as", "+ \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa = [] cnn32_5l_ece", "download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l = SimpleCNN32Filter5Layers(len(classes)) train_loader, valid_loader, 
test_loader =", "run_cnn32_2l() run_cnn32_5l() data_transforms = transforms.Compose( [ transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),", "classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( res, train_loader,", "RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time) naive_rf_test_time.append(test_time) print(\"naive_rf", "cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece = [] cnn32_2l_train_time = [] cnn32_2l_test_time", "= [] resnet18_test_time = [] for classes in classes_space: # cohen_kappa vs num", "j in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix + \"cnn32_kappa.txt\", cnn32_kappa) write_result(prefix", "data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32_5l", "= run_rf_image_set( RF, cifar_train_images, cifar_train_labels, cifar_test_images, cifar_test_labels, samples, classes, ) naive_rf_kappa.append(cohen_kappa) naive_rf_ece.append(ece) naive_rf_train_time.append(train_time)", "wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples,", "samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( res, train_loader, valid_loader, test_loader, )", "train=False, download=True, transform=data_transforms ) cifar_test_labels = np.array(cifar_testset.targets) cnn32 = SimpleCNN32Filter(len(classes)) total_train_time=0 maxaccuracy=0 param=rounded_list[i]", "argparse import random from sklearn.ensemble import RandomForestClassifier import torchvision.models as models import 
torchvision.datasets", "\"resnet18_ece.txt\", resnet18_ece) write_result(prefix + \"resnet18_train_time.txt\", resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if __name__ ==", "toolbox import * import argparse import random from sklearn.ensemble import RandomForestClassifier import torchvision.models", "np.mean(np.arange(0, 256)) normalize = lambda x: (x - scale) / scale # train", "totalaccuracy=[] # cohen_kappa vs num training samples (cnn32) for i in range(len(rounded_list)): average_accuracy=0", "import json def run_naive_rf(): naive_rf_kappa = [] naive_rf_ece = [] naive_rf_train_time = []", "momentum=param['mo'] wd=param['wd'] batch=param['bs'] train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset,", "cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( res, train_loader, valid_loader, test_loader,", "ParameterSampler from scipy.stats.distributions import expon import json def run_naive_rf(): naive_rf_kappa = [] naive_rf_ece", "+ \"cnn32_test_time.txt\", cnn32_test_time) def run_cnn32_2l(): cnn32_2l_kappa = [] cnn32_2l_ece = [] cnn32_2l_train_time =", "transforms from sklearn.model_selection import ParameterSampler from scipy.stats.distributions import expon import json def run_naive_rf():", "import ParameterSampler from scipy.stats.distributions import expon import json def run_naive_rf(): naive_rf_kappa = []", "normalize = lambda x: (x - scale) / scale # train data cifar_trainset", "np.geomspace(10, 10000, num=8, dtype=int) nums = list(range(10)) random.shuffle(nums) classes_space = list(combinations_45(nums, n_classes)) #", "open(\"parameters.json\", \"w\") as outfile: for j in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\")", "= np.geomspace(10, 10000, num=8, dtype=int) nums = list(range(10)) random.shuffle(nums) classes_space = 
list(combinations_45(nums, n_classes))", "np.array(cifar_trainset.targets) # test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=data_transforms ) cifar_test_labels", "# cohen_kappa vs num training samples (cnn32_2l) for samples in samples_space: # train", "resnet18_train_time) write_result(prefix + \"resnet18_test_time.txt\", resnet18_test_time) if __name__ == \"__main__\": torch.multiprocessing.freeze_support() # Example usage:", "+ \"naive_rf_ece.txt\", naive_rf_ece) write_result(prefix + \"naive_rf_train_time.txt\", naive_rf_train_time) write_result(prefix + \"naive_rf_test_time.txt\", naive_rf_test_time) def run_cnn32():", "run_resnet18(): resnet18_kappa = [] resnet18_ece = [] resnet18_train_time = [] resnet18_test_time = []", "as outfile: for j in range(len(outputlist)): json.dump(outputlist[j], outfile) outfile.write(\"\\n\") print(\"cnn32 finished\") write_result(prefix +", "cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples, batch, ) cohen_kappa, ece, train_time, test_time,accuracy =", "write_result(prefix + \"cnn32_2l_kappa.txt\", cnn32_2l_kappa) write_result(prefix + \"cnn32_2l_ece.txt\", cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix", "num training samples (resnet18) for samples in samples_space: # train data cifar_trainset =", "# Example usage: python cifar_10.py -m 3 parser = argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\")", "[] cnn32_2l_train_time = [] cnn32_2l_test_time = [] for classes in classes_space: # cohen_kappa", "test data cifar_testset = datasets.CIFAR10( root=\"./\", train=False, download=True, transform=None ) cifar_test_images = normalize(cifar_testset.data)", "cnn32_2l_ece) write_result(prefix + \"cnn32_2l_train_time.txt\", cnn32_2l_train_time) write_result(prefix + \"cnn32_2l_test_time.txt\", cnn32_2l_test_time) def run_cnn32_5l(): cnn32_5l_kappa =", "-m 3 parser = 
argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args = parser.parse_args() n_classes =", "3 parser = argparse.ArgumentParser() parser.add_argument(\"-m\", help=\"class number\") args = parser.parse_args() n_classes = int(args.m)", "classes, cifar_trainset, cifar_testset, samples, ) cohen_kappa, ece, train_time, test_time = run_dn_image_es( cnn32_5l, train_loader,", "= args.m + \"_class/\" samples_space = np.geomspace(10, 10000, num=8, dtype=int) nums = list(range(10))", "= SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels, cifar_test_labels, classes, cifar_trainset, cifar_testset, samples,", "cohen_kappa, ece, train_time, test_time = run_dn_image_es( res, train_loader, valid_loader, test_loader, ) resnet18_kappa.append(cohen_kappa) resnet18_ece.append(ece)", "cnn32_2l_ece = [] cnn32_2l_train_time = [] cnn32_2l_test_time = [] for classes in classes_space:", "= [] for classes in classes_space: # cohen_kappa vs num training samples (resnet18)", "outputdic['sample size']=sample_size outputlist.append(outputdic) outputdic={} with open(\"parameters.json\", \"w\") as outfile: for j in range(len(outputlist)):", ") cifar_test_labels = np.array(cifar_testset.targets) cnn32_2l = SimpleCNN32Filter2Layers(len(classes)) train_loader, valid_loader, test_loader = create_loaders_es( cifar_train_labels,", "models.resnet18(pretrained=True) num_ftrs = res.fc.in_features res.fc = nn.Linear(num_ftrs, len(classes)) train_loader, valid_loader, test_loader = create_loaders_es(", "= np.array(cifar_testset.targets) cifar_train_images = cifar_train_images.reshape(-1, 32 * 32 * 3) cifar_test_images = cifar_test_images.reshape(-1,", "resnet18_train_time.append(train_time) resnet18_test_time.append(test_time) print(\"resnet18 finished\") write_result(prefix + \"resnet18_kappa.txt\", resnet18_kappa) write_result(prefix + \"resnet18_ece.txt\", resnet18_ece) write_result(prefix" ]
[ "input(\"Enter Country Name : \") covid = Covid() data = covid.get_status_by_country_name(country) cadr =", "key: data[key] for key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys()) v", "data[key] for key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys()) v =", "country = input(\"Enter Country Name : \") covid = Covid() data = covid.get_status_by_country_name(country)", "{\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys()) v = list(cadr.values()) print(cadr) pyplot.title(country) pyplot.bar(range(len(cadr)), v, tick_label=n)", "cadr = { key: data[key] for key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n", "& {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys()) v = list(cadr.values()) print(cadr) pyplot.title(country) pyplot.bar(range(len(cadr)), v,", "= covid.get_status_by_country_name(country) cadr = { key: data[key] for key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"}", "from covid import Covid import matplotlib.pyplot as pyplot country = input(\"Enter Country Name", "import matplotlib.pyplot as pyplot country = input(\"Enter Country Name : \") covid =", "<reponame>Lal-Jr/COVID-Tracker<filename>COVID-Tracker.py from covid import Covid import matplotlib.pyplot as pyplot country = input(\"Enter Country", "data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys()) v = list(cadr.values()) print(cadr) pyplot.title(country) pyplot.bar(range(len(cadr)),", "import Covid import matplotlib.pyplot as pyplot country = input(\"Enter Country Name : \")", "Covid() data = covid.get_status_by_country_name(country) cadr = { key: data[key] for key in data.keys()", "key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys()) v = list(cadr.values()) print(cadr)", "covid.get_status_by_country_name(country) cadr = { key: data[key] for 
key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} }", "= { key: data[key] for key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n =", "in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys()) v = list(cadr.values()) print(cadr) pyplot.title(country)", "} n = list(cadr.keys()) v = list(cadr.values()) print(cadr) pyplot.title(country) pyplot.bar(range(len(cadr)), v, tick_label=n) pyplot.show()", "Covid import matplotlib.pyplot as pyplot country = input(\"Enter Country Name : \") covid", "Name : \") covid = Covid() data = covid.get_status_by_country_name(country) cadr = { key:", "Country Name : \") covid = Covid() data = covid.get_status_by_country_name(country) cadr = {", "data = covid.get_status_by_country_name(country) cadr = { key: data[key] for key in data.keys() &", "covid = Covid() data = covid.get_status_by_country_name(country) cadr = { key: data[key] for key", "{ key: data[key] for key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys())", "for key in data.keys() & {\"confirmed\",\"active\",\"deaths\",\"recovered\"} } n = list(cadr.keys()) v = list(cadr.values())", ": \") covid = Covid() data = covid.get_status_by_country_name(country) cadr = { key: data[key]", "covid import Covid import matplotlib.pyplot as pyplot country = input(\"Enter Country Name :", "matplotlib.pyplot as pyplot country = input(\"Enter Country Name : \") covid = Covid()", "as pyplot country = input(\"Enter Country Name : \") covid = Covid() data", "pyplot country = input(\"Enter Country Name : \") covid = Covid() data =", "\") covid = Covid() data = covid.get_status_by_country_name(country) cadr = { key: data[key] for", "= input(\"Enter Country Name : \") covid = Covid() data = covid.get_status_by_country_name(country) cadr", "= Covid() data = covid.get_status_by_country_name(country) cadr = { key: data[key] for key in" ]
[ "= AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio", "= Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot set lock for key None.\"): await cache.lock()", "data = {\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value = data cache = Cache(mock_store) await", "cache = Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot set lock for key None.\"): await", "cache.lock() @mark.asyncio async def test_lock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot set lock", "async def test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker)", "Cache(Mock()) with raises(CacheLockError, match=\"Cannot unset lock with locker None.\"): await cache.unlock() @mark.asyncio async", "datetime.now(), False), (\"test\", None, False), ], ) @mark.asyncio async def test_update(value, expires_at, updated,", "cache._key = key await cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def test_unlock_key_not_set(): mock_locker = Mock()", "== updated @mark.asyncio async def test_lock(): key = \"test\" mock_locker = Mock() mock_locker.lock", "mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set(): cache = Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot set", "cache.expires_at = expires_at response = await cache.update() assert response == updated @mark.asyncio async", "async def test_unlock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot unset lock with locker", "mock_store(): return AsyncMock() @mark.asyncio async def test_load(mock_store): data = {\"data\": \"test\", \"expires_at\": datetime.now()}", "Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await cache.unlock()", "def 
test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) with", "= {\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value = data cache = Cache(mock_store) await cache.load(\"test_cache\")", "= await cache.update() assert response == updated @mark.asyncio async def test_lock(): key =", "import Cache from limberframework.cache.exceptions import CacheLockError @fixture def mock_store(): return AsyncMock() @mark.asyncio async", "raises(CacheLockError, match=\"Cannot unset lock for key None.\"): await cache.unlock() @mark.asyncio async def test_unlock_locker_not_set():", "cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock = AsyncMock() cache", "expires_at, updated, mock_store): mock_store.put.return_value = True key = \"test_key\" cache = Cache(mock_store) cache._key", "assert response == updated @mark.asyncio async def test_lock(): key = \"test\" mock_locker =", "mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock = AsyncMock() cache =", "cache = Cache(Mock(), mock_locker) with raises(CacheLockError, match=\"Cannot unset lock for key None.\"): await", "Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot set lock for key None.\"): await cache.lock() @mark.asyncio", "test_lock_key_not_set(): cache = Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot set lock for key None.\"):", "= key cache.value = value cache.expires_at = expires_at response = await cache.update() assert", "updated, mock_store): mock_store.put.return_value = True key = \"test_key\" cache = Cache(mock_store) cache._key =", "value cache.expires_at = expires_at response = await cache.update() assert response == updated @mark.asyncio", "= AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await 
cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio", "lock for key None.\"): await cache.unlock() @mark.asyncio async def test_unlock_locker_not_set(): cache = Cache(Mock())", "lock for key None.\"): await cache.lock() @mark.asyncio async def test_lock_locker_not_set(): cache = Cache(Mock())", "response == updated @mark.asyncio async def test_lock(): key = \"test\" mock_locker = Mock()", "with raises(CacheLockError, match=\"Cannot unset lock for key None.\"): await cache.unlock() @mark.asyncio async def", "async def test_lock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot set lock with locker", "with raises(CacheLockError, match=\"Cannot set lock for key None.\"): await cache.lock() @mark.asyncio async def", "Cache(Mock()) with raises(CacheLockError, match=\"Cannot set lock with locker None.\"): await cache.lock() @mark.asyncio async", "for key None.\"): await cache.lock() @mark.asyncio async def test_lock_locker_not_set(): cache = Cache(Mock()) with", "= Cache(mock_store) cache._key = key cache.value = value cache.expires_at = expires_at response =", "lock with locker None.\"): await cache.unlock() @mark.asyncio async def test_secure(): mock_lock = AsyncMock()", "import fixture, mark, raises from limberframework.cache.cache import Cache from limberframework.cache.exceptions import CacheLockError @fixture", "cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(), True), (None, None, False), (None,", "test_load(mock_store): data = {\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value = data cache = Cache(mock_store)", "= Cache(Mock()) with raises(CacheLockError, match=\"Cannot unset lock with locker None.\"): await cache.unlock() @mark.asyncio", "== \"test_cache\" assert cache.value == data[\"data\"] assert cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [", 
"raises(CacheLockError, match=\"Cannot set lock with locker None.\"): await cache.lock() @mark.asyncio async def test_unlock():", "\"test\" mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key =", "{\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value = data cache = Cache(mock_store) await cache.load(\"test_cache\") assert", "await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set(): cache = Cache(Mock(), Mock()) with raises(CacheLockError,", "match=\"Cannot set lock for key None.\"): await cache.lock() @mark.asyncio async def test_lock_locker_not_set(): cache", "\"test_key\" cache = Cache(mock_store) cache._key = key cache.value = value cache.expires_at = expires_at", "Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) with raises(CacheLockError, match=\"Cannot unset lock", "cache.update() assert response == updated @mark.asyncio async def test_lock(): key = \"test\" mock_locker", "pytest import fixture, mark, raises from limberframework.cache.cache import Cache from limberframework.cache.exceptions import CacheLockError", "= Cache(mock_store) await cache.load(\"test_cache\") assert cache._key == \"test_cache\" assert cache.value == data[\"data\"] assert", "Cache(Mock(), mock_locker) cache._key = key await cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def test_unlock_key_not_set(): mock_locker", "Cache(Mock(), mock_locker) with raises(CacheLockError, match=\"Cannot unset lock for key None.\"): await cache.unlock() @mark.asyncio", "None.\"): await cache.lock() @mark.asyncio async def test_lock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot", "with raises(CacheLockError, match=\"Cannot set lock with locker None.\"): await cache.lock() @mark.asyncio async def", "cache = Cache(Mock(), mock_locker) cache._key = key await cache.unlock() 
mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def", "updated @mark.asyncio async def test_lock(): key = \"test\" mock_locker = Mock() mock_locker.lock =", "datetime from unittest.mock import AsyncMock, Mock from pytest import fixture, mark, raises from", "cache = Cache(Mock(), mock_locker) cache._key = key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def", "Cache(Mock(), mock_locker) cache._key = key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set(): cache", "== data[\"data\"] assert cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(), True), (None,", "cache = Cache(mock_store) await cache.load(\"test_cache\") assert cache._key == \"test_cache\" assert cache.value == data[\"data\"]", "@mark.asyncio async def test_lock_key_not_set(): cache = Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot set lock", "data cache = Cache(mock_store) await cache.load(\"test_cache\") assert cache._key == \"test_cache\" assert cache.value ==", "limberframework.cache.exceptions import CacheLockError @fixture def mock_store(): return AsyncMock() @mark.asyncio async def test_load(mock_store): data", "= Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await", "None.\"): await cache.unlock() @mark.asyncio async def test_secure(): mock_lock = AsyncMock() mock_unlock = AsyncMock()", "assert cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(), True), (None, None, False),", "def test_lock(): key = \"test\" mock_locker = Mock() mock_locker.lock = AsyncMock() cache =", "cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot set lock with locker None.\"): await cache.lock()", "@mark.asyncio async def test_unlock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, 
match=\"Cannot unset lock with", "key = \"test\" mock_locker = Mock() mock_locker.lock = AsyncMock() cache = Cache(Mock(), mock_locker)", "], ) @mark.asyncio async def test_update(value, expires_at, updated, mock_store): mock_store.put.return_value = True key", "cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot unset lock with locker None.\"): await cache.unlock()", "test_lock(): key = \"test\" mock_locker = Mock() mock_locker.lock = AsyncMock() cache = Cache(Mock(),", "await cache.lock() @mark.asyncio async def test_lock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot set", "AsyncMock, Mock from pytest import fixture, mark, raises from limberframework.cache.cache import Cache from", "= AsyncMock() mock_unlock = AsyncMock() cache = Cache(Mock()) cache.lock = mock_lock cache.unlock =", "assert cache._key == \"test_cache\" assert cache.value == data[\"data\"] assert cache.expires_at == data[\"expires_at\"] @mark.parametrize(", "await cache.update() assert response == updated @mark.asyncio async def test_lock(): key = \"test\"", "cache.load(\"test_cache\") assert cache._key == \"test_cache\" assert cache.value == data[\"data\"] assert cache.expires_at == data[\"expires_at\"]", "from unittest.mock import AsyncMock, Mock from pytest import fixture, mark, raises from limberframework.cache.cache", "= AsyncMock() cache = Cache(Mock(), mock_locker) with raises(CacheLockError, match=\"Cannot unset lock for key", "@mark.asyncio async def test_lock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot set lock with", "datetime.now(), True), (None, None, False), (None, datetime.now(), False), (\"test\", None, False), ], )", "unset lock for key None.\"): await cache.unlock() @mark.asyncio async def test_unlock_locker_not_set(): cache =", "key await cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock 
=", "match=\"Cannot unset lock for key None.\"): await cache.unlock() @mark.asyncio async def test_unlock_locker_not_set(): cache", "cache.value = value cache.expires_at = expires_at response = await cache.update() assert response ==", "\"test_cache\" assert cache.value == data[\"data\"] assert cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\",", "import datetime from unittest.mock import AsyncMock, Mock from pytest import fixture, mark, raises", "def test_unlock(): key = \"test\" mock_locker = Mock() mock_locker.unlock = AsyncMock() cache =", "with raises(CacheLockError, match=\"Cannot unset lock with locker None.\"): await cache.unlock() @mark.asyncio async def", "response = await cache.update() assert response == updated @mark.asyncio async def test_lock(): key", "with locker None.\"): await cache.unlock() @mark.asyncio async def test_secure(): mock_lock = AsyncMock() mock_unlock", "set lock with locker None.\"): await cache.lock() @mark.asyncio async def test_unlock(): key =", "assert cache.value == data[\"data\"] assert cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(),", "@mark.asyncio async def test_update(value, expires_at, updated, mock_store): mock_store.put.return_value = True key = \"test_key\"", "mock_store.put.return_value = True key = \"test_key\" cache = Cache(mock_store) cache._key = key cache.value", "\"value,expires_at,updated\", [ (\"test\", datetime.now(), True), (None, None, False), (None, datetime.now(), False), (\"test\", None,", "@fixture def mock_store(): return AsyncMock() @mark.asyncio async def test_load(mock_store): data = {\"data\": \"test\",", "await cache.lock() @mark.asyncio async def test_unlock(): key = \"test\" mock_locker = Mock() mock_locker.unlock", "= Cache(Mock(), mock_locker) cache._key = key await cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def 
test_unlock_key_not_set():", "mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) with raises(CacheLockError, match=\"Cannot unset lock for", "AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async", "async def test_secure(): mock_lock = AsyncMock() mock_unlock = AsyncMock() cache = Cache(Mock()) cache.lock", "AsyncMock() @mark.asyncio async def test_load(mock_store): data = {\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value =", "def test_load(mock_store): data = {\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value = data cache =", "@mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(), True), (None, None, False), (None, datetime.now(), False), (\"test\",", "with locker None.\"): await cache.lock() @mark.asyncio async def test_unlock(): key = \"test\" mock_locker", "Cache(mock_store) await cache.load(\"test_cache\") assert cache._key == \"test_cache\" assert cache.value == data[\"data\"] assert cache.expires_at", "key cache.value = value cache.expires_at = expires_at response = await cache.update() assert response", "True), (None, None, False), (None, datetime.now(), False), (\"test\", None, False), ], ) @mark.asyncio", "= Cache(Mock()) with raises(CacheLockError, match=\"Cannot set lock with locker None.\"): await cache.lock() @mark.asyncio", "mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) with raises(CacheLockError, match=\"Cannot", "raises(CacheLockError, match=\"Cannot unset lock with locker None.\"): await cache.unlock() @mark.asyncio async def test_secure():", "key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set(): cache = Cache(Mock(), Mock()) with", "cache.unlock() @mark.asyncio async def test_secure(): mock_lock = AsyncMock() mock_unlock = AsyncMock() cache =", 
"@mark.asyncio async def test_unlock(): key = \"test\" mock_locker = Mock() mock_locker.unlock = AsyncMock()", "datetime.now()} mock_store.get.return_value = data cache = Cache(mock_store) await cache.load(\"test_cache\") assert cache._key == \"test_cache\"", "False), (\"test\", None, False), ], ) @mark.asyncio async def test_update(value, expires_at, updated, mock_store):", "mock_locker) cache._key = key await cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def test_unlock_key_not_set(): mock_locker =", "datetime import datetime from unittest.mock import AsyncMock, Mock from pytest import fixture, mark,", "cache._key = key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set(): cache = Cache(Mock(),", "limberframework.cache.cache import Cache from limberframework.cache.exceptions import CacheLockError @fixture def mock_store(): return AsyncMock() @mark.asyncio", "set lock for key None.\"): await cache.lock() @mark.asyncio async def test_lock_locker_not_set(): cache =", "locker None.\"): await cache.lock() @mark.asyncio async def test_unlock(): key = \"test\" mock_locker =", "mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await cache.unlock() mock_locker.unlock.assert_called_once_with(key)", "@mark.asyncio async def test_secure(): mock_lock = AsyncMock() mock_unlock = AsyncMock() cache = Cache(Mock())", "cache = Cache(mock_store) cache._key = key cache.value = value cache.expires_at = expires_at response", "unittest.mock import AsyncMock, Mock from pytest import fixture, mark, raises from limberframework.cache.cache import", ") @mark.asyncio async def test_update(value, expires_at, updated, mock_store): mock_store.put.return_value = True key =", "from limberframework.cache.exceptions import CacheLockError @fixture def mock_store(): return AsyncMock() @mark.asyncio async def test_load(mock_store):", "mock_locker = Mock() mock_locker.lock 
= AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key", "[ (\"test\", datetime.now(), True), (None, None, False), (None, datetime.now(), False), (\"test\", None, False),", "= Cache(Mock(), mock_locker) with raises(CacheLockError, match=\"Cannot unset lock for key None.\"): await cache.unlock()", "expires_at response = await cache.update() assert response == updated @mark.asyncio async def test_lock():", "mock_unlock = AsyncMock() cache = Cache(Mock()) cache.lock = mock_lock cache.unlock = mock_unlock async", "= key await cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock", "await cache.unlock() @mark.asyncio async def test_secure(): mock_lock = AsyncMock() mock_unlock = AsyncMock() cache", "fixture, mark, raises from limberframework.cache.cache import Cache from limberframework.cache.exceptions import CacheLockError @fixture def", "cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set(): cache = Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot", "def test_secure(): mock_lock = AsyncMock() mock_unlock = AsyncMock() cache = Cache(Mock()) cache.lock =", "async def test_lock_key_not_set(): cache = Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot set lock for", "await cache.unlock() @mark.asyncio async def test_unlock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot unset", "async def test_unlock(): key = \"test\" mock_locker = Mock() mock_locker.unlock = AsyncMock() cache", "def test_lock_key_not_set(): cache = Cache(Mock(), Mock()) with raises(CacheLockError, match=\"Cannot set lock for key", "unset lock with locker None.\"): await cache.unlock() @mark.asyncio async def test_secure(): mock_lock =", "mock_locker) with raises(CacheLockError, match=\"Cannot unset lock for key None.\"): await cache.unlock() @mark.asyncio async", "key None.\"): 
await cache.unlock() @mark.asyncio async def test_unlock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError,", "True key = \"test_key\" cache = Cache(mock_store) cache._key = key cache.value = value", "from limberframework.cache.cache import Cache from limberframework.cache.exceptions import CacheLockError @fixture def mock_store(): return AsyncMock()", "\"test\", \"expires_at\": datetime.now()} mock_store.get.return_value = data cache = Cache(mock_store) await cache.load(\"test_cache\") assert cache._key", "Cache(mock_store) cache._key = key cache.value = value cache.expires_at = expires_at response = await", "for key None.\"): await cache.unlock() @mark.asyncio async def test_unlock_locker_not_set(): cache = Cache(Mock()) with", "from pytest import fixture, mark, raises from limberframework.cache.cache import Cache from limberframework.cache.exceptions import", "= AsyncMock() cache = Cache(Mock()) cache.lock = mock_lock cache.unlock = mock_unlock async with", "None.\"): await cache.lock() @mark.asyncio async def test_unlock(): key = \"test\" mock_locker = Mock()", "= expires_at response = await cache.update() assert response == updated @mark.asyncio async def", "import AsyncMock, Mock from pytest import fixture, mark, raises from limberframework.cache.cache import Cache", "None, False), ], ) @mark.asyncio async def test_update(value, expires_at, updated, mock_store): mock_store.put.return_value =", "= data cache = Cache(mock_store) await cache.load(\"test_cache\") assert cache._key == \"test_cache\" assert cache.value", "AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async", "AsyncMock() cache = Cache(Mock()) cache.lock = mock_lock cache.unlock = mock_unlock async with cache.secure():", "= Mock() mock_locker.lock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await", "cache.lock() @mark.asyncio async def test_unlock(): key = \"test\" 
mock_locker = Mock() mock_locker.unlock =", "= Cache(Mock()) cache.lock = mock_lock cache.unlock = mock_unlock async with cache.secure(): pass mock_lock.assert_called_once()", "= \"test_key\" cache = Cache(mock_store) cache._key = key cache.value = value cache.expires_at =", "<filename>tests/cache/test_cache.py from datetime import datetime from unittest.mock import AsyncMock, Mock from pytest import", "key None.\"): await cache.lock() @mark.asyncio async def test_lock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError,", "data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(), True), (None, None, False), (None, datetime.now(), False),", "def test_unlock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot unset lock with locker None.\"):", "\"test\" mock_locker = Mock() mock_locker.lock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key =", "return AsyncMock() @mark.asyncio async def test_load(mock_store): data = {\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value", "mark, raises from limberframework.cache.cache import Cache from limberframework.cache.exceptions import CacheLockError @fixture def mock_store():", "test_update(value, expires_at, updated, mock_store): mock_store.put.return_value = True key = \"test_key\" cache = Cache(mock_store)", "test_secure(): mock_lock = AsyncMock() mock_unlock = AsyncMock() cache = Cache(Mock()) cache.lock = mock_lock", "mock_lock = AsyncMock() mock_unlock = AsyncMock() cache = Cache(Mock()) cache.lock = mock_lock cache.unlock", "def mock_store(): return AsyncMock() @mark.asyncio async def test_load(mock_store): data = {\"data\": \"test\", \"expires_at\":", "def test_update(value, expires_at, updated, mock_store): mock_store.put.return_value = True key = \"test_key\" cache =", "raises(CacheLockError, match=\"Cannot set lock for key None.\"): await cache.lock() @mark.asyncio async def 
test_lock_locker_not_set():", "data[\"data\"] assert cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(), True), (None, None,", "= \"test\" mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key", "test_unlock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot unset lock with locker None.\"): await", "\"expires_at\": datetime.now()} mock_store.get.return_value = data cache = Cache(mock_store) await cache.load(\"test_cache\") assert cache._key ==", "await cache.unlock() mock_locker.unlock.assert_called_once_with(key) @mark.asyncio async def test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock = AsyncMock()", "from datetime import datetime from unittest.mock import AsyncMock, Mock from pytest import fixture,", "async def test_load(mock_store): data = {\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value = data cache", "@mark.asyncio async def test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(),", "== data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(), True), (None, None, False), (None, datetime.now(),", "def test_lock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot set lock with locker None.\"):", "mock_store.get.return_value = data cache = Cache(mock_store) await cache.load(\"test_cache\") assert cache._key == \"test_cache\" assert", "cache.value == data[\"data\"] assert cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\", [ (\"test\", datetime.now(), True),", "None.\"): await cache.unlock() @mark.asyncio async def test_unlock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot", "async def test_lock(): key = \"test\" mock_locker = Mock() mock_locker.lock = AsyncMock() cache", "locker 
None.\"): await cache.unlock() @mark.asyncio async def test_secure(): mock_lock = AsyncMock() mock_unlock =", "match=\"Cannot set lock with locker None.\"): await cache.lock() @mark.asyncio async def test_unlock(): key", "mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key", "raises from limberframework.cache.cache import Cache from limberframework.cache.exceptions import CacheLockError @fixture def mock_store(): return", "= key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set(): cache = Cache(Mock(), Mock())", "None, False), (None, datetime.now(), False), (\"test\", None, False), ], ) @mark.asyncio async def", "cache.unlock() @mark.asyncio async def test_unlock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot unset lock", "= Cache(Mock(), mock_locker) cache._key = key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set():", "= True key = \"test_key\" cache = Cache(mock_store) cache._key = key cache.value =", "Mock()) with raises(CacheLockError, match=\"Cannot set lock for key None.\"): await cache.lock() @mark.asyncio async", "key = \"test\" mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker)", "= \"test\" mock_locker = Mock() mock_locker.lock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key", "cache._key = key cache.value = value cache.expires_at = expires_at response = await cache.update()", "(None, datetime.now(), False), (\"test\", None, False), ], ) @mark.asyncio async def test_update(value, expires_at,", "lock with locker None.\"): await cache.lock() @mark.asyncio async def test_unlock(): key = \"test\"", "test_unlock_key_not_set(): mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) with raises(CacheLockError,", "AsyncMock() cache = Cache(Mock(), mock_locker) with 
raises(CacheLockError, match=\"Cannot unset lock for key None.\"):", "(\"test\", None, False), ], ) @mark.asyncio async def test_update(value, expires_at, updated, mock_store): mock_store.put.return_value", "import CacheLockError @fixture def mock_store(): return AsyncMock() @mark.asyncio async def test_load(mock_store): data =", "CacheLockError @fixture def mock_store(): return AsyncMock() @mark.asyncio async def test_load(mock_store): data = {\"data\":", "Mock from pytest import fixture, mark, raises from limberframework.cache.cache import Cache from limberframework.cache.exceptions", "= value cache.expires_at = expires_at response = await cache.update() assert response == updated", "match=\"Cannot unset lock with locker None.\"): await cache.unlock() @mark.asyncio async def test_secure(): mock_lock", "test_unlock(): key = \"test\" mock_locker = Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(),", "Cache from limberframework.cache.exceptions import CacheLockError @fixture def mock_store(): return AsyncMock() @mark.asyncio async def", "await cache.load(\"test_cache\") assert cache._key == \"test_cache\" assert cache.value == data[\"data\"] assert cache.expires_at ==", "@mark.asyncio async def test_lock(): key = \"test\" mock_locker = Mock() mock_locker.lock = AsyncMock()", "cache._key == \"test_cache\" assert cache.value == data[\"data\"] assert cache.expires_at == data[\"expires_at\"] @mark.parametrize( \"value,expires_at,updated\",", "(\"test\", datetime.now(), True), (None, None, False), (None, datetime.now(), False), (\"test\", None, False), ],", "async def test_update(value, expires_at, updated, mock_store): mock_store.put.return_value = True key = \"test_key\" cache", "(None, None, False), (None, datetime.now(), False), (\"test\", None, False), ], ) @mark.asyncio async", "key = \"test_key\" cache = Cache(mock_store) cache._key = key cache.value = value cache.expires_at", "False), ], ) @mark.asyncio async def test_update(value, expires_at, updated, 
mock_store): mock_store.put.return_value = True", "Cache(Mock()) cache.lock = mock_lock cache.unlock = mock_unlock async with cache.secure(): pass mock_lock.assert_called_once() mock_unlock.assert_called_once()", "mock_store): mock_store.put.return_value = True key = \"test_key\" cache = Cache(mock_store) cache._key = key", "@mark.asyncio async def test_load(mock_store): data = {\"data\": \"test\", \"expires_at\": datetime.now()} mock_store.get.return_value = data", "test_lock_locker_not_set(): cache = Cache(Mock()) with raises(CacheLockError, match=\"Cannot set lock with locker None.\"): await", "= Mock() mock_locker.unlock = AsyncMock() cache = Cache(Mock(), mock_locker) with raises(CacheLockError, match=\"Cannot unset", "False), (None, datetime.now(), False), (\"test\", None, False), ], ) @mark.asyncio async def test_update(value,", "AsyncMock() mock_unlock = AsyncMock() cache = Cache(Mock()) cache.lock = mock_lock cache.unlock = mock_unlock", "mock_locker) cache._key = key await cache.lock() mock_locker.lock.assert_called_once_with(key) @mark.asyncio async def test_lock_key_not_set(): cache =", "Mock() mock_locker.lock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await cache.lock()", "mock_locker.lock = AsyncMock() cache = Cache(Mock(), mock_locker) cache._key = key await cache.lock() mock_locker.lock.assert_called_once_with(key)", "cache = Cache(Mock()) cache.lock = mock_lock cache.unlock = mock_unlock async with cache.secure(): pass" ]
[ "AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization import AuthFailOn401, AuthFailOn403, Authorization from .principal_service import PrincipalService", "import authorize, authorize_token from .authentication import AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization import", "AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization import AuthFailOn401, AuthFailOn403, Authorization from .principal_service import", ".auth_helper import authorize, authorize_token from .authentication import AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization", "import AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization import AuthFailOn401, AuthFailOn403, Authorization from .principal_service", "from .authentication import AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization import AuthFailOn401, AuthFailOn403, Authorization", ".authentication import AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization import AuthFailOn401, AuthFailOn403, Authorization from", "from .auth_helper import authorize, authorize_token from .authentication import AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from", "authorize, authorize_token from .authentication import AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization import AuthFailOn401,", "authorize_token from .authentication import AuthenticationDetails, AuthenticationManager, AuthenticationProvider, AuthenticationScheme from .authorization import AuthFailOn401, AuthFailOn403," ]
[ "DatabaseConnection # Maybe this is a little clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] = {}", "the root directory. p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect to DB db = add_db(path,", "db if it doesn't exist if not p.exists(): # NOTE: assumes the database", "init_db(path: str, bot: commands.Bot): p = Path(MAIN_DB) # Create db if it doesn't", "exist_ok=True) p.touch() # Connect to DB db = add_db(path, bot) # Add tables", "file resides in a subdirectory # within the project root # # TODO:", "DatabaseConnection: if path not in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot) return _CONNECTIONS[path] def", "# TODO: Actually make this not completely explode if the db file resides", "MAIN_DB from .db import DatabaseConnection # Maybe this is a little clumsy? _CONNECTIONS:", "..config import MAIN_DB from .db import DatabaseConnection # Maybe this is a little", "bot) return _CONNECTIONS[path] def get_db(path: str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path: str,", "pathlib import Path from discord.ext import commands from ..config import MAIN_DB from .db", "resides in # the root directory. p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect to DB", "Maybe this is a little clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] = {} def add_db(path:", "exist if not p.exists(): # NOTE: assumes the database file resides in a", "this not completely explode if the db file resides in # the root", "file resides in # the root directory. 
p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect to", "from pathlib import Path from discord.ext import commands from ..config import MAIN_DB from", "_CONNECTIONS[path] = DatabaseConnection(path, bot) return _CONNECTIONS[path] def get_db(path: str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB]", "= Path(MAIN_DB) # Create db if it doesn't exist if not p.exists(): #", "a subdirectory # within the project root # # TODO: Actually make this", "assumes the database file resides in a subdirectory # within the project root", "a little clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] = {} def add_db(path: str, bot: commands.Bot)", "_CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot) return _CONNECTIONS[path] def get_db(path: str=MAIN_DB) -> DatabaseConnection: return", "Connect to DB db = add_db(path, bot) # Add tables (if not already", "= DatabaseConnection(path, bot) return _CONNECTIONS[path] def get_db(path: str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def", "Create db if it doesn't exist if not p.exists(): # NOTE: assumes the", "not in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot) return _CONNECTIONS[path] def get_db(path: str=MAIN_DB) ->", "bot) # Add tables (if not already exists) with open(\"db/vjemmie.db.sql\", \"r\") as f:", "in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot) return _CONNECTIONS[path] def get_db(path: str=MAIN_DB) -> DatabaseConnection:", "# Add tables (if not already exists) with open(\"db/vjemmie.db.sql\", \"r\") as f: script", "p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect to DB db = add_db(path, bot) # Add", "NOTE: assumes the database file resides in a subdirectory # within the project", "not completely explode if the db file resides in # the root directory.", "make this not completely explode if the db file resides in # the", "import Dict from pathlib import Path from discord.ext import commands from ..config import", 
"clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] = {} def add_db(path: str, bot: commands.Bot) -> DatabaseConnection:", "Add tables (if not already exists) with open(\"db/vjemmie.db.sql\", \"r\") as f: script =", "add_db(path, bot) # Add tables (if not already exists) with open(\"db/vjemmie.db.sql\", \"r\") as", "def init_db(path: str, bot: commands.Bot): p = Path(MAIN_DB) # Create db if it", "commands from ..config import MAIN_DB from .db import DatabaseConnection # Maybe this is", "DatabaseConnection] = {} def add_db(path: str, bot: commands.Bot) -> DatabaseConnection: if path not", "path not in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot) return _CONNECTIONS[path] def get_db(path: str=MAIN_DB)", "if not p.exists(): # NOTE: assumes the database file resides in a subdirectory", "# # TODO: Actually make this not completely explode if the db file", "db file resides in # the root directory. p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect", "from typing import Dict from pathlib import Path from discord.ext import commands from", "import DatabaseConnection # Maybe this is a little clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] =", "import MAIN_DB from .db import DatabaseConnection # Maybe this is a little clumsy?", "p.exists(): # NOTE: assumes the database file resides in a subdirectory # within", "directory. p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect to DB db = add_db(path, bot) #", "{} def add_db(path: str, bot: commands.Bot) -> DatabaseConnection: if path not in _CONNECTIONS:", "# NOTE: assumes the database file resides in a subdirectory # within the", "_CONNECTIONS[path] def get_db(path: str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path: str, bot: commands.Bot):", "# the root directory. 
p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect to DB db =", "_CONNECTIONS[MAIN_DB] def init_db(path: str, bot: commands.Bot): p = Path(MAIN_DB) # Create db if", "database file resides in a subdirectory # within the project root # #", "discord.ext import commands from ..config import MAIN_DB from .db import DatabaseConnection # Maybe", "from ..config import MAIN_DB from .db import DatabaseConnection # Maybe this is a", "typing import Dict from pathlib import Path from discord.ext import commands from ..config", "little clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] = {} def add_db(path: str, bot: commands.Bot) ->", "Dict from pathlib import Path from discord.ext import commands from ..config import MAIN_DB", "= {} def add_db(path: str, bot: commands.Bot) -> DatabaseConnection: if path not in", "project root # # TODO: Actually make this not completely explode if the", "# Create db if it doesn't exist if not p.exists(): # NOTE: assumes", "doesn't exist if not p.exists(): # NOTE: assumes the database file resides in", "Path(MAIN_DB) # Create db if it doesn't exist if not p.exists(): # NOTE:", "it doesn't exist if not p.exists(): # NOTE: assumes the database file resides", "the project root # # TODO: Actually make this not completely explode if", "the database file resides in a subdirectory # within the project root #", "tables (if not already exists) with open(\"db/vjemmie.db.sql\", \"r\") as f: script = f.read()", "= add_db(path, bot) # Add tables (if not already exists) with open(\"db/vjemmie.db.sql\", \"r\")", ".db import DatabaseConnection # Maybe this is a little clumsy? 
_CONNECTIONS: Dict[str, DatabaseConnection]", "TODO: Actually make this not completely explode if the db file resides in", "(if not already exists) with open(\"db/vjemmie.db.sql\", \"r\") as f: script = f.read() db.cursor.executescript(script)", "import commands from ..config import MAIN_DB from .db import DatabaseConnection # Maybe this", "DB db = add_db(path, bot) # Add tables (if not already exists) with", "not p.exists(): # NOTE: assumes the database file resides in a subdirectory #", "Actually make this not completely explode if the db file resides in #", "add_db(path: str, bot: commands.Bot) -> DatabaseConnection: if path not in _CONNECTIONS: _CONNECTIONS[path] =", "commands.Bot): p = Path(MAIN_DB) # Create db if it doesn't exist if not", "# Maybe this is a little clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] = {} def", "import Path from discord.ext import commands from ..config import MAIN_DB from .db import", "if the db file resides in # the root directory. p.parent.mkdir(parents=True, exist_ok=True) p.touch()", "p.touch() # Connect to DB db = add_db(path, bot) # Add tables (if", "Dict[str, DatabaseConnection] = {} def add_db(path: str, bot: commands.Bot) -> DatabaseConnection: if path", "to DB db = add_db(path, bot) # Add tables (if not already exists)", "resides in a subdirectory # within the project root # # TODO: Actually", "within the project root # # TODO: Actually make this not completely explode", "Path from discord.ext import commands from ..config import MAIN_DB from .db import DatabaseConnection", "explode if the db file resides in # the root directory. p.parent.mkdir(parents=True, exist_ok=True)", "is a little clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] = {} def add_db(path: str, bot:", "# Connect to DB db = add_db(path, bot) # Add tables (if not", "in # the root directory. 
p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect to DB db", "def add_db(path: str, bot: commands.Bot) -> DatabaseConnection: if path not in _CONNECTIONS: _CONNECTIONS[path]", "DatabaseConnection(path, bot) return _CONNECTIONS[path] def get_db(path: str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path:", "DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path: str, bot: commands.Bot): p = Path(MAIN_DB) # Create", "str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path: str, bot: commands.Bot): p = Path(MAIN_DB)", "-> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path: str, bot: commands.Bot): p = Path(MAIN_DB) #", "from discord.ext import commands from ..config import MAIN_DB from .db import DatabaseConnection #", "root # # TODO: Actually make this not completely explode if the db", "return _CONNECTIONS[path] def get_db(path: str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path: str, bot:", "str, bot: commands.Bot) -> DatabaseConnection: if path not in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path,", "bot: commands.Bot): p = Path(MAIN_DB) # Create db if it doesn't exist if", "if it doesn't exist if not p.exists(): # NOTE: assumes the database file", "return _CONNECTIONS[MAIN_DB] def init_db(path: str, bot: commands.Bot): p = Path(MAIN_DB) # Create db", "in a subdirectory # within the project root # # TODO: Actually make", "str, bot: commands.Bot): p = Path(MAIN_DB) # Create db if it doesn't exist", "bot: commands.Bot) -> DatabaseConnection: if path not in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot)", "the db file resides in # the root directory. 
p.parent.mkdir(parents=True, exist_ok=True) p.touch() #", "def get_db(path: str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path: str, bot: commands.Bot): p", "-> DatabaseConnection: if path not in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot) return _CONNECTIONS[path]", "this is a little clumsy? _CONNECTIONS: Dict[str, DatabaseConnection] = {} def add_db(path: str,", "from .db import DatabaseConnection # Maybe this is a little clumsy? _CONNECTIONS: Dict[str,", "commands.Bot) -> DatabaseConnection: if path not in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot) return", "root directory. p.parent.mkdir(parents=True, exist_ok=True) p.touch() # Connect to DB db = add_db(path, bot)", "_CONNECTIONS: Dict[str, DatabaseConnection] = {} def add_db(path: str, bot: commands.Bot) -> DatabaseConnection: if", "if path not in _CONNECTIONS: _CONNECTIONS[path] = DatabaseConnection(path, bot) return _CONNECTIONS[path] def get_db(path:", "subdirectory # within the project root # # TODO: Actually make this not", "# within the project root # # TODO: Actually make this not completely", "completely explode if the db file resides in # the root directory. p.parent.mkdir(parents=True,", "db = add_db(path, bot) # Add tables (if not already exists) with open(\"db/vjemmie.db.sql\",", "get_db(path: str=MAIN_DB) -> DatabaseConnection: return _CONNECTIONS[MAIN_DB] def init_db(path: str, bot: commands.Bot): p =", "p = Path(MAIN_DB) # Create db if it doesn't exist if not p.exists():" ]
[ "graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for org in orgs) #", "sys import graphyte import requests def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code", "org in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs):", "print(f\"Received status code {resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"] def report_graphite(orgs): for org in", "reverse=True) for org, count in counts: print(f\"{org}: {count}\") def main(): orgs = get_orgs()", "requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code != 200: print(f\"Received status code {resp.status_code}: {resp.text}\") exit(1)", "orgs = get_orgs() report_console(orgs) if len(sys.argv) > 1: graphyte.init(sys.argv[1]) report_graphite(orgs) if __name__ ==", "python3 import sys import graphyte import requests def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp:", "requests def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code != 200: print(f\"Received status", "report_graphite(orgs): for org in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count)", "counts: print(f\"{org}: {count}\") def main(): orgs = get_orgs() report_console(orgs) if len(sys.argv) > 1:", "= f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts = ((org[\"name\"], 
org[\"completed_task_instance_count\"])", "in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts", "orgs) # Sort and print by descending order of tasks completed counts =", "return resp.json()[\"results\"] def report_graphite(orgs): for org in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count =", "by descending order of tasks completed counts = sorted(counts, key=lambda x: x[1], reverse=True)", "!= 200: print(f\"Received status code {resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"] def report_graphite(orgs): for", "{resp.text}\") exit(1) return resp.json()[\"results\"] def report_graphite(orgs): for org in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\"", "def report_graphite(orgs): for org in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\",", "exit(1) return resp.json()[\"results\"] def report_graphite(orgs): for org in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count", "org, count in counts: print(f\"{org}: {count}\") def main(): orgs = get_orgs() report_console(orgs) if", "code {resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"] def report_graphite(orgs): for org in orgs: name_base", "for org in orgs) # Sort and print by descending order of tasks", "in counts: print(f\"{org}: {count}\") def main(): orgs = get_orgs() report_console(orgs) if len(sys.argv) >", "counts = sorted(counts, key=lambda x: x[1], reverse=True) for org, count in counts: print(f\"{org}:", "completed counts = sorted(counts, key=lambda x: x[1], reverse=True) for org, count in counts:", "x: x[1], reverse=True) for org, count in counts: print(f\"{org}: {count}\") def main(): orgs", "order of tasks completed counts = sorted(counts, key=lambda x: 
x[1], reverse=True) for org,", "{count}\") def main(): orgs = get_orgs() report_console(orgs) if len(sys.argv) > 1: graphyte.init(sys.argv[1]) report_graphite(orgs)", "resp.json()[\"results\"] def report_graphite(orgs): for org in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"]", "import graphyte import requests def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code !=", "org[\"completed_task_instance_count\"]) for org in orgs) # Sort and print by descending order of", "#!/usr/bin/env python3 import sys import graphyte import requests def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as", "for org in orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def", "200: print(f\"Received status code {resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"] def report_graphite(orgs): for org", "descending order of tasks completed counts = sorted(counts, key=lambda x: x[1], reverse=True) for", "count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for org", "count in counts: print(f\"{org}: {count}\") def main(): orgs = get_orgs() report_console(orgs) if len(sys.argv)", "print(f\"{org}: {count}\") def main(): orgs = get_orgs() report_console(orgs) if len(sys.argv) > 1: graphyte.init(sys.argv[1])", "def report_console(orgs): counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for org in orgs) # Sort and", "import requests def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code != 200: print(f\"Received", "def main(): orgs = 
get_orgs() report_console(orgs) if len(sys.argv) > 1: graphyte.init(sys.argv[1]) report_graphite(orgs) if", "= get_orgs() report_console(orgs) if len(sys.argv) > 1: graphyte.init(sys.argv[1]) report_graphite(orgs) if __name__ == '__main__':", "get_orgs() report_console(orgs) if len(sys.argv) > 1: graphyte.init(sys.argv[1]) report_graphite(orgs) if __name__ == '__main__': main()", "def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code != 200: print(f\"Received status code", "in orgs) # Sort and print by descending order of tasks completed counts", "key=lambda x: x[1], reverse=True) for org, count in counts: print(f\"{org}: {count}\") def main():", "status code {resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"] def report_graphite(orgs): for org in orgs:", "x[1], reverse=True) for org, count in counts: print(f\"{org}: {count}\") def main(): orgs =", "((org[\"name\"], org[\"completed_task_instance_count\"]) for org in orgs) # Sort and print by descending order", "and print by descending order of tasks completed counts = sorted(counts, key=lambda x:", "= ((org[\"name\"], org[\"completed_task_instance_count\"]) for org in orgs) # Sort and print by descending", "graphyte import requests def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code != 200:", "counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for org in orgs) # Sort and print by", "get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code != 200: print(f\"Received status code {resp.status_code}:", "= org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for org in", "org in orgs) # Sort and print by descending order of 
tasks completed", "of tasks completed counts = sorted(counts, key=lambda x: x[1], reverse=True) for org, count", "Sort and print by descending order of tasks completed counts = sorted(counts, key=lambda", "{resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"] def report_graphite(orgs): for org in orgs: name_base =", "sorted(counts, key=lambda x: x[1], reverse=True) for org, count in counts: print(f\"{org}: {count}\") def", "main(): orgs = get_orgs() report_console(orgs) if len(sys.argv) > 1: graphyte.init(sys.argv[1]) report_graphite(orgs) if __name__", "as resp: if resp.status_code != 200: print(f\"Received status code {resp.status_code}: {resp.text}\") exit(1) return", "= sorted(counts, key=lambda x: x[1], reverse=True) for org, count in counts: print(f\"{org}: {count}\")", "import sys import graphyte import requests def get_orgs(): with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if", "org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for org in orgs)", "resp: if resp.status_code != 200: print(f\"Received status code {resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"]", "count) def report_console(orgs): counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for org in orgs) # Sort", "report_console(orgs): counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for org in orgs) # Sort and print", "print by descending order of tasks completed counts = sorted(counts, key=lambda x: x[1],", "orgs: name_base = f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts =", "name_base = f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] 
graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts = ((org[\"name\"],", "for org, count in counts: print(f\"{org}: {count}\") def main(): orgs = get_orgs() report_console(orgs)", "resp.status_code != 200: print(f\"Received status code {resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"] def report_graphite(orgs):", "if resp.status_code != 200: print(f\"Received status code {resp.status_code}: {resp.text}\") exit(1) return resp.json()[\"results\"] def", "with requests.get(\"https://codein.withgoogle.com/api/program/current/organization/\") as resp: if resp.status_code != 200: print(f\"Received status code {resp.status_code}: {resp.text}\")", "# Sort and print by descending order of tasks completed counts = sorted(counts,", "f\"gci.{org['program_year']}.{org['slug']}\" count = org[\"completed_task_instance_count\"] graphyte.send(f\"{name_base}.tasks_completed\", count) def report_console(orgs): counts = ((org[\"name\"], org[\"completed_task_instance_count\"]) for", "tasks completed counts = sorted(counts, key=lambda x: x[1], reverse=True) for org, count in" ]
[ "t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType = t.Generator[T, R, None] FuncType = t.Callable[[], GenType]", "self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True, ) def __aiter__(self) ->", "R, None] FuncType = t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic): started: int queue_size: int queue_length:", "import inspect import threading import typing as t from collections import deque from", "self.__read_event.clear() if is_exc is None: await self.close() raise StopAsyncIteration(*item.args) from item elif is_exc:", "asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop self.executor = executor self.__closed = threading.Event() self.__close_event =", "if self.__gen_task is None: return if not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather(", "import Statistic T = t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType = t.Generator[T, R, None]", "StopIteration as e: if self.closed: return self.__queue.append((e, None)) self._set_read_event() except Exception as e:", "self._statistic.started += 1 try: gen = iter(self.__gen_func()) throw = self.__throw if inspect.isgenerator(gen): throw", "item elif is_exc: await self.close() raise item from item self._statistic.yielded += 1 return", "typing import Awaitable from weakref import finalize from aiomisc.counters import Statistic T =", "t.Awaitable[T]: while len(self.__queue) == 0: await self.__read_event.wait() item, is_exc = self.__queue.popleft() self.__write_event.set() if", "asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property def closed(self) -> bool: return", "def __aenter__(self) -> \"IteratorWrapper\": return self async def __aexit__( self, exc_type: t.Any, exc_val:", "self.close() class IteratorProxy(t.AsyncIterator): def __init__( self, iterator: 
t.AsyncIterator, finalizer: t.Callable[[], None], ): self.__iterator", "self.loop.run_in_executor(self.executor, self._in_thread) async def close(self) -> None: self.__closed.set() self.__queue.clear() if self.__gen_task is None:", "len(self.__queue) == 0: await self.__read_event.wait() item, is_exc = self.__queue.popleft() self.__write_event.set() if len(self.__queue) ==", "asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True, ) def __aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task is not", "\"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\", ) def __init__( self, gen_func: FuncType, loop:", "1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except StopIteration as e: if self.closed: return self.__queue.append((e,", "current_loop = loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop self.executor = executor self.__closed", "max_size: int = 0, executor: Executor = None, statistic_name: t.Optional[str] = None, ):", "= loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop self.executor = executor self.__closed =", "None: if self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None: self._statistic.started += 1", "self, exc_type: t.Any, exc_val: t.Any, exc_tb: t.Any, ) -> None: if self.closed: return", "self.__closed.is_set() @staticmethod def __throw(_: t.Any) -> t.NoReturn: pass def _set_read_event(self) -> None: def", "t.Deque[t.Any] = deque() self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func: t.Callable =", "return await self.close() class IteratorProxy(t.AsyncIterator): def __init__( self, iterator: t.AsyncIterator, finalizer: t.Callable[[], None],", "class IteratorWrapperStatistic(Statistic): started: int queue_size: int queue_length: int yielded: int enqueued: int class", 
"__aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task is not None: return self self.__gen_task = self.loop.create_task(self._run())", "None, statistic_name: t.Optional[str] = None, ): current_loop = loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop", "-> None: if self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None: self._statistic.started +=", "def __anext__(self) -> t.Awaitable[T]: while len(self.__queue) == 0: await self.__read_event.wait() item, is_exc =", "= max_size @property def closed(self) -> bool: return self.__closed.is_set() @staticmethod def __throw(_: t.Any)", "threading.Event() self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task]", "@property def closed(self) -> bool: return self.__closed.is_set() @staticmethod def __throw(_: t.Any) -> t.NoReturn:", "\"_statistic\", ) def __init__( self, gen_func: FuncType, loop: asyncio.AbstractEventLoop = None, max_size: int", "self.close() raise StopAsyncIteration(*item.args) from item elif is_exc: await self.close() raise item from item", "def __aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task is not None: return self self.__gen_task =", "gen_func: FuncType, loop: asyncio.AbstractEventLoop = None, max_size: int = 0, executor: Executor =", "return self async def __aexit__( self, exc_type: t.Any, exc_val: t.Any, exc_tb: t.Any, )", "self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property def closed(self) -> bool: return self.__closed.is_set()", "= IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property def closed(self) -> bool: return self.__closed.is_set() @staticmethod", "self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except StopIteration as e: if self.closed: return self.__queue.append((e, 
None))", "iterator: t.AsyncIterator, finalizer: t.Callable[[], None], ): self.__iterator = iterator finalize(self, finalizer) def __anext__(self)", "self.loop: asyncio.AbstractEventLoop = current_loop self.executor = executor self.__closed = threading.Event() self.__close_event = asyncio.Event()", "raise item from item self._statistic.yielded += 1 return item async def __aenter__(self) ->", "return self self.__gen_task = self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def __finalizer(self) -> None: self.__closed.set()", "= iter(self.__gen_func()) throw = self.__throw if inspect.isgenerator(gen): throw = gen.throw # type: ignore", "_run(self) -> t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread) async def close(self) -> None: self.__closed.set()", "def _set_read_event(self) -> None: def setter() -> None: if self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter)", "self.loop.create_task(self.close()) async def __anext__(self) -> t.Awaitable[T]: while len(self.__queue) == 0: await self.__read_event.wait() item,", "asyncio.Event() self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func:", "= threading.Event() self.__read_event = asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property def", "= asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property def closed(self) -> bool:", "self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del item self._statistic.enqueued += 1 self._set_read_event()", "int queue_length: int yielded: int enqueued: int class IteratorWrapper(t.AsyncIterator): __slots__ = ( \"__close_event\",", "\"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", 
\"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\", ) def __init__( self,", "import Awaitable from weakref import finalize from aiomisc.counters import Statistic T = t.TypeVar(\"T\")", "if inspect.isgenerator(gen): throw = gen.throw # type: ignore while not self.closed: item =", "self.executor = executor self.__closed = threading.Event() self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any] = deque()", "is None: return if not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop,", "self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task] =", "t.Callable[[], None], ): self.__iterator = iterator finalize(self, finalizer) def __anext__(self) -> Awaitable[t.Any]: return", "int class IteratorWrapper(t.AsyncIterator): __slots__ = ( \"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\",", "await self.__read_event.wait() item, is_exc = self.__queue.popleft() self.__write_event.set() if len(self.__queue) == 0: self.__read_event.clear() if", "executor: Executor = None, statistic_name: t.Optional[str] = None, ): current_loop = loop or", "= self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def __finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close()) async def", "asyncio import inspect import threading import typing as t from collections import deque", "threading.Event() self.__read_event = asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property def closed(self)", "self.__write_event.is_set(): self.__write_event.clear() except StopIteration as e: if self.closed: return self.__queue.append((e, None)) self._set_read_event() except", "loop=self.loop, return_exceptions=True, ) def 
__aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task is not None: return", "\"executor\", \"loop\", \"_statistic\", ) def __init__( self, gen_func: FuncType, loop: asyncio.AbstractEventLoop = None,", "iter(self.__gen_func()) throw = self.__throw if inspect.isgenerator(gen): throw = gen.throw # type: ignore while", "except StopIteration as e: if self.closed: return self.__queue.append((e, None)) self._set_read_event() except Exception as", "or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop self.executor = executor self.__closed = threading.Event() self.__close_event", "None self.__gen_func: t.Callable = gen_func self.__write_event = threading.Event() self.__read_event = asyncio.Event() self._statistic =", "self.__closed.set() self.__queue.clear() if self.__gen_task is None: return if not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait()", "\"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\", ) def __init__( self, gen_func: FuncType, loop: asyncio.AbstractEventLoop", "-= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) -> t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread)", "pass def _set_read_event(self) -> None: def setter() -> None: if self.__read_event.is_set(): return self.__read_event.set()", "return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None: self._statistic.started += 1 try: gen =", "1 try: gen = iter(self.__gen_func()) throw = self.__throw if inspect.isgenerator(gen): throw = gen.throw", "self, gen_func: FuncType, loop: asyncio.AbstractEventLoop = None, max_size: int = 0, executor: Executor", "IteratorWrapper(t.AsyncIterator): __slots__ = ( \"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\",", "def setter() -> None: if 
self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None:", "if self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None: self._statistic.started += 1 try:", "StopAsyncIteration(*item.args) from item elif is_exc: await self.close() raise item from item self._statistic.yielded +=", "close(self) -> None: self.__closed.set() self.__queue.clear() if self.__gen_task is None: return if not self.__gen_task.done():", "if self.closed: return await self.close() class IteratorProxy(t.AsyncIterator): def __init__( self, iterator: t.AsyncIterator, finalizer:", "= gen_func self.__write_event = threading.Event() self.__read_event = asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size =", "finally: self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) -> t.Any: return await", "Executor = None, statistic_name: t.Optional[str] = None, ): current_loop = loop or asyncio.get_event_loop()", "R = t.TypeVar(\"R\") GenType = t.Generator[T, R, None] FuncType = t.Callable[[], GenType] class", "self.__write_event.set() if len(self.__queue) == 0: self.__read_event.clear() if is_exc is None: await self.close() raise", "-> t.AsyncIterator[t.Any]: if self.__gen_task is not None: return self self.__gen_task = self.loop.create_task(self._run()) return", "= t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType = t.Generator[T, R, None] FuncType = t.Callable[[],", "= 0, executor: Executor = None, statistic_name: t.Optional[str] = None, ): current_loop =", "asyncio.AbstractEventLoop = None, max_size: int = 0, executor: Executor = None, statistic_name: t.Optional[str]", "item = next(gen) while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item,", 
"started: int queue_size: int queue_length: int yielded: int enqueued: int class IteratorWrapper(t.AsyncIterator): __slots__", "return self.__closed.is_set() @staticmethod def __throw(_: t.Any) -> t.NoReturn: pass def _set_read_event(self) -> None:", "None, ): current_loop = loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop self.executor =", "if is_exc is None: await self.close() raise StopAsyncIteration(*item.args) from item elif is_exc: await", "import threading import typing as t from collections import deque from concurrent.futures import", "# type: ignore while not self.closed: item = next(gen) while len(self.__queue) > self.__queue_maxsize:", "deque from concurrent.futures import Executor from typing import Awaitable from weakref import finalize", "t.Callable = gen_func self.__write_event = threading.Event() self.__read_event = asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size", "if self.__gen_task is not None: return self self.__gen_task = self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer)", "while len(self.__queue) == 0: await self.__read_event.wait() item, is_exc = self.__queue.popleft() self.__write_event.set() if len(self.__queue)", "= None, statistic_name: t.Optional[str] = None, ): current_loop = loop or asyncio.get_event_loop() self.loop:", "if self.closed: return self.__queue.append((e, None)) self._set_read_event() except Exception as e: if self.closed: return", "Exception as e: if self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1", "\"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\", ) def __init__(", "-> t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread) async def close(self) -> None: self.__closed.set() 
self.__queue.clear()", "( \"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\", )", "import Executor from typing import Awaitable from weakref import finalize from aiomisc.counters import", "import finalize from aiomisc.counters import Statistic T = t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType", "\"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\", ) def", "= t.TypeVar(\"R\") GenType = t.Generator[T, R, None] FuncType = t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic):", "import typing as t from collections import deque from concurrent.futures import Executor from", "IteratorWrapperStatistic(Statistic): started: int queue_size: int queue_length: int yielded: int enqueued: int class IteratorWrapper(t.AsyncIterator):", "finalizer: t.Callable[[], None], ): self.__iterator = iterator finalize(self, finalizer) def __anext__(self) -> Awaitable[t.Any]:", "import deque from concurrent.futures import Executor from typing import Awaitable from weakref import", "bool: return self.__closed.is_set() @staticmethod def __throw(_: t.Any) -> t.NoReturn: pass def _set_read_event(self) ->", "await self.close() raise StopAsyncIteration(*item.args) from item elif is_exc: await self.close() raise item from", "item async def __aenter__(self) -> \"IteratorWrapper\": return self async def __aexit__( self, exc_type:", "self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self) -> t.Awaitable[T]: while len(self.__queue) == 0: await self.__read_event.wait()", "as t from collections import deque from concurrent.futures import Executor from typing import", "self.__finalizer) def __finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self) -> t.Awaitable[T]: while", 
"1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) -> t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread) async", "throw = self.__throw if inspect.isgenerator(gen): throw = gen.throw # type: ignore while not", "self async def __aexit__( self, exc_type: t.Any, exc_val: t.Any, exc_tb: t.Any, ) ->", "== 0: await self.__read_event.wait() item, is_exc = self.__queue.popleft() self.__write_event.set() if len(self.__queue) == 0:", "not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True, ) def __aiter__(self)", "class IteratorProxy(t.AsyncIterator): def __init__( self, iterator: t.AsyncIterator, finalizer: t.Callable[[], None], ): self.__iterator =", "item from item self._statistic.yielded += 1 return item async def __aenter__(self) -> \"IteratorWrapper\":", "= t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic): started: int queue_size: int queue_length: int yielded: int", "len(self.__queue) == 0: self.__read_event.clear() if is_exc is None: await self.close() raise StopAsyncIteration(*item.args) from", "item self._statistic.yielded += 1 return item async def __aenter__(self) -> \"IteratorWrapper\": return self", "None)) self._set_read_event() except Exception as e: if self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally:", "True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) -> t.Any:", "statistic_name: t.Optional[str] = None, ): current_loop = loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop =", "return IteratorProxy(self, self.__finalizer) def __finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self) 
->", "if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del item self._statistic.enqueued += 1 self._set_read_event() if", "self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) -> t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread) async def close(self)", "self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None: self._statistic.started += 1 try: gen = iter(self.__gen_func())", "-> None: self._statistic.started += 1 try: gen = iter(self.__gen_func()) throw = self.__throw if", "t.NoReturn: pass def _set_read_event(self) -> None: def setter() -> None: if self.__read_event.is_set(): return", "\"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\", ) def __init__( self, gen_func: FuncType,", "is_exc = self.__queue.popleft() self.__write_event.set() if len(self.__queue) == 0: self.__read_event.clear() if is_exc is None:", "from concurrent.futures import Executor from typing import Awaitable from weakref import finalize from", "= current_loop self.executor = executor self.__closed = threading.Event() self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any]", "def close(self) -> None: self.__closed.set() self.__queue.clear() if self.__gen_task is None: return if not", "t.Optional[asyncio.Task] = None self.__gen_func: t.Callable = gen_func self.__write_event = threading.Event() self.__read_event = asyncio.Event()", "async def __aenter__(self) -> \"IteratorWrapper\": return self async def __aexit__( self, exc_type: t.Any,", "GenType = t.Generator[T, R, None] FuncType = t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic): started: int", "IteratorProxy(self, self.__finalizer) def __finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self) -> t.Awaitable[T]:", "__anext__(self) -> t.Awaitable[T]: while len(self.__queue) == 0: await 
self.__read_event.wait() item, is_exc = self.__queue.popleft()", "await self.close() class IteratorProxy(t.AsyncIterator): def __init__( self, iterator: t.AsyncIterator, finalizer: t.Callable[[], None], ):", "-> None: if self.closed: return await self.close() class IteratorProxy(t.AsyncIterator): def __init__( self, iterator:", "self.__read_event = asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property def closed(self) ->", "gen = iter(self.__gen_func()) throw = self.__throw if inspect.isgenerator(gen): throw = gen.throw # type:", "= executor self.__closed = threading.Event() self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize", "gen_func self.__write_event = threading.Event() self.__read_event = asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size", "item, is_exc = self.__queue.popleft() self.__write_event.set() if len(self.__queue) == 0: self.__read_event.clear() if is_exc is", "self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None: self._statistic.started += 1 try: gen", "self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) -> t.Any: return await self.loop.run_in_executor(self.executor,", "inspect import threading import typing as t from collections import deque from concurrent.futures", "0: self.__read_event.clear() if is_exc is None: await self.close() raise StopAsyncIteration(*item.args) from item elif", "exc_val: t.Any, exc_tb: t.Any, ) -> None: if self.closed: return await self.close() class", "def __init__( self, iterator: t.AsyncIterator, finalizer: t.Callable[[], None], ): self.__iterator = iterator finalize(self,", "None, max_size: int = 0, executor: Executor = None, statistic_name: t.Optional[str] = None,", "self.__gen_task = 
self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def __finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close()) async", "None: self._statistic.started += 1 try: gen = iter(self.__gen_func()) throw = self.__throw if inspect.isgenerator(gen):", "None: return if not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True,", "is_exc: await self.close() raise item from item self._statistic.yielded += 1 return item async", "not self.closed: item = next(gen) while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError())", "enqueued: int class IteratorWrapper(t.AsyncIterator): __slots__ = ( \"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\",", "1 return item async def __aenter__(self) -> \"IteratorWrapper\": return self async def __aexit__(", "executor self.__closed = threading.Event() self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize =", "self._statistic.yielded += 1 return item async def __aenter__(self) -> \"IteratorWrapper\": return self async", "t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread) async def close(self) -> None: self.__closed.set() self.__queue.clear() if", "= t.Generator[T, R, None] FuncType = t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic): started: int queue_size:", "weakref import finalize from aiomisc.counters import Statistic T = t.TypeVar(\"T\") R = t.TypeVar(\"R\")", "self.closed: return await self.close() class IteratorProxy(t.AsyncIterator): def __init__( self, iterator: t.AsyncIterator, finalizer: t.Callable[[],", "self._set_read_event() except Exception as e: if self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: 
self._statistic.started", "self.__gen_task, loop=self.loop, return_exceptions=True, ) def __aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task is not None:", "self.__queue.append((e, None)) self._set_read_event() except Exception as e: if self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set)", "= None, max_size: int = 0, executor: Executor = None, statistic_name: t.Optional[str] =", "from item self._statistic.yielded += 1 return item async def __aenter__(self) -> \"IteratorWrapper\": return", "None: def setter() -> None: if self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) ->", "\"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\", ) def __init__( self, gen_func:", "self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True, ) def __aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task", "None: if self.closed: return await self.close() class IteratorProxy(t.AsyncIterator): def __init__( self, iterator: t.AsyncIterator,", "throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del item self._statistic.enqueued += 1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear()", ") def __init__( self, gen_func: FuncType, loop: asyncio.AbstractEventLoop = None, max_size: int =", "t.Optional[str] = None, ): current_loop = loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop", "self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del item self._statistic.enqueued += 1 self._set_read_event() if self.__write_event.is_set():", "if len(self.__queue) == 0: self.__read_event.clear() if is_exc is None: await self.close() raise StopAsyncIteration(*item.args)", "as e: if self.closed: return self.__queue.append((e, None)) 
self._set_read_event() except Exception as e: if", "\"loop\", \"_statistic\", ) def __init__( self, gen_func: FuncType, loop: asyncio.AbstractEventLoop = None, max_size:", "import asyncio import inspect import threading import typing as t from collections import", "from aiomisc.counters import Statistic T = t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType = t.Generator[T,", "self self.__gen_task = self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def __finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close())", "self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func: t.Callable = gen_func self.__write_event = threading.Event() self.__read_event =", "try: gen = iter(self.__gen_func()) throw = self.__throw if inspect.isgenerator(gen): throw = gen.throw #", "__aenter__(self) -> \"IteratorWrapper\": return self async def __aexit__( self, exc_type: t.Any, exc_val: t.Any,", "self, iterator: t.AsyncIterator, finalizer: t.Callable[[], None], ): self.__iterator = iterator finalize(self, finalizer) def", "asyncio.AbstractEventLoop = current_loop self.executor = executor self.__closed = threading.Event() self.__close_event = asyncio.Event() self.__queue:", "max_size @property def closed(self) -> bool: return self.__closed.is_set() @staticmethod def __throw(_: t.Any) ->", "= threading.Event() self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize = max_size self.__gen_task:", "__init__( self, iterator: t.AsyncIterator, finalizer: t.Callable[[], None], ): self.__iterator = iterator finalize(self, finalizer)", "self.close() raise item from item self._statistic.yielded += 1 return item async def __aenter__(self)", "return_exceptions=True, ) def __aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task is not None: return self", "ignore while not self.closed: item = next(gen) while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if", "queue_length: int 
yielded: int enqueued: int class IteratorWrapper(t.AsyncIterator): __slots__ = ( \"__close_event\", \"__closed\",", "FuncType, loop: asyncio.AbstractEventLoop = None, max_size: int = 0, executor: Executor = None,", "not None: return self self.__gen_task = self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def __finalizer(self) ->", "= ( \"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\", \"loop\", \"_statistic\",", "0: await self.__read_event.wait() item, is_exc = self.__queue.popleft() self.__write_event.set() if len(self.__queue) == 0: self.__read_event.clear()", "self.closed: return self.__queue.append((e, None)) self._set_read_event() except Exception as e: if self.closed: return self.__queue.append((e,", "= self.__queue.popleft() self.__write_event.set() if len(self.__queue) == 0: self.__read_event.clear() if is_exc is None: await", "current_loop self.executor = executor self.__closed = threading.Event() self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any] =", "Awaitable from weakref import finalize from aiomisc.counters import Statistic T = t.TypeVar(\"T\") R", "self.__queue.clear() if self.__gen_task is None: return if not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await", "def __aexit__( self, exc_type: t.Any, exc_val: t.Any, exc_tb: t.Any, ) -> None: if", "from typing import Awaitable from weakref import finalize from aiomisc.counters import Statistic T", "self.__read_event.wait() item, is_exc = self.__queue.popleft() self.__write_event.set() if len(self.__queue) == 0: self.__read_event.clear() if is_exc", "aiomisc.counters import Statistic T = t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType = t.Generator[T, R,", "len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del item 
self._statistic.enqueued", "from weakref import finalize from aiomisc.counters import Statistic T = t.TypeVar(\"T\") R =", "-> t.Awaitable[T]: while len(self.__queue) == 0: await self.__read_event.wait() item, is_exc = self.__queue.popleft() self.__write_event.set()", "if not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True, ) def", "= None self.__gen_func: t.Callable = gen_func self.__write_event = threading.Event() self.__read_event = asyncio.Event() self._statistic", "None: await self.close() raise StopAsyncIteration(*item.args) from item elif is_exc: await self.close() raise item", "_in_thread(self) -> None: self._statistic.started += 1 try: gen = iter(self.__gen_func()) throw = self.__throw", "self.__gen_task is None: return if not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather( self.__gen_task,", "self._statistic.enqueued += 1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except StopIteration as e: if self.closed:", "= asyncio.Event() self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task] = None", "= max_size self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func: t.Callable = gen_func self.__write_event = threading.Event()", "> self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del item self._statistic.enqueued +=", "= self.__throw if inspect.isgenerator(gen): throw = gen.throw # type: ignore while not self.closed:", "loop: asyncio.AbstractEventLoop = None, max_size: int = 0, executor: Executor = None, statistic_name:", "t from collections import deque from concurrent.futures import Executor from typing import Awaitable", "self.__queue.popleft() self.__write_event.set() if len(self.__queue) == 0: 
self.__read_event.clear() if is_exc is None: await self.close()", "t.Any, exc_val: t.Any, exc_tb: t.Any, ) -> None: if self.closed: return await self.close()", ") -> None: if self.closed: return await self.close() class IteratorProxy(t.AsyncIterator): def __init__( self,", "<gh_stars>0 import asyncio import inspect import threading import typing as t from collections", "elif is_exc: await self.close() raise item from item self._statistic.yielded += 1 return item", "async def close(self) -> None: self.__closed.set() self.__queue.clear() if self.__gen_task is None: return if", "self.__write_event.clear() except StopIteration as e: if self.closed: return self.__queue.append((e, None)) self._set_read_event() except Exception", "is_exc is None: await self.close() raise StopAsyncIteration(*item.args) from item elif is_exc: await self.close()", "_set_read_event(self) -> None: def setter() -> None: if self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def", "queue_size: int queue_length: int yielded: int enqueued: int class IteratorWrapper(t.AsyncIterator): __slots__ = (", "setter() -> None: if self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None: self._statistic.started", "t.Generator[T, R, None] FuncType = t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic): started: int queue_size: int", "None: return self self.__gen_task = self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def __finalizer(self) -> None:", "self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func: t.Callable", "self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def", "= deque() 
self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func: t.Callable = gen_func", "+= 1 return item async def __aenter__(self) -> \"IteratorWrapper\": return self async def", "def _run(self) -> t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread) async def close(self) -> None:", "self.__gen_task is not None: return self self.__gen_task = self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def", "return await self.loop.run_in_executor(self.executor, self._in_thread) async def close(self) -> None: self.__closed.set() self.__queue.clear() if self.__gen_task", "\"IteratorWrapper\": return self async def __aexit__( self, exc_type: t.Any, exc_val: t.Any, exc_tb: t.Any,", "t.Any) -> t.NoReturn: pass def _set_read_event(self) -> None: def setter() -> None: if", "e: if self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set)", "loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop self.executor = executor self.__closed = threading.Event()", "t.Any, exc_tb: t.Any, ) -> None: if self.closed: return await self.close() class IteratorProxy(t.AsyncIterator):", "Statistic T = t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType = t.Generator[T, R, None] FuncType", "inspect.isgenerator(gen): throw = gen.throw # type: ignore while not self.closed: item = next(gen)", "self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def __finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self)", "t.AsyncIterator, finalizer: t.Callable[[], None], ): self.__iterator = iterator finalize(self, finalizer) def __anext__(self) ->", "finalize from aiomisc.counters import Statistic T = t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType =", "__aexit__( self, 
exc_type: t.Any, exc_val: t.Any, exc_tb: t.Any, ) -> None: if self.closed:", "self.__throw if inspect.isgenerator(gen): throw = gen.throw # type: ignore while not self.closed: item", "+= 1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except StopIteration as e: if self.closed: return", "= gen.throw # type: ignore while not self.closed: item = next(gen) while len(self.__queue)", "self._statistic.queue_size = max_size @property def closed(self) -> bool: return self.__closed.is_set() @staticmethod def __throw(_:", "= None, ): current_loop = loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop self.executor", "None: self.__closed.set() self.__queue.clear() if self.__gen_task is None: return if not self.__gen_task.done(): self.__gen_task.cancel() await", "is not None: return self self.__gen_task = self.loop.create_task(self._run()) return IteratorProxy(self, self.__finalizer) def __finalizer(self)", "IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property def closed(self) -> bool: return self.__closed.is_set() @staticmethod def", "self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) -> t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread) async def", "T = t.TypeVar(\"T\") R = t.TypeVar(\"R\") GenType = t.Generator[T, R, None] FuncType =", "throw = gen.throw # type: ignore while not self.closed: item = next(gen) while", "int = 0, executor: Executor = None, statistic_name: t.Optional[str] = None, ): current_loop", "self.__closed = threading.Event() self.__close_event = asyncio.Event() self.__queue: t.Deque[t.Any] = deque() self.__queue_maxsize = max_size", "t.TypeVar(\"R\") GenType = t.Generator[T, R, None] FuncType = t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic): started:", "int queue_size: int queue_length: int yielded: int enqueued: int class IteratorWrapper(t.AsyncIterator): 
__slots__ =", "next(gen) while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del", "== 0: self.__read_event.clear() if is_exc is None: await self.close() raise StopAsyncIteration(*item.args) from item", "as e: if self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1 self._set_read_event()", "max_size self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func: t.Callable = gen_func self.__write_event = threading.Event() self.__read_event", "+= 1 try: gen = iter(self.__gen_func()) throw = self.__throw if inspect.isgenerator(gen): throw =", "@staticmethod def __throw(_: t.Any) -> t.NoReturn: pass def _set_read_event(self) -> None: def setter()", "type: ignore while not self.closed: item = next(gen) while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1)", "typing as t from collections import deque from concurrent.futures import Executor from typing", "t.Any, ) -> None: if self.closed: return await self.close() class IteratorProxy(t.AsyncIterator): def __init__(", "0, executor: Executor = None, statistic_name: t.Optional[str] = None, ): current_loop = loop", "is None: await self.close() raise StopAsyncIteration(*item.args) from item elif is_exc: await self.close() raise", "int enqueued: int class IteratorWrapper(t.AsyncIterator): __slots__ = ( \"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\",", "from collections import deque from concurrent.futures import Executor from typing import Awaitable from", "IteratorProxy(t.AsyncIterator): def __init__( self, iterator: t.AsyncIterator, finalizer: t.Callable[[], None], ): self.__iterator = iterator", "-> t.NoReturn: pass def _set_read_event(self) -> None: def setter() -> None: if self.__read_event.is_set():", "class IteratorWrapper(t.AsyncIterator): __slots__ = ( 
\"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\",", "self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del item self._statistic.enqueued += 1", "\"__write_event\", \"executor\", \"loop\", \"_statistic\", ) def __init__( self, gen_func: FuncType, loop: asyncio.AbstractEventLoop =", "def closed(self) -> bool: return self.__closed.is_set() @staticmethod def __throw(_: t.Any) -> t.NoReturn: pass", "-> bool: return self.__closed.is_set() @staticmethod def __throw(_: t.Any) -> t.NoReturn: pass def _set_read_event(self)", "from item elif is_exc: await self.close() raise item from item self._statistic.yielded += 1", "def __throw(_: t.Any) -> t.NoReturn: pass def _set_read_event(self) -> None: def setter() ->", "return item async def __aenter__(self) -> \"IteratorWrapper\": return self async def __aexit__( self,", "__finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self) -> t.Awaitable[T]: while len(self.__queue) ==", "): current_loop = loop or asyncio.get_event_loop() self.loop: asyncio.AbstractEventLoop = current_loop self.executor = executor", "while not self.closed: item = next(gen) while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed:", "raise StopAsyncIteration(*item.args) from item elif is_exc: await self.close() raise item from item self._statistic.yielded", "while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False)) del item", "e: if self.closed: return self.__queue.append((e, None)) self._set_read_event() except Exception as e: if self.closed:", "if self.__write_event.is_set(): self.__write_event.clear() except StopIteration as e: if self.closed: return self.__queue.append((e, None)) 
self._set_read_event()", "t.AsyncIterator[t.Any]: if self.__gen_task is not None: return self self.__gen_task = self.loop.create_task(self._run()) return IteratorProxy(self,", "return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self)", "if self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async", "await self.loop.run_in_executor(self.executor, self._in_thread) async def close(self) -> None: self.__closed.set() self.__queue.clear() if self.__gen_task is", "self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) ->", "del item self._statistic.enqueued += 1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except StopIteration as e:", "return self.__queue.append((e, None)) self._set_read_event() except Exception as e: if self.closed: return self.__queue.append((e, True))", "except Exception as e: if self.closed: return self.__queue.append((e, True)) self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -=", "await self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True, ) def __aiter__(self) -> t.AsyncIterator[t.Any]: if", "async def __aexit__( self, exc_type: t.Any, exc_val: t.Any, exc_tb: t.Any, ) -> None:", "def _in_thread(self) -> None: self._statistic.started += 1 try: gen = iter(self.__gen_func()) throw =", "__throw(_: t.Any) -> t.NoReturn: pass def _set_read_event(self) -> None: def setter() -> None:", "t.Callable[[], GenType] class 
IteratorWrapperStatistic(Statistic): started: int queue_size: int queue_length: int yielded: int enqueued:", "False)) del item self._statistic.enqueued += 1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except StopIteration as", "Executor from typing import Awaitable from weakref import finalize from aiomisc.counters import Statistic", "item self._statistic.enqueued += 1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except StopIteration as e: if", "concurrent.futures import Executor from typing import Awaitable from weakref import finalize from aiomisc.counters", "GenType] class IteratorWrapperStatistic(Statistic): started: int queue_size: int queue_length: int yielded: int enqueued: int", "closed(self) -> bool: return self.__closed.is_set() @staticmethod def __throw(_: t.Any) -> t.NoReturn: pass def", "threading import typing as t from collections import deque from concurrent.futures import Executor", "None: self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self) -> t.Awaitable[T]: while len(self.__queue) == 0: await", "None] FuncType = t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic): started: int queue_size: int queue_length: int", "self.closed: item = next(gen) while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return", "-> None: self.__closed.set() self.__queue.clear() if self.__gen_task is None: return if not self.__gen_task.done(): self.__gen_task.cancel()", "self._in_thread) async def close(self) -> None: self.__closed.set() self.__queue.clear() if self.__gen_task is None: return", "self.__queue.append((item, False)) del item self._statistic.enqueued += 1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except StopIteration", "collections import deque from concurrent.futures import Executor from typing import Awaitable from weakref", ") 
def __aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task is not None: return self self.__gen_task", "self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func: t.Callable = gen_func self.__write_event =", "-> None: self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self) -> t.Awaitable[T]: while len(self.__queue) == 0:", "async def _run(self) -> t.Any: return await self.loop.run_in_executor(self.executor, self._in_thread) async def close(self) ->", "self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True, ) def __aiter__(self) -> t.AsyncIterator[t.Any]:", "-> \"IteratorWrapper\": return self async def __aexit__( self, exc_type: t.Any, exc_val: t.Any, exc_tb:", "def __init__( self, gen_func: FuncType, loop: asyncio.AbstractEventLoop = None, max_size: int = 0,", "deque() self.__queue_maxsize = max_size self.__gen_task: t.Optional[asyncio.Task] = None self.__gen_func: t.Callable = gen_func self.__write_event", "-> None: def setter() -> None: if self.__read_event.is_set(): return self.__read_event.set() self.loop.call_soon_threadsafe(setter) def _in_thread(self)", "self.loop.call_soon_threadsafe(self.__read_event.set) finally: self._statistic.started -= 1 self._set_read_event() self.loop.call_soon_threadsafe(self.__close_event.set) async def _run(self) -> t.Any: return", "gen.throw # type: ignore while not self.closed: item = next(gen) while len(self.__queue) >", "= next(gen) while len(self.__queue) > self.__queue_maxsize: self.__write_event.wait(0.1) if self.closed: throw(asyncio.CancelledError()) return self.__queue.append((item, False))", "exc_tb: t.Any, ) -> None: if self.closed: return await self.close() class IteratorProxy(t.AsyncIterator): def", "def __finalizer(self) -> None: self.__closed.set() self.loop.create_task(self.close()) async def __anext__(self) -> t.Awaitable[T]: while len(self.__queue)", "await 
self.close() raise item from item self._statistic.yielded += 1 return item async def", "exc_type: t.Any, exc_val: t.Any, exc_tb: t.Any, ) -> None: if self.closed: return await", "async def __anext__(self) -> t.Awaitable[T]: while len(self.__queue) == 0: await self.__read_event.wait() item, is_exc", "None], ): self.__iterator = iterator finalize(self, finalizer) def __anext__(self) -> Awaitable[t.Any]: return self.__iterator.__anext__()", "FuncType = t.Callable[[], GenType] class IteratorWrapperStatistic(Statistic): started: int queue_size: int queue_length: int yielded:", "__init__( self, gen_func: FuncType, loop: asyncio.AbstractEventLoop = None, max_size: int = 0, executor:", "int yielded: int enqueued: int class IteratorWrapper(t.AsyncIterator): __slots__ = ( \"__close_event\", \"__closed\", \"__gen_func\",", "yielded: int enqueued: int class IteratorWrapper(t.AsyncIterator): __slots__ = ( \"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\",", "self.__gen_func: t.Callable = gen_func self.__write_event = threading.Event() self.__read_event = asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name)", "return self.__queue.append((item, False)) del item self._statistic.enqueued += 1 self._set_read_event() if self.__write_event.is_set(): self.__write_event.clear() except", "await asyncio.gather( self.__gen_task, loop=self.loop, return_exceptions=True, ) def __aiter__(self) -> t.AsyncIterator[t.Any]: if self.__gen_task is", "__slots__ = ( \"__close_event\", \"__closed\", \"__gen_func\", \"__gen_task\", \"__queue\", \"__queue_maxsize\", \"__read_event\", \"__write_event\", \"executor\", \"loop\",", "self.__write_event = threading.Event() self.__read_event = asyncio.Event() self._statistic = IteratorWrapperStatistic(statistic_name) self._statistic.queue_size = max_size @property", "return if not self.__gen_task.done(): self.__gen_task.cancel() await self.__close_event.wait() await asyncio.gather( self.__gen_task, loop=self.loop, 
return_exceptions=True, )", "self.loop.call_soon_threadsafe(setter) def _in_thread(self) -> None: self._statistic.started += 1 try: gen = iter(self.__gen_func()) throw" ]
[ "hierarchical tree # ## Grouping clusters in bottom-up fashion np.random.seed(123) variables = ['X',", "ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s' % labels)", "AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s' % labels) ac =", "distance matrix #row_clusters = linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label 1', 'row", "fig.add_axes([0.23, 0.1, 0.6, 0.6]) # x-pos, y-pos, width, height cax = axm.matshow(df_rowclust, interpolation='nearest',", "import numpy as np import pandas as pd import matplotlib.pyplot as plt from", "from sklearn.cluster import AgglomerativeClustering # # Organizing clusters as a hierarchical tree #", "range(row_clusters.shape[0])]) # 3. correct approach: Input matrix row_clusters = linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters,", "approach: Squareform distance matrix #row_clusters = linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label", "yield different distance values although the overall clustering could be the same. #", "v1.5.1, please use orientation='right' row_dendr = dendrogram(row_clusters, orientation='left') # reorder data with respect", "< v1.5.1, please use orientation='right' row_dendr = dendrogram(row_clusters, orientation='left') # reorder data with", "1) # for i in range(row_clusters.shape[0])]) # 2. correct approach: Condensed distance matrix", "= axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() #", "range(row_clusters.shape[0])]) # 2. 
correct approach: Condensed distance matrix row_clusters = linkage(pdist(df, metric='euclidean'), method='complete')", "= np.random.random_sample([5, 3])*10 df = pd.DataFrame(X, columns=variables, index=labels) print(df) # ## Performing hierarchical", "df = pd.DataFrame(X, columns=variables, index=labels) print(df) # ## Performing hierarchical clustering on a", "columns=['row label 1', 'row label 2', # 'distance', 'no. of items in clust.'],", "list(df_rowclust.index)) plt.show() # ## Applying agglomerative clustering via scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean',", "respect to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove axes spines from", "row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist) # We can either pass a", "print(df) # ## Performing hierarchical clustering on a distance matrix row_dist = pd.DataFrame(squareform(pdist(df,", "1) for i in range(row_clusters.shape[0])]) # 3. 
correct approach: Input matrix row_clusters =", "# x-pos, y-pos, width, height cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] +", "# note: for matplotlib < v1.5.1, please use orientation='right' row_dendr = dendrogram(row_clusters, orientation='left')", "0.6]) # x-pos, y-pos, width, height cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels(['']", "0.6, 0.6]) # x-pos, y-pos, width, height cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax)", "pd.DataFrame(X, columns=variables, index=labels) print(df) # ## Performing hierarchical clustering on a distance matrix", "to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove axes spines from dendrogram", "a heat map # plot row dendrogram fig = plt.figure(figsize=(8, 8), facecolor='white') axd", "3. correct approach: Input matrix row_clusters = linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label", "please use orientation='right' row_dendr = dendrogram(row_clusters, orientation='left') # reorder data with respect to", "the same. # 1. incorrect approach: Squareform distance matrix #row_clusters = linkage(row_dist, method='complete',", "in range(row_clusters.shape[0])]) # 2. 
correct approach: Condensed distance matrix row_clusters = linkage(pdist(df, metric='euclidean'),", "in axd.spines.values(): i.set_visible(False) # plot heatmap axm = fig.add_axes([0.23, 0.1, 0.6, 0.6]) #", "matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist) # We can either pass", "of items in clust.'], index=['cluster %d' % (i + 1) for i in", "1) for i in range(row_clusters.shape[0])]) # make dendrogram row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf", "% (i + 1) # for i in range(row_clusters.shape[0])]) # 2. correct approach:", "from scipy.spatial.distance import pdist, squareform from scipy.cluster.hierarchy import linkage, dendrogram from sklearn.cluster import", "= linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no.", "incorrect approach: Squareform distance matrix #row_clusters = linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row", "list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() # ## Applying agglomerative clustering via scikit-learn ac", "would yield different distance values although the overall clustering could be the same.", "df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove axes spines from dendrogram for i", "clust.'], index=['cluster %d' % (i + 1) for i in range(row_clusters.shape[0])]) # 3.", "# ## Applying agglomerative clustering via scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels", "y-pos, width, height cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels(['']", "# make dendrogram row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) 
plt.tight_layout() plt.ylabel('Euclidean distance') plt.show()", "Condensed distance matrix row_clusters = linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label 1', 'row", "'distance', 'no. of items in clust.'], # index=['cluster %d' % (i + 1)", "index=['cluster %d' % (i + 1) # for i in range(row_clusters.shape[0])]) # 2.", "['X', 'Y', 'Z'] labels = ['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4'] X = np.random.random_sample([5,", "row_clusters = linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance',", "dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() # ## Attaching dendrograms to", "different distance values although the overall clustering could be the same. # 1.", "cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show()", "fashion np.random.seed(123) variables = ['X', 'Y', 'Z'] labels = ['ID_0', 'ID_1', 'ID_2', 'ID_3',", "can pass the \"original\" data array and define the `metric='euclidean'` argument in `linkage`.", "array and define the `metric='euclidean'` argument in `linkage`. However, we should not pass", "labels: %s' % labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster", "row dendrogram fig = plt.figure(figsize=(8, 8), facecolor='white') axd = fig.add_axes([0.09, 0.1, 0.2, 0.6])", "the \"original\" data array and define the `metric='euclidean'` argument in `linkage`. 
However, we", "axd.set_yticks([]) # remove axes spines from dendrogram for i in axd.spines.values(): i.set_visible(False) #", "method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label 1', 'row label 2', # 'distance', 'no.", "as pd import matplotlib.pyplot as plt from scipy.spatial.distance import pdist, squareform from scipy.cluster.hierarchy", "labels = ac.fit_predict(X) print('Cluster labels: %s' % labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete')", "np.random.random_sample([5, 3])*10 df = pd.DataFrame(X, columns=variables, index=labels) print(df) # ## Performing hierarchical clustering", "pdist, squareform from scipy.cluster.hierarchy import linkage, dendrogram from sklearn.cluster import AgglomerativeClustering # #", "clustering could be the same. # 1. incorrect approach: Squareform distance matrix #row_clusters", "distance matrix row_clusters = linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label 1', 'row label", "+ list(df_rowclust.index)) plt.show() # ## Applying agglomerative clustering via scikit-learn ac = AgglomerativeClustering(n_clusters=3,", "axd.set_xticks([]) axd.set_yticks([]) # remove axes spines from dendrogram for i in axd.spines.values(): i.set_visible(False)", "distance') plt.show() # ## Attaching dendrograms to a heat map # plot row", "for i in axd.spines.values(): i.set_visible(False) # plot heatmap axm = fig.add_axes([0.23, 0.1, 0.6,", "argument in `linkage`. However, we should not pass the squareform distance matrix, which", "the `pdist` function, or we can pass the \"original\" data array and define", "1. incorrect approach: Squareform distance matrix #row_clusters = linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, #", "values although the overall clustering could be the same. # 1. incorrect approach:", "%d' % (i + 1) # for i in range(row_clusters.shape[0])]) # 2. 
correct", "Attaching dendrograms to a heat map # plot row dendrogram fig = plt.figure(figsize=(8,", "from dendrogram for i in axd.spines.values(): i.set_visible(False) # plot heatmap axm = fig.add_axes([0.23,", "i in axd.spines.values(): i.set_visible(False) # plot heatmap axm = fig.add_axes([0.23, 0.1, 0.6, 0.6])", "method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no. of items", "% labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s'", "# remove axes spines from dendrogram for i in axd.spines.values(): i.set_visible(False) # plot", "# plot heatmap axm = fig.add_axes([0.23, 0.1, 0.6, 0.6]) # x-pos, y-pos, width,", "clusters in bottom-up fashion np.random.seed(123) variables = ['X', 'Y', 'Z'] labels = ['ID_0',", "%s' % labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels:", "pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist) # We can either pass a condensed distance", "clusters as a hierarchical tree # ## Grouping clusters in bottom-up fashion np.random.seed(123)", "from scipy.cluster.hierarchy import linkage, dendrogram from sklearn.cluster import AgglomerativeClustering # # Organizing clusters", "plt from scipy.spatial.distance import pdist, squareform from scipy.cluster.hierarchy import linkage, dendrogram from sklearn.cluster", "linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label 1', 'row label 2', # 'distance',", "Applying agglomerative clustering via scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X)", "#pd.DataFrame(row_clusters, # columns=['row label 1', 'row label 2', # 'distance', 'no. 
of items", "as plt from scipy.spatial.distance import pdist, squareform from scipy.cluster.hierarchy import linkage, dendrogram from", "import pdist, squareform from scipy.cluster.hierarchy import linkage, dendrogram from sklearn.cluster import AgglomerativeClustering #", "Grouping clusters in bottom-up fashion np.random.seed(123) variables = ['X', 'Y', 'Z'] labels =", "% (i + 1) for i in range(row_clusters.shape[0])]) # 3. correct approach: Input", "'ID_1', 'ID_2', 'ID_3', 'ID_4'] X = np.random.random_sample([5, 3])*10 df = pd.DataFrame(X, columns=variables, index=labels)", "ac.fit_predict(X) print('Cluster labels: %s' % labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete') labels =", "2', 'distance', 'no. of items in clust.'], index=['cluster %d' % (i + 1)", "= dendrogram(row_clusters, orientation='left') # reorder data with respect to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]]", "sklearn.cluster import AgglomerativeClustering # # Organizing clusters as a hierarchical tree # ##", "pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no. of items in clust.'],", "be the same. # 1. incorrect approach: Squareform distance matrix #row_clusters = linkage(row_dist,", "to a heat map # plot row dendrogram fig = plt.figure(figsize=(8, 8), facecolor='white')", "`pdist` function, or we can pass the \"original\" data array and define the", "\"original\" data array and define the `metric='euclidean'` argument in `linkage`. 
However, we should", "scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s' %", "heatmap axm = fig.add_axes([0.23, 0.1, 0.6, 0.6]) # x-pos, y-pos, width, height cax", "axd = fig.add_axes([0.09, 0.1, 0.2, 0.6]) # note: for matplotlib < v1.5.1, please", "fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() # ## Applying agglomerative clustering", "squareform distance matrix, which would yield different distance values although the overall clustering", "plt.show() # ## Attaching dendrograms to a heat map # plot row dendrogram", "(i + 1) # for i in range(row_clusters.shape[0])]) # 2. correct approach: Condensed", "print('Cluster labels: %s' % labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X)", "on a distance matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist) # We", "plt.figure(figsize=(8, 8), facecolor='white') axd = fig.add_axes([0.09, 0.1, 0.2, 0.6]) # note: for matplotlib", "'distance', 'no. of items in clust.'], index=['cluster %d' % (i + 1) for", "as a hierarchical tree # ## Grouping clusters in bottom-up fashion np.random.seed(123) variables", "i in range(row_clusters.shape[0])]) # make dendrogram row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) plt.tight_layout()", "3])*10 df = pd.DataFrame(X, columns=variables, index=labels) print(df) # ## Performing hierarchical clustering on", "the overall clustering could be the same. # 1. incorrect approach: Squareform distance", "# for i in range(row_clusters.shape[0])]) # 2. 
correct approach: Condensed distance matrix row_clusters", "axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() # ## Applying agglomerative clustering via", "metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label 1', 'row label 2', # 'distance', 'no. of", "could be the same. # 1. incorrect approach: Squareform distance matrix #row_clusters =", "make dendrogram row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() #", "items in clust.'], # index=['cluster %d' % (i + 1) # for i", "index=['cluster %d' % (i + 1) for i in range(row_clusters.shape[0])]) # make dendrogram", "label 2', 'distance', 'no. of items in clust.'], index=['cluster %d' % (i +", "although the overall clustering could be the same. # 1. incorrect approach: Squareform", "import pandas as pd import matplotlib.pyplot as plt from scipy.spatial.distance import pdist, squareform", "index=labels) print(row_dist) # We can either pass a condensed distance matrix (upper triangular)", "reorder data with respect to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove", "clustering via scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels:", "1', 'row label 2', 'distance', 'no. of items in clust.'], index=['cluster %d' %", "row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() # ## Attaching", "metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no. 
of items in", "heat map # plot row dendrogram fig = plt.figure(figsize=(8, 8), facecolor='white') axd =", "print(row_dist) # We can either pass a condensed distance matrix (upper triangular) from", "numpy as np import pandas as pd import matplotlib.pyplot as plt from scipy.spatial.distance", "overall clustering could be the same. # 1. incorrect approach: Squareform distance matrix", "axes spines from dendrogram for i in axd.spines.values(): i.set_visible(False) # plot heatmap axm", "0.2, 0.6]) # note: for matplotlib < v1.5.1, please use orientation='right' row_dendr =", "axd.spines.values(): i.set_visible(False) # plot heatmap axm = fig.add_axes([0.23, 0.1, 0.6, 0.6]) # x-pos,", "bottom-up fashion np.random.seed(123) variables = ['X', 'Y', 'Z'] labels = ['ID_0', 'ID_1', 'ID_2',", "clust.'], # index=['cluster %d' % (i + 1) # for i in range(row_clusters.shape[0])])", "= fig.add_axes([0.23, 0.1, 0.6, 0.6]) # x-pos, y-pos, width, height cax = axm.matshow(df_rowclust,", "# We can either pass a condensed distance matrix (upper triangular) from the", "#row_clusters = linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label 1', 'row label 2',", "Organizing clusters as a hierarchical tree # ## Grouping clusters in bottom-up fashion", "dendrogram from sklearn.cluster import AgglomerativeClustering # # Organizing clusters as a hierarchical tree", "as np import pandas as pd import matplotlib.pyplot as plt from scipy.spatial.distance import", "'row label 2', # 'distance', 'no. of items in clust.'], # index=['cluster %d'", "plot heatmap axm = fig.add_axes([0.23, 0.1, 0.6, 0.6]) # x-pos, y-pos, width, height", "correct approach: Input matrix row_clusters = linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1',", "+ 1) # for i in range(row_clusters.shape[0])]) # 2. 
correct approach: Condensed distance", "in range(row_clusters.shape[0])]) # make dendrogram row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean", "= ac.fit_predict(X) print('Cluster labels: %s' % labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete') labels", "i in range(row_clusters.shape[0])]) # 3. correct approach: Input matrix row_clusters = linkage(df.values, method='complete',", "index=['cluster %d' % (i + 1) for i in range(row_clusters.shape[0])]) # 3. correct", "'Y', 'Z'] labels = ['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4'] X = np.random.random_sample([5, 3])*10", "agglomerative clustering via scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster", "linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no. of", "row_clusters = linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance',", "x-pos, y-pos, width, height cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns))", "width, height cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] +", "However, we should not pass the squareform distance matrix, which would yield different", "we can pass the \"original\" data array and define the `metric='euclidean'` argument in", "function, or we can pass the \"original\" data array and define the `metric='euclidean'`", "note: for matplotlib < v1.5.1, please use orientation='right' row_dendr = dendrogram(row_clusters, orientation='left') #", "We can either pass a condensed distance matrix (upper triangular) from the `pdist`", 
"triangular) from the `pdist` function, or we can pass the \"original\" data array", "# 'distance', 'no. of items in clust.'], # index=['cluster %d' % (i +", "cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() # ## Applying agglomerative", "% (i + 1) for i in range(row_clusters.shape[0])]) # make dendrogram row_dendr =", "'ID_2', 'ID_3', 'ID_4'] X = np.random.random_sample([5, 3])*10 df = pd.DataFrame(X, columns=variables, index=labels) print(df)", "+ list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() # ## Applying agglomerative clustering via scikit-learn", "= dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() # ## Attaching dendrograms", "plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() # ## Attaching dendrograms to a heat map #", "matrix row_clusters = linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2',", "pass the \"original\" data array and define the `metric='euclidean'` argument in `linkage`. However,", "height cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index))", "in clust.'], index=['cluster %d' % (i + 1) for i in range(row_clusters.shape[0])]) #", "in clust.'], # index=['cluster %d' % (i + 1) # for i in", "squareform from scipy.cluster.hierarchy import linkage, dendrogram from sklearn.cluster import AgglomerativeClustering # # Organizing", "Squareform distance matrix #row_clusters = linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label 1',", "distance values although the overall clustering could be the same. # 1. 
incorrect", ") plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() # ## Attaching dendrograms to a heat map", "%d' % (i + 1) for i in range(row_clusters.shape[0])]) # make dendrogram row_dendr", "columns=['row label 1', 'row label 2', 'distance', 'no. of items in clust.'], index=['cluster", "should not pass the squareform distance matrix, which would yield different distance values", "affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s' % labels) ac = AgglomerativeClustering(n_clusters=2,", "= linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no.", "# 2. correct approach: Condensed distance matrix row_clusters = linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters,", "AgglomerativeClustering # # Organizing clusters as a hierarchical tree # ## Grouping clusters", "dendrogram for i in axd.spines.values(): i.set_visible(False) # plot heatmap axm = fig.add_axes([0.23, 0.1,", "import linkage, dendrogram from sklearn.cluster import AgglomerativeClustering # # Organizing clusters as a", "plt.ylabel('Euclidean distance') plt.show() # ## Attaching dendrograms to a heat map # plot", "in `linkage`. However, we should not pass the squareform distance matrix, which would", "0.1, 0.6, 0.6]) # x-pos, y-pos, width, height cax = axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r')", "8), facecolor='white') axd = fig.add_axes([0.09, 0.1, 0.2, 0.6]) # note: for matplotlib <", "= ['X', 'Y', 'Z'] labels = ['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4'] X =", "of items in clust.'], # index=['cluster %d' % (i + 1) # for", "a condensed distance matrix (upper triangular) from the `pdist` function, or we can", "'row label 2', 'distance', 'no. 
of items in clust.'], index=['cluster %d' % (i", "import AgglomerativeClustering # # Organizing clusters as a hierarchical tree # ## Grouping", "= fig.add_axes([0.09, 0.1, 0.2, 0.6]) # note: for matplotlib < v1.5.1, please use", "axm.matshow(df_rowclust, interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() # ##", "approach: Input matrix row_clusters = linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1', 'row", "for i in range(row_clusters.shape[0])]) # 3. correct approach: Input matrix row_clusters = linkage(df.values,", "same. # 1. incorrect approach: Squareform distance matrix #row_clusters = linkage(row_dist, method='complete', metric='euclidean')", "plt.show() # ## Applying agglomerative clustering via scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete')", "# ## Grouping clusters in bottom-up fashion np.random.seed(123) variables = ['X', 'Y', 'Z']", "distance matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist) # We can either", "we should not pass the squareform distance matrix, which would yield different distance", "# reorder data with respect to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) #", "= linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label 1', 'row label 2', #", "define the `metric='euclidean'` argument in `linkage`. However, we should not pass the squareform", "# ## Performing hierarchical clustering on a distance matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')),", "# columns=['row label 1', 'row label 2', # 'distance', 'no. of items in", "(i + 1) for i in range(row_clusters.shape[0])]) # 3. 
correct approach: Input matrix", "# # Organizing clusters as a hierarchical tree # ## Grouping clusters in", "via scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s'", "metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no. of items", "(upper triangular) from the `pdist` function, or we can pass the \"original\" data", "['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4'] X = np.random.random_sample([5, 3])*10 df = pd.DataFrame(X, columns=variables,", "in bottom-up fashion np.random.seed(123) variables = ['X', 'Y', 'Z'] labels = ['ID_0', 'ID_1',", "can either pass a condensed distance matrix (upper triangular) from the `pdist` function,", "clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove axes spines from dendrogram for", "matplotlib.pyplot as plt from scipy.spatial.distance import pdist, squareform from scipy.cluster.hierarchy import linkage, dendrogram", "fig = plt.figure(figsize=(8, 8), facecolor='white') axd = fig.add_axes([0.09, 0.1, 0.2, 0.6]) # note:", "linkage, dendrogram from sklearn.cluster import AgglomerativeClustering # # Organizing clusters as a hierarchical", "orientation='left') # reorder data with respect to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([])", "matrix (upper triangular) from the `pdist` function, or we can pass the \"original\"", "a hierarchical tree # ## Grouping clusters in bottom-up fashion np.random.seed(123) variables =", "pd import matplotlib.pyplot as plt from scipy.spatial.distance import pdist, squareform from scipy.cluster.hierarchy import", "fig.add_axes([0.09, 0.1, 0.2, 0.6]) # note: for matplotlib < v1.5.1, please use orientation='right'", "## Attaching dendrograms to a heat map # plot row dendrogram fig =", "either pass a condensed distance matrix (upper 
triangular) from the `pdist` function, or", "items in clust.'], index=['cluster %d' % (i + 1) for i in range(row_clusters.shape[0])])", "interpolation='nearest', cmap='hot_r') fig.colorbar(cax) axm.set_xticklabels([''] + list(df_rowclust.columns)) axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() # ## Applying", "+ 1) for i in range(row_clusters.shape[0])]) # make dendrogram row_dendr = dendrogram(row_clusters, labels=labels,", "np import pandas as pd import matplotlib.pyplot as plt from scipy.spatial.distance import pdist,", "pandas as pd import matplotlib.pyplot as plt from scipy.spatial.distance import pdist, squareform from", "= pd.DataFrame(X, columns=variables, index=labels) print(df) # ## Performing hierarchical clustering on a distance", "pass the squareform distance matrix, which would yield different distance values although the", "`metric='euclidean'` argument in `linkage`. However, we should not pass the squareform distance matrix,", "in range(row_clusters.shape[0])]) # 3. correct approach: Input matrix row_clusters = linkage(df.values, method='complete', metric='euclidean')", "scipy.spatial.distance import pdist, squareform from scipy.cluster.hierarchy import linkage, dendrogram from sklearn.cluster import AgglomerativeClustering", "= plt.figure(figsize=(8, 8), facecolor='white') axd = fig.add_axes([0.09, 0.1, 0.2, 0.6]) # note: for", "with respect to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove axes spines", "# 3. correct approach: Input matrix row_clusters = linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row", "labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s' %", "+ 1) for i in range(row_clusters.shape[0])]) # 3. 
correct approach: Input matrix row_clusters", "## Applying agglomerative clustering via scikit-learn ac = AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels =", "matrix row_clusters = linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2',", "approach: Condensed distance matrix row_clusters = linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label 1',", "tree # ## Grouping clusters in bottom-up fashion np.random.seed(123) variables = ['X', 'Y',", "label 1', 'row label 2', 'distance', 'no. of items in clust.'], index=['cluster %d'", "# 1. incorrect approach: Squareform distance matrix #row_clusters = linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters,", "dendrogram row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() # ##", "color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() # ## Attaching dendrograms to a heat", "for i in range(row_clusters.shape[0])]) # make dendrogram row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf )", "matrix #row_clusters = linkage(row_dist, method='complete', metric='euclidean') #pd.DataFrame(row_clusters, # columns=['row label 1', 'row label", "%d' % (i + 1) for i in range(row_clusters.shape[0])]) # 3. correct approach:", "data array and define the `metric='euclidean'` argument in `linkage`. However, we should not", "np.random.seed(123) variables = ['X', 'Y', 'Z'] labels = ['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4']", "'ID_4'] X = np.random.random_sample([5, 3])*10 df = pd.DataFrame(X, columns=variables, index=labels) print(df) # ##", "`linkage`. 
However, we should not pass the squareform distance matrix, which would yield", "axm.set_yticklabels([''] + list(df_rowclust.index)) plt.show() # ## Applying agglomerative clustering via scikit-learn ac =", "clustering on a distance matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist) #", "remove axes spines from dendrogram for i in axd.spines.values(): i.set_visible(False) # plot heatmap", "label 2', # 'distance', 'no. of items in clust.'], # index=['cluster %d' %", "import matplotlib.pyplot as plt from scipy.spatial.distance import pdist, squareform from scipy.cluster.hierarchy import linkage,", "'Z'] labels = ['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4'] X = np.random.random_sample([5, 3])*10 df", "'ID_3', 'ID_4'] X = np.random.random_sample([5, 3])*10 df = pd.DataFrame(X, columns=variables, index=labels) print(df) #", "= df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove axes spines from dendrogram for i in", "distance matrix, which would yield different distance values although the overall clustering could", "method='complete') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no. of items in", "map # plot row dendrogram fig = plt.figure(figsize=(8, 8), facecolor='white') axd = fig.add_axes([0.09,", "2. 
correct approach: Condensed distance matrix row_clusters = linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row", "which would yield different distance values although the overall clustering could be the", "# plot row dendrogram fig = plt.figure(figsize=(8, 8), facecolor='white') axd = fig.add_axes([0.09, 0.1,", "plot row dendrogram fig = plt.figure(figsize=(8, 8), facecolor='white') axd = fig.add_axes([0.09, 0.1, 0.2,", "clust.'], index=['cluster %d' % (i + 1) for i in range(row_clusters.shape[0])]) # make", "0.1, 0.2, 0.6]) # note: for matplotlib < v1.5.1, please use orientation='right' row_dendr", "pass a condensed distance matrix (upper triangular) from the `pdist` function, or we", "1', 'row label 2', # 'distance', 'no. of items in clust.'], # index=['cluster", "the `metric='euclidean'` argument in `linkage`. However, we should not pass the squareform distance", "X = np.random.random_sample([5, 3])*10 df = pd.DataFrame(X, columns=variables, index=labels) print(df) # ## Performing", "range(row_clusters.shape[0])]) # make dendrogram row_dendr = dendrogram(row_clusters, labels=labels, color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean distance')", "distance matrix (upper triangular) from the `pdist` function, or we can pass the", "or we can pass the \"original\" data array and define the `metric='euclidean'` argument", "2', # 'distance', 'no. 
of items in clust.'], # index=['cluster %d' % (i", "0.6]) # note: for matplotlib < v1.5.1, please use orientation='right' row_dendr = dendrogram(row_clusters,", "correct approach: Condensed distance matrix row_clusters = linkage(pdist(df, metric='euclidean'), method='complete') pd.DataFrame(row_clusters, columns=['row label", "labels = ['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4'] X = np.random.random_sample([5, 3])*10 df =", "hierarchical clustering on a distance matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist)", "not pass the squareform distance matrix, which would yield different distance values although", "# ## Attaching dendrograms to a heat map # plot row dendrogram fig", "data with respect to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove axes", "Performing hierarchical clustering on a distance matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels)", "= pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist) # We can either pass a condensed", "dendrogram fig = plt.figure(figsize=(8, 8), facecolor='white') axd = fig.add_axes([0.09, 0.1, 0.2, 0.6]) #", "for matplotlib < v1.5.1, please use orientation='right' row_dendr = dendrogram(row_clusters, orientation='left') # reorder", "# Organizing clusters as a hierarchical tree # ## Grouping clusters in bottom-up", "metric='euclidean')), columns=labels, index=labels) print(row_dist) # We can either pass a condensed distance matrix", "i in range(row_clusters.shape[0])]) # 2. 
correct approach: Condensed distance matrix row_clusters = linkage(pdist(df,", "Input matrix row_clusters = linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1', 'row label", "df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([]) axd.set_yticks([]) # remove axes spines from dendrogram for i in axd.spines.values():", "from the `pdist` function, or we can pass the \"original\" data array and", "label 1', 'row label 2', # 'distance', 'no. of items in clust.'], #", "orientation='right' row_dendr = dendrogram(row_clusters, orientation='left') # reorder data with respect to clustering df_rowclust", "matplotlib < v1.5.1, please use orientation='right' row_dendr = dendrogram(row_clusters, orientation='left') # reorder data", "'no. of items in clust.'], index=['cluster %d' % (i + 1) for i", "ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s' % labels)", "axm = fig.add_axes([0.23, 0.1, 0.6, 0.6]) # x-pos, y-pos, width, height cax =", "variables = ['X', 'Y', 'Z'] labels = ['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4'] X", "dendrogram(row_clusters, orientation='left') # reorder data with respect to clustering df_rowclust = df.iloc[row_dendr['leaves'][::-1]] axd.set_xticks([])", "## Performing hierarchical clustering on a distance matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels,", "the squareform distance matrix, which would yield different distance values although the overall", "a distance matrix row_dist = pd.DataFrame(squareform(pdist(df, metric='euclidean')), columns=labels, index=labels) print(row_dist) # We can", "and define the `metric='euclidean'` argument in `linkage`. 
However, we should not pass the", "linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s' % labels) ac = AgglomerativeClustering(n_clusters=2, affinity='euclidean',", "# index=['cluster %d' % (i + 1) # for i in range(row_clusters.shape[0])]) #", "dendrograms to a heat map # plot row dendrogram fig = plt.figure(figsize=(8, 8),", "use orientation='right' row_dendr = dendrogram(row_clusters, orientation='left') # reorder data with respect to clustering", "= AgglomerativeClustering(n_clusters=3, affinity='euclidean', linkage='complete') labels = ac.fit_predict(X) print('Cluster labels: %s' % labels) ac", "= ['ID_0', 'ID_1', 'ID_2', 'ID_3', 'ID_4'] X = np.random.random_sample([5, 3])*10 df = pd.DataFrame(X,", "## Grouping clusters in bottom-up fashion np.random.seed(123) variables = ['X', 'Y', 'Z'] labels", "condensed distance matrix (upper triangular) from the `pdist` function, or we can pass", "index=labels) print(df) # ## Performing hierarchical clustering on a distance matrix row_dist =", "columns=labels, index=labels) print(row_dist) # We can either pass a condensed distance matrix (upper", "scipy.cluster.hierarchy import linkage, dendrogram from sklearn.cluster import AgglomerativeClustering # # Organizing clusters as", "linkage(df.values, method='complete', metric='euclidean') pd.DataFrame(row_clusters, columns=['row label 1', 'row label 2', 'distance', 'no. of", "(i + 1) for i in range(row_clusters.shape[0])]) # make dendrogram row_dendr = dendrogram(row_clusters,", "spines from dendrogram for i in axd.spines.values(): i.set_visible(False) # plot heatmap axm =", "matrix, which would yield different distance values although the overall clustering could be", "facecolor='white') axd = fig.add_axes([0.09, 0.1, 0.2, 0.6]) # note: for matplotlib < v1.5.1,", "'no. 
of items in clust.'], # index=['cluster %d' % (i + 1) #", "row_dendr = dendrogram(row_clusters, orientation='left') # reorder data with respect to clustering df_rowclust =", "for i in range(row_clusters.shape[0])]) # 2. correct approach: Condensed distance matrix row_clusters =", "labels=labels, color_threshold=np.inf ) plt.tight_layout() plt.ylabel('Euclidean distance') plt.show() # ## Attaching dendrograms to a", "i.set_visible(False) # plot heatmap axm = fig.add_axes([0.23, 0.1, 0.6, 0.6]) # x-pos, y-pos,", "columns=variables, index=labels) print(df) # ## Performing hierarchical clustering on a distance matrix row_dist" ]
[ "== 1: #if counts of each charcter is equal to 1 return character", "a Counter where it kept track of the # of times each character", "the first time that a non repeating charactter is in the string (s)", "string with underscore def first_not_repeating_character(s): counts = Counter(s) # a new counter, not", "= which means its an O(n) operation, S 0(n) - Storing in Counter", "not empty, contains string (s) #ex: Counter({'a': 3, 'b': 0, 'c': -3, 'd':", "Counter(s) # a new counter, not empty, contains string (s) #ex: Counter({'a': 3,", "return that first chracter - otherwise \"_\" if there is no instance of", "as dictionary values. #we want to return the first time that a non", "value (key,value) - Created a Counter where it kept track of the #", "as value (key,value) - Created a Counter where it kept track of the", "times where n is the number of unique character. find a o(1) situation", "elements as keys and their corresponding counts as value (key,value) - Created a", "of each charcter is equal to 1 return character #that's the input we", "counts[character] == 1: #if counts of each charcter is equal to 1 return", "#if none, return string with underscore def first_not_repeating_character(s): counts = Counter(s) # a", "where n is the number of unique characters Counter first goes through list", "string - checked if character appeared once, and if it did, i wanted", "# of times it appears - iterated through a given string - checked", "in the string (s) #if none, return string with underscore def first_not_repeating_character(s): counts", "to return that first chracter - otherwise \"_\" if there is no instance", "through each char n times = which means its an O(n) operation, S", "knew there was a library from collections called Counter - Counter stores elements", "even though 0(n),we can make it go through the list once. 
- not", "Counter({'a': 3, 'b': 0, 'c': -3, 'd': -6}) for character in s: #iterate", "- otherwise \"_\" if there is no instance of a non-repeating character \"\"\"", "collection where elements are stored as dictionary keys and # their counts are", "string if counts[character] == 1: #if counts of each charcter is equal to", "\"\"\" \"\"\" - i knew there was a library from collections called Counter", "where elements are stored as dictionary keys and # their counts are stored", "it kept track of the # of times each character appeared - key", "with underscore def first_not_repeating_character(s): counts = Counter(s) # a new counter, not empty,", "Counter - Counter stores elements as keys and their corresponding counts as value", "also goes through each char n times = which means its an O(n)", "a library from collections called Counter - Counter stores elements as keys and", "counts of each charcter is equal to 1 return character #that's the input", "the input we receive return \"_\" #else return None \"\"\" T 0(n) -", "(key,value) - Created a Counter where it kept track of the # of", "the number of unique characters Counter first goes through list o(n) + for", "#It is a collection where elements are stored as dictionary keys and #", "-6}) for character in s: #iterate through string if counts[character] == 1: #if", "===== even though 0(n),we can make it go through the list once. 
-", "wanted to return that first chracter - otherwise \"_\" if there is no", "corresponding counts as value (key,value) - Created a Counter where it kept track", "0, 'c': -3, 'd': -6}) for character in s: #iterate through string if", "\"_\" #else return None \"\"\" T 0(n) - where n is the number", "goes through list o(n) + for loop also goes through each char n", "times = which means its an O(n) operation, S 0(n) - Storing in", "- not possible \"\"\" \"\"\" - i knew there was a library from", "input we receive return \"_\" #else return None \"\"\" T 0(n) - where", "number of unique characters Counter first goes through list o(n) + for loop", "kept track of the # of times each character appeared - key =", "time that a non repeating charactter is in the string (s) #if none,", "as dictionary keys and # their counts are stored as dictionary values. #we", "stored as dictionary values. #we want to return the first time that a", "collections import Counter #dict subclass for counting hashable objects {(Key, value)} #It is", "Counter first goes through list o(n) + for loop also goes through each", "none, return string with underscore def first_not_repeating_character(s): counts = Counter(s) # a new", "#that's the input we receive return \"_\" #else return None \"\"\" T 0(n)", "in s: #iterate through string if counts[character] == 1: #if counts of each", "#else return None \"\"\" T 0(n) - where n is the number of", "an O(n) operation, S 0(n) - Storing in Counter n times where n", "in Counter n times where n is the number of unique character. find", "track of the # of times each character appeared - key = character,", "values. 
#we want to return the first time that a non repeating charactter", "value = # of times it appears - iterated through a given string", "hashable objects {(Key, value)} #It is a collection where elements are stored as", "counts = Counter(s) # a new counter, not empty, contains string (s) #ex:", "= # of times it appears - iterated through a given string -", "Counter n times where n is the number of unique character. find a", "is the number of unique characters Counter first goes through list o(n) +", "each charcter is equal to 1 return character #that's the input we receive", "1 return character #that's the input we receive return \"_\" #else return None", "number of unique character. find a o(1) situation ===== even though 0(n),we can", "there was a library from collections called Counter - Counter stores elements as", "objects {(Key, value)} #It is a collection where elements are stored as dictionary", "Counter stores elements as keys and their corresponding counts as value (key,value) -", "<reponame>jvillalp/Leetcodepractice<filename>CodeChallange.py/codeone.py from collections import Counter #dict subclass for counting hashable objects {(Key, value)}", "'b': 0, 'c': -3, 'd': -6}) for character in s: #iterate through string", "3, 'b': 0, 'c': -3, 'd': -6}) for character in s: #iterate through", "dictionary values. 
#we want to return the first time that a non repeating", "(s) #ex: Counter({'a': 3, 'b': 0, 'c': -3, 'd': -6}) for character in", "not possible \"\"\" \"\"\" - i knew there was a library from collections", "# of times each character appeared - key = character, value = #", "charactter is in the string (s) #if none, return string with underscore def", "string (s) #if none, return string with underscore def first_not_repeating_character(s): counts = Counter(s)", "-3, 'd': -6}) for character in s: #iterate through string if counts[character] ==", "first chracter - otherwise \"_\" if there is no instance of a non-repeating", "first goes through list o(n) + for loop also goes through each char", "counting hashable objects {(Key, value)} #It is a collection where elements are stored", "through the list once. - not possible \"\"\" \"\"\" - i knew there", "appeared once, and if it did, i wanted to return that first chracter", "= Counter(s) # a new counter, not empty, contains string (s) #ex: Counter({'a':", "if counts[character] == 1: #if counts of each charcter is equal to 1", "return None \"\"\" T 0(n) - where n is the number of unique", "of unique characters Counter first goes through list o(n) + for loop also", "operation, S 0(n) - Storing in Counter n times where n is the", "counter, not empty, contains string (s) #ex: Counter({'a': 3, 'b': 0, 'c': -3,", "though 0(n),we can make it go through the list once. - not possible", "keys and # their counts are stored as dictionary values. #we want to", "goes through each char n times = which means its an O(n) operation,", "unique character. 
find a o(1) situation ===== even though 0(n),we can make it", "first_not_repeating_character(s): counts = Counter(s) # a new counter, not empty, contains string (s)", "(s) #if none, return string with underscore def first_not_repeating_character(s): counts = Counter(s) #", "first time that a non repeating charactter is in the string (s) #if", "value)} #It is a collection where elements are stored as dictionary keys and", "#we want to return the first time that a non repeating charactter is", "- key = character, value = # of times it appears - iterated", "unique characters Counter first goes through list o(n) + for loop also goes", "it appears - iterated through a given string - checked if character appeared", "string (s) #ex: Counter({'a': 3, 'b': 0, 'c': -3, 'd': -6}) for character", "times each character appeared - key = character, value = # of times", "to return the first time that a non repeating charactter is in the", "iterated through a given string - checked if character appeared once, and if", "if it did, i wanted to return that first chracter - otherwise \"_\"", "from collections import Counter #dict subclass for counting hashable objects {(Key, value)} #It", "receive return \"_\" #else return None \"\"\" T 0(n) - where n is", "times it appears - iterated through a given string - checked if character", "that a non repeating charactter is in the string (s) #if none, return", "character #that's the input we receive return \"_\" #else return None \"\"\" T", "n times where n is the number of unique character. find a o(1)", "find a o(1) situation ===== even though 0(n),we can make it go through", "chracter - otherwise \"_\" if there is no instance of a non-repeating character", "underscore def first_not_repeating_character(s): counts = Counter(s) # a new counter, not empty, contains", "the number of unique character. find a o(1) situation ===== even though 0(n),we", "appeared - key = character, value = # of times it appears -", "of unique character. 
find a o(1) situation ===== even though 0(n),we can make", "# a new counter, not empty, contains string (s) #ex: Counter({'a': 3, 'b':", "Storing in Counter n times where n is the number of unique character.", "#ex: Counter({'a': 3, 'b': 0, 'c': -3, 'd': -6}) for character in s:", "return the first time that a non repeating charactter is in the string", "character appeared - key = character, value = # of times it appears", "0(n) - where n is the number of unique characters Counter first goes", "O(n) operation, S 0(n) - Storing in Counter n times where n is", "- Storing in Counter n times where n is the number of unique", "a collection where elements are stored as dictionary keys and # their counts", "stored as dictionary keys and # their counts are stored as dictionary values.", "and # their counts are stored as dictionary values. #we want to return", "'d': -6}) for character in s: #iterate through string if counts[character] == 1:", "#iterate through string if counts[character] == 1: #if counts of each charcter is", "called Counter - Counter stores elements as keys and their corresponding counts as", "go through the list once. - not possible \"\"\" \"\"\" - i knew", "i wanted to return that first chracter - otherwise \"_\" if there is", "= character, value = # of times it appears - iterated through a", "make it go through the list once. - not possible \"\"\" \"\"\" -", "counts are stored as dictionary values. #we want to return the first time", "repeating charactter is in the string (s) #if none, return string with underscore", "S 0(n) - Storing in Counter n times where n is the number", "did, i wanted to return that first chracter - otherwise \"_\" if there", "for character in s: #iterate through string if counts[character] == 1: #if counts", "want to return the first time that a non repeating charactter is in", "their counts are stored as dictionary values. 
#we want to return the first", "the # of times each character appeared - key = character, value =", "0(n),we can make it go through the list once. - not possible \"\"\"", "#if counts of each charcter is equal to 1 return character #that's the", "subclass for counting hashable objects {(Key, value)} #It is a collection where elements", "None \"\"\" T 0(n) - where n is the number of unique characters", "'c': -3, 'd': -6}) for character in s: #iterate through string if counts[character]", "return \"_\" #else return None \"\"\" T 0(n) - where n is the", "is a collection where elements are stored as dictionary keys and # their", "0(n) - Storing in Counter n times where n is the number of", "of the # of times each character appeared - key = character, value", "+ for loop also goes through each char n times = which means", "which means its an O(n) operation, S 0(n) - Storing in Counter n", "are stored as dictionary keys and # their counts are stored as dictionary", "T 0(n) - where n is the number of unique characters Counter first", "where n is the number of unique character. find a o(1) situation =====", "list o(n) + for loop also goes through each char n times =", "from collections called Counter - Counter stores elements as keys and their corresponding", "each character appeared - key = character, value = # of times it", "def first_not_repeating_character(s): counts = Counter(s) # a new counter, not empty, contains string", "through list o(n) + for loop also goes through each char n times", "it did, i wanted to return that first chracter - otherwise \"_\" if", "we receive return \"_\" #else return None \"\"\" T 0(n) - where n", "dictionary keys and # their counts are stored as dictionary values. #we want", "its an O(n) operation, S 0(n) - Storing in Counter n times where", "a given string - checked if character appeared once, and if it did,", "through a given string - checked if character appeared once, and if it", "are stored as dictionary values. 
#we want to return the first time that", "# their counts are stored as dictionary values. #we want to return the", "collections called Counter - Counter stores elements as keys and their corresponding counts", "through string if counts[character] == 1: #if counts of each charcter is equal", "library from collections called Counter - Counter stores elements as keys and their", "the string (s) #if none, return string with underscore def first_not_repeating_character(s): counts =", "once, and if it did, i wanted to return that first chracter -", "Created a Counter where it kept track of the # of times each", "if character appeared once, and if it did, i wanted to return that", "- checked if character appeared once, and if it did, i wanted to", "where it kept track of the # of times each character appeared -", "was a library from collections called Counter - Counter stores elements as keys", "o(n) + for loop also goes through each char n times = which", "as keys and their corresponding counts as value (key,value) - Created a Counter", "a non repeating charactter is in the string (s) #if none, return string", "- Created a Counter where it kept track of the # of times", "- i knew there was a library from collections called Counter - Counter", "a new counter, not empty, contains string (s) #ex: Counter({'a': 3, 'b': 0,", "o(1) situation ===== even though 0(n),we can make it go through the list", "non repeating charactter is in the string (s) #if none, return string with", "return string with underscore def first_not_repeating_character(s): counts = Counter(s) # a new counter,", "the list once. 
- not possible \"\"\" \"\"\" - i knew there was", "- Counter stores elements as keys and their corresponding counts as value (key,value)", "import Counter #dict subclass for counting hashable objects {(Key, value)} #It is a", "each char n times = which means its an O(n) operation, S 0(n)", "\"\"\" T 0(n) - where n is the number of unique characters Counter", "means its an O(n) operation, S 0(n) - Storing in Counter n times", "character in s: #iterate through string if counts[character] == 1: #if counts of", "- where n is the number of unique characters Counter first goes through", "is the number of unique character. find a o(1) situation ===== even though", "Counter where it kept track of the # of times each character appeared", "contains string (s) #ex: Counter({'a': 3, 'b': 0, 'c': -3, 'd': -6}) for", "keys and their corresponding counts as value (key,value) - Created a Counter where", "that first chracter - otherwise \"_\" if there is no instance of a", "elements are stored as dictionary keys and # their counts are stored as", "empty, contains string (s) #ex: Counter({'a': 3, 'b': 0, 'c': -3, 'd': -6})", "is equal to 1 return character #that's the input we receive return \"_\"", "return character #that's the input we receive return \"_\" #else return None \"\"\"", "once. - not possible \"\"\" \"\"\" - i knew there was a library", "new counter, not empty, contains string (s) #ex: Counter({'a': 3, 'b': 0, 'c':", "\"\"\" - i knew there was a library from collections called Counter -", "given string - checked if character appeared once, and if it did, i", "can make it go through the list once. - not possible \"\"\" \"\"\"", "char n times = which means its an O(n) operation, S 0(n) -", "character, value = # of times it appears - iterated through a given", "characters Counter first goes through list o(n) + for loop also goes through", "key = character, value = # of times it appears - iterated through", "character. 
find a o(1) situation ===== even though 0(n),we can make it go", "their corresponding counts as value (key,value) - Created a Counter where it kept", "n is the number of unique character. find a o(1) situation ===== even", "list once. - not possible \"\"\" \"\"\" - i knew there was a", "and if it did, i wanted to return that first chracter - otherwise", "- iterated through a given string - checked if character appeared once, and", "checked if character appeared once, and if it did, i wanted to return", "1: #if counts of each charcter is equal to 1 return character #that's", "to 1 return character #that's the input we receive return \"_\" #else return", "counts as value (key,value) - Created a Counter where it kept track of", "it go through the list once. - not possible \"\"\" \"\"\" - i", "a o(1) situation ===== even though 0(n),we can make it go through the", "equal to 1 return character #that's the input we receive return \"_\" #else", "loop also goes through each char n times = which means its an", "#dict subclass for counting hashable objects {(Key, value)} #It is a collection where", "character appeared once, and if it did, i wanted to return that first", "situation ===== even though 0(n),we can make it go through the list once.", "appears - iterated through a given string - checked if character appeared once,", "Counter #dict subclass for counting hashable objects {(Key, value)} #It is a collection", "n times = which means its an O(n) operation, S 0(n) - Storing", "stores elements as keys and their corresponding counts as value (key,value) - Created", "s: #iterate through string if counts[character] == 1: #if counts of each charcter", "of times it appears - iterated through a given string - checked if", "for counting hashable objects {(Key, value)} #It is a collection where elements are", "i knew there was a library from collections called Counter - Counter stores", "{(Key, value)} #It is a collection where elements are stored as dictionary keys", "for loop 
also goes through each char n times = which means its", "is in the string (s) #if none, return string with underscore def first_not_repeating_character(s):", "of times each character appeared - key = character, value = # of", "charcter is equal to 1 return character #that's the input we receive return", "n is the number of unique characters Counter first goes through list o(n)", "and their corresponding counts as value (key,value) - Created a Counter where it", "possible \"\"\" \"\"\" - i knew there was a library from collections called" ]
[ "the # first function in __all__ __all__ = [\"main\"] def say_hello(name): print(\"Hello \"", "\"\"\"My hello world application\"\"\" args = sys.argv if len(args) > 1: name =", "= \"0.1.1\" # This will set the version __license__ = \"MIT\" # This", "print_function import sys __version__ = \"0.1.1\" # This will set the version __license__", "args = sys.argv if len(args) > 1: name = args[1] else: name =", "first function in __all__ __all__ = [\"main\"] def say_hello(name): print(\"Hello \" + name", "+ \"!\") def main(): \"\"\"My hello world application\"\"\" args = sys.argv if len(args)", "from __future__ import print_function import sys __version__ = \"0.1.1\" # This will set", "[\"main\"] def say_hello(name): print(\"Hello \" + name + \"!\") def main(): \"\"\"My hello", "This will set the version __license__ = \"MIT\" # This sets the license", "will set the version __license__ = \"MIT\" # This sets the license #", "the docstring of the # first function in __all__ __all__ = [\"main\"] def", "= \"MIT\" # This sets the license # The package description comes from", "sys __version__ = \"0.1.1\" # This will set the version __license__ = \"MIT\"", "version __license__ = \"MIT\" # This sets the license # The package description", "This sets the license # The package description comes from the docstring of", "\"!\") def main(): \"\"\"My hello world application\"\"\" args = sys.argv if len(args) >", "\" + name + \"!\") def main(): \"\"\"My hello world application\"\"\" args =", "= sys.argv if len(args) > 1: name = args[1] else: name = \"world\"", "license # The package description comes from the docstring of the # first", "import sys __version__ = \"0.1.1\" # This will set the version __license__ =", "set the version __license__ = \"MIT\" # This sets the license # The", "\"0.1.1\" # This will set the version __license__ = \"MIT\" # This sets", "__all__ __all__ = [\"main\"] def say_hello(name): print(\"Hello \" + name + \"!\") def", "# This sets the license # The package 
description comes from the docstring", "in __all__ __all__ = [\"main\"] def say_hello(name): print(\"Hello \" + name + \"!\")", "= [\"main\"] def say_hello(name): print(\"Hello \" + name + \"!\") def main(): \"\"\"My", "name + \"!\") def main(): \"\"\"My hello world application\"\"\" args = sys.argv if", "__license__ = \"MIT\" # This sets the license # The package description comes", "the version __license__ = \"MIT\" # This sets the license # The package", "description comes from the docstring of the # first function in __all__ __all__", "def main(): \"\"\"My hello world application\"\"\" args = sys.argv if len(args) > 1:", "# The package description comes from the docstring of the # first function", "the license # The package description comes from the docstring of the #", "of the # first function in __all__ __all__ = [\"main\"] def say_hello(name): print(\"Hello", "\"MIT\" # This sets the license # The package description comes from the", "# first function in __all__ __all__ = [\"main\"] def say_hello(name): print(\"Hello \" +", "package description comes from the docstring of the # first function in __all__", "__version__ = \"0.1.1\" # This will set the version __license__ = \"MIT\" #", "__future__ import print_function import sys __version__ = \"0.1.1\" # This will set the", "from the docstring of the # first function in __all__ __all__ = [\"main\"]", "say_hello(name): print(\"Hello \" + name + \"!\") def main(): \"\"\"My hello world application\"\"\"", "print(\"Hello \" + name + \"!\") def main(): \"\"\"My hello world application\"\"\" args", "import print_function import sys __version__ = \"0.1.1\" # This will set the version", "function in __all__ __all__ = [\"main\"] def say_hello(name): print(\"Hello \" + name +", "world application\"\"\" args = sys.argv if len(args) > 1: name = args[1] else:", "application\"\"\" args = sys.argv if len(args) > 1: name = args[1] else: name", "comes from the docstring of the # first function in __all__ __all__ =", "sys.argv 
if len(args) > 1: name = args[1] else: name = \"world\" say_hello(name)", "def say_hello(name): print(\"Hello \" + name + \"!\") def main(): \"\"\"My hello world", "__all__ = [\"main\"] def say_hello(name): print(\"Hello \" + name + \"!\") def main():", "+ name + \"!\") def main(): \"\"\"My hello world application\"\"\" args = sys.argv", "<reponame>pyrelease/PyRelease from __future__ import print_function import sys __version__ = \"0.1.1\" # This will", "main(): \"\"\"My hello world application\"\"\" args = sys.argv if len(args) > 1: name", "# This will set the version __license__ = \"MIT\" # This sets the", "The package description comes from the docstring of the # first function in", "hello world application\"\"\" args = sys.argv if len(args) > 1: name = args[1]", "sets the license # The package description comes from the docstring of the", "docstring of the # first function in __all__ __all__ = [\"main\"] def say_hello(name):" ]
[ "def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr): self.emit(\"row-inserted\", (path, itr)) def", "= path pass # end of class class GenericTreeModel(object): __connected_signals__ = None def", "args) self.__connected_signals__[signal_name] = handlers return handler_id def disconnect(self, signal_name, handler_id): try: handlers =", "# @ReservedAssignment handler(self, *((args,) + user_args)) pass def set_property(self, *args, **kwargs): pass def", "def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass # TOD0! def iter_is_valid(self, itr):", "return True # TODO! def __len__(self): return len(self._model_data) pass # end of class", "class class GenericTreeModel(object): __connected_signals__ = None def __init__(self): self.__connected_signals__ = {} def connect(self,", "self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass # TOD0! def iter_is_valid(self, itr): return True #", "# ex:ts=4:sw=4:et # Copyright (c) 2013, <NAME> # All rights reserved. # Complete", "user_data): return GtkTreeIter(user_data) def get_path(self, itr): return self.on_get_path(itr.user_data) def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path))", "<NAME> # All rights reserved. # Complete license can be found in the", "signal_name, handler, *args): handlers = self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid() handlers[handler_id] = (handler,", "def connect(self, signal_name, handler, *args): handlers = self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid() handlers[handler_id]", "self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid() handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name] = handlers return", "the LICENSE file. 
from mvc.support.utils import get_new_uuid __all__ = [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\"", "Copyright (c) 2013, <NAME> # All rights reserved. # Complete license can be", "user_args) in handlers.items(): # @ReservedAssignment handler(self, *((args,) + user_args)) pass def set_property(self, *args,", "return GtkTreeIter(user_data) def get_path(self, itr): return self.on_get_path(itr.user_data) def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def", "= 0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending = lambda: False class GtkTreeIter(): def __init__(self,", "emit(self, signal_name, args=()): handlers = self.__connected_signals__.get(signal_name, {}) for id, (handler, user_args) in handlers.items():", "__connected_signals__ = None def __init__(self): self.__connected_signals__ = {} def connect(self, signal_name, handler, *args):", "try: handlers = self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except KeyError: pass return def emit(self,", "path=None): self.user_data = user_data self.path = path pass # end of class class", "row_inserted(self, path, itr): self.emit(\"row-inserted\", (path, itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self):", "license can be found in the LICENSE file. from mvc.support.utils import get_new_uuid __all__", "# coding=UTF-8 # ex:ts=4:sw=4:et # Copyright (c) 2013, <NAME> # All rights reserved.", "self.__connected_signals__[signal_name] = handlers return handler_id def disconnect(self, signal_name, handler_id): try: handlers = self.__connected_signals__.get(signal_name,", "pass return def emit(self, signal_name, args=()): handlers = self.__connected_signals__.get(signal_name, {}) for id, (handler,", "= handlers return handler_id def disconnect(self, signal_name, handler_id): try: handlers = self.__connected_signals__.get(signal_name, {})", "(c) 2013, <NAME> # All rights reserved. 
# Complete license can be found", "\"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending = lambda:", "invalidate_iters(self): pass # TOD0! def iter_is_valid(self, itr): return True # TODO! def __len__(self):", "return handler_id def disconnect(self, signal_name, handler_id): try: handlers = self.__connected_signals__.get(signal_name, {}) del handlers[handler_id]", "of class class GenericTreeModel(object): __connected_signals__ = None def __init__(self): self.__connected_signals__ = {} def", "\"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending =", "get_new_uuid __all__ = [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST", "get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr): self.emit(\"row-inserted\", (path, itr)) def row_deleted(self,", "lambda: False class GtkTreeIter(): def __init__(self, user_data, path=None): self.user_data = user_data self.path =", "def set_property(self, *args, **kwargs): pass def create_tree_iter(self, user_data): return GtkTreeIter(user_data) def get_path(self, itr):", "def __init__(self, user_data, path=None): self.user_data = user_data self.path = path pass # end", "itr): return True # TODO! def __len__(self): return len(self._model_data) pass # end of", "self.__connected_signals__ = {} def connect(self, signal_name, handler, *args): handlers = self.__connected_signals__.get(signal_name, {}) handler_id", "= self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except KeyError: pass return def emit(self, signal_name, args=()):", "{}) del handlers[handler_id] except KeyError: pass return def emit(self, signal_name, args=()): handlers =", "rights reserved. 
# Complete license can be found in the LICENSE file. from", "{} def connect(self, signal_name, handler, *args): handlers = self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid()", "= 0x00 events_pending = lambda: False class GtkTreeIter(): def __init__(self, user_data, path=None): self.user_data", "TREE_MODEL_ITERS_PERSIST = 0x00 events_pending = lambda: False class GtkTreeIter(): def __init__(self, user_data, path=None):", "reserved. # Complete license can be found in the LICENSE file. from mvc.support.utils", "= self.__connected_signals__.get(signal_name, {}) for id, (handler, user_args) in handlers.items(): # @ReservedAssignment handler(self, *((args,)", "handler(self, *((args,) + user_args)) pass def set_property(self, *args, **kwargs): pass def create_tree_iter(self, user_data):", "(handler, args) self.__connected_signals__[signal_name] = handlers return handler_id def disconnect(self, signal_name, handler_id): try: handlers", "2013, <NAME> # All rights reserved. 
# Complete license can be found in", "*((args,) + user_args)) pass def set_property(self, *args, **kwargs): pass def create_tree_iter(self, user_data): return", "def get_path(self, itr): return self.on_get_path(itr.user_data) def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path,", "0x00 events_pending = lambda: False class GtkTreeIter(): def __init__(self, user_data, path=None): self.user_data =", "path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr): self.emit(\"row-inserted\", (path, itr)) def row_deleted(self, indeces):", "def create_tree_iter(self, user_data): return GtkTreeIter(user_data) def get_path(self, itr): return self.on_get_path(itr.user_data) def get_iter(self, path):", "self.emit(\"row-inserted\", (path, itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass # TOD0!", "coding=UTF-8 # ex:ts=4:sw=4:et # Copyright (c) 2013, <NAME> # All rights reserved. #", "LICENSE file. from mvc.support.utils import get_new_uuid __all__ = [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\"", "@ReservedAssignment handler(self, *((args,) + user_args)) pass def set_property(self, *args, **kwargs): pass def create_tree_iter(self,", "handler_id = get_new_uuid() handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name] = handlers return handler_id def", "All rights reserved. 
# Complete license can be found in the LICENSE file.", "except KeyError: pass return def emit(self, signal_name, args=()): handlers = self.__connected_signals__.get(signal_name, {}) for", "handlers = self.__connected_signals__.get(signal_name, {}) for id, (handler, user_args) in handlers.items(): # @ReservedAssignment handler(self,", "handlers.items(): # @ReservedAssignment handler(self, *((args,) + user_args)) pass def set_property(self, *args, **kwargs): pass", "in handlers.items(): # @ReservedAssignment handler(self, *((args,) + user_args)) pass def set_property(self, *args, **kwargs):", "*args, **kwargs): pass def create_tree_iter(self, user_data): return GtkTreeIter(user_data) def get_path(self, itr): return self.on_get_path(itr.user_data)", "user_data, path=None): self.user_data = user_data self.path = path pass # end of class", "from mvc.support.utils import get_new_uuid __all__ = [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY", "row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass # TOD0! def iter_is_valid(self, itr): return", "pass # end of class class GenericTreeModel(object): __connected_signals__ = None def __init__(self): self.__connected_signals__", "indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass # TOD0! def iter_is_valid(self, itr): return True", "# TOD0! def iter_is_valid(self, itr): return True # TODO! def __len__(self): return len(self._model_data)", "= lambda: False class GtkTreeIter(): def __init__(self, user_data, path=None): self.user_data = user_data self.path", "__init__(self, user_data, path=None): self.user_data = user_data self.path = path pass # end of", "(path, itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass # TOD0! 
def", "user_args)) pass def set_property(self, *args, **kwargs): pass def create_tree_iter(self, user_data): return GtkTreeIter(user_data) def", "= (handler, args) self.__connected_signals__[signal_name] = handlers return handler_id def disconnect(self, signal_name, handler_id): try:", "handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name] = handlers return handler_id def disconnect(self, signal_name, handler_id):", "= [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST = 0x00", "\"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending = lambda: False class", "def __init__(self): self.__connected_signals__ = {} def connect(self, signal_name, handler, *args): handlers = self.__connected_signals__.get(signal_name,", "def row_inserted(self, path, itr): self.emit(\"row-inserted\", (path, itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def", "args=()): handlers = self.__connected_signals__.get(signal_name, {}) for id, (handler, user_args) in handlers.items(): # @ReservedAssignment", "pass def create_tree_iter(self, user_data): return GtkTreeIter(user_data) def get_path(self, itr): return self.on_get_path(itr.user_data) def get_iter(self,", "[ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending", "self.user_data = user_data self.path = path pass # end of class class GenericTreeModel(object):", "set_property(self, *args, **kwargs): pass def create_tree_iter(self, user_data): return GtkTreeIter(user_data) def get_path(self, itr): return", "id, (handler, user_args) in handlers.items(): # @ReservedAssignment handler(self, *((args,) + user_args)) pass def", "handlers[handler_id] except KeyError: pass return def emit(self, signal_name, args=()): handlers = self.__connected_signals__.get(signal_name, 
{})", "\"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending = lambda: False", "] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending = lambda: False class GtkTreeIter():", "pass def set_property(self, *args, **kwargs): pass def create_tree_iter(self, user_data): return GtkTreeIter(user_data) def get_path(self,", "# end of class class GenericTreeModel(object): __connected_signals__ = None def __init__(self): self.__connected_signals__ =", "def emit(self, signal_name, args=()): handlers = self.__connected_signals__.get(signal_name, {}) for id, (handler, user_args) in", "GenericTreeModel(object): __connected_signals__ = None def __init__(self): self.__connected_signals__ = {} def connect(self, signal_name, handler,", "connect(self, signal_name, handler, *args): handlers = self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid() handlers[handler_id] =", "class GtkTreeIter(): def __init__(self, user_data, path=None): self.user_data = user_data self.path = path pass", "= self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid() handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name] = handlers", "False class GtkTreeIter(): def __init__(self, user_data, path=None): self.user_data = user_data self.path = path", "= user_data self.path = path pass # end of class class GenericTreeModel(object): __connected_signals__", "self.path = path pass # end of class class GenericTreeModel(object): __connected_signals__ = None", "end of class class GenericTreeModel(object): __connected_signals__ = None def __init__(self): self.__connected_signals__ = {}", "TOD0! def iter_is_valid(self, itr): return True # TODO! 
def __len__(self): return len(self._model_data) pass", "itr): return self.on_get_path(itr.user_data) def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr): self.emit(\"row-inserted\",", "*args): handlers = self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid() handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name]", "path pass # end of class class GenericTreeModel(object): __connected_signals__ = None def __init__(self):", "handlers return handler_id def disconnect(self, signal_name, handler_id): try: handlers = self.__connected_signals__.get(signal_name, {}) del", "user_data self.path = path pass # end of class class GenericTreeModel(object): __connected_signals__ =", "handler_id def disconnect(self, signal_name, handler_id): try: handlers = self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except", "(indeces,)) def invalidate_iters(self): pass # TOD0! def iter_is_valid(self, itr): return True # TODO!", "found in the LICENSE file. from mvc.support.utils import get_new_uuid __all__ = [ \"gobject\",", "mvc.support.utils import get_new_uuid __all__ = [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY =", "{}) handler_id = get_new_uuid() handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name] = handlers return handler_id", "handler_id): try: handlers = self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except KeyError: pass return def", "pass # TOD0! def iter_is_valid(self, itr): return True # TODO! def __len__(self): return", "TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending = lambda: False class GtkTreeIter(): def", "# Copyright (c) 2013, <NAME> # All rights reserved. 
# Complete license can", "__init__(self): self.__connected_signals__ = {} def connect(self, signal_name, handler, *args): handlers = self.__connected_signals__.get(signal_name, {})", "def disconnect(self, signal_name, handler_id): try: handlers = self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except KeyError:", "iter_is_valid(self, itr): return True # TODO! def __len__(self): return len(self._model_data) pass # end", "# All rights reserved. # Complete license can be found in the LICENSE", "self.__connected_signals__.get(signal_name, {}) for id, (handler, user_args) in handlers.items(): # @ReservedAssignment handler(self, *((args,) +", "create_tree_iter(self, user_data): return GtkTreeIter(user_data) def get_path(self, itr): return self.on_get_path(itr.user_data) def get_iter(self, path): return", "events_pending = lambda: False class GtkTreeIter(): def __init__(self, user_data, path=None): self.user_data = user_data", "import get_new_uuid __all__ = [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00", "disconnect(self, signal_name, handler_id): try: handlers = self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except KeyError: pass", "+ user_args)) pass def set_property(self, *args, **kwargs): pass def create_tree_iter(self, user_data): return GtkTreeIter(user_data)", "GtkTreeIter(user_data) def get_path(self, itr): return self.on_get_path(itr.user_data) def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self,", "del handlers[handler_id] except KeyError: pass return def emit(self, signal_name, args=()): handlers = self.__connected_signals__.get(signal_name,", "GtkTreeIter(): def __init__(self, user_data, path=None): self.user_data = user_data self.path = path pass #", "**kwargs): pass def create_tree_iter(self, user_data): return GtkTreeIter(user_data) def get_path(self, itr): return self.on_get_path(itr.user_data) 
def", "for id, (handler, user_args) in handlers.items(): # @ReservedAssignment handler(self, *((args,) + user_args)) pass", "handler, *args): handlers = self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid() handlers[handler_id] = (handler, args)", "handlers = self.__connected_signals__.get(signal_name, {}) handler_id = get_new_uuid() handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name] =", "handlers = self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except KeyError: pass return def emit(self, signal_name,", "in the LICENSE file. from mvc.support.utils import get_new_uuid __all__ = [ \"gobject\", \"GtkTreeIter\",", "{}) for id, (handler, user_args) in handlers.items(): # @ReservedAssignment handler(self, *((args,) + user_args))", "self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr): self.emit(\"row-inserted\", (path, itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,))", "return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr): self.emit(\"row-inserted\", (path, itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\",", "class GenericTreeModel(object): __connected_signals__ = None def __init__(self): self.__connected_signals__ = {} def connect(self, signal_name,", "return self.on_get_path(itr.user_data) def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr): self.emit(\"row-inserted\", (path,", "self.on_get_path(itr.user_data) def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr): self.emit(\"row-inserted\", (path, itr))", "return def emit(self, signal_name, args=()): handlers = self.__connected_signals__.get(signal_name, {}) for id, (handler, user_args)", "can be found in the LICENSE file. 
from mvc.support.utils import get_new_uuid __all__ =", "= {} def connect(self, signal_name, handler, *args): handlers = self.__connected_signals__.get(signal_name, {}) handler_id =", "get_path(self, itr): return self.on_get_path(itr.user_data) def get_iter(self, path): return self.create_tree_iter(self.on_get_iter(path)) def row_inserted(self, path, itr):", "# Complete license can be found in the LICENSE file. from mvc.support.utils import", "(handler, user_args) in handlers.items(): # @ReservedAssignment handler(self, *((args,) + user_args)) pass def set_property(self,", "signal_name, args=()): handlers = self.__connected_signals__.get(signal_name, {}) for id, (handler, user_args) in handlers.items(): #", "def invalidate_iters(self): pass # TOD0! def iter_is_valid(self, itr): return True # TODO! def", "be found in the LICENSE file. from mvc.support.utils import get_new_uuid __all__ = [", "ex:ts=4:sw=4:et # Copyright (c) 2013, <NAME> # All rights reserved. # Complete license", "file. from mvc.support.utils import get_new_uuid __all__ = [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ]", "self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except KeyError: pass return def emit(self, signal_name, args=()): handlers", "None def __init__(self): self.__connected_signals__ = {} def connect(self, signal_name, handler, *args): handlers =", "__all__ = [ \"gobject\", \"GtkTreeIter\", \"GenericTreeModel\" \"TREE_MODEL_LIST_ONLY\" ] TREE_MODEL_LIST_ONLY = 0x00 TREE_MODEL_ITERS_PERSIST =", "def iter_is_valid(self, itr): return True # TODO! def __len__(self): return len(self._model_data) pass #", "path, itr): self.emit(\"row-inserted\", (path, itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass", "itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass # TOD0! 
def iter_is_valid(self,", "= None def __init__(self): self.__connected_signals__ = {} def connect(self, signal_name, handler, *args): handlers", "KeyError: pass return def emit(self, signal_name, args=()): handlers = self.__connected_signals__.get(signal_name, {}) for id,", "itr): self.emit(\"row-inserted\", (path, itr)) def row_deleted(self, indeces): self.emit(\"row-deleted\", (indeces,)) def invalidate_iters(self): pass #", "get_new_uuid() handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name] = handlers return handler_id def disconnect(self, signal_name,", "= get_new_uuid() handlers[handler_id] = (handler, args) self.__connected_signals__[signal_name] = handlers return handler_id def disconnect(self,", "0x00 TREE_MODEL_ITERS_PERSIST = 0x00 events_pending = lambda: False class GtkTreeIter(): def __init__(self, user_data,", "signal_name, handler_id): try: handlers = self.__connected_signals__.get(signal_name, {}) del handlers[handler_id] except KeyError: pass return", "Complete license can be found in the LICENSE file. from mvc.support.utils import get_new_uuid" ]
[ "an integer in the range [0, w.length - 1]. pickIndex() should return the", "Solution object will be instantiated and called as such: # obj = Solution(w)", "(0-indexed). # We need to call the function pickIndex() which randomly returns an", "cum sum) def pickIndex(self) -> int: if len(self.values) <= 1: return 0 #", "1 / (1 + 3) = 0.25 (i.e 25%) while the probability of", "self.curSum lo, hi = 0, len(self.values) - 1 while lo < hi: mid", "hi = 0, len(self.values) - 1 while lo < hi: mid = lo", "if len(self.values) <= 1: return 0 weightedPick = random() * self.curSum for i", "obj.pickIndex() # Score Card # Did I need hints? N # Did you", "num for 0 .. curSum so lets use random to create a value", "= random() * self.curSum for i in range(len(self.values)): if self.values[i] > weightedPick: return", "# We need to call the function pickIndex() which randomly returns an integer", "# to binary search through sorted numbers like the weighted sum (based off", "# Did you finish within 30 min? 15 # Was the solution optimal?", "1 else: hi = mid return lo # Your Solution object will be", "is 3 / (1 + 3) = 0.75 (i.e 75%). # More formally,", "value that is from 0 .. 1 and multiply it by cursum ourPick", "More formally, the probability of picking index i is w[i] / sum(w). 
#", "2 if ourPick > self.values[mid]: lo = mid + 1 else: hi =", "example, for w = [1, 3], the probability of picking the index 0", "int: if len(self.values) <= 1: return 0 weightedPick = random() * self.curSum for", "Weight: https://leetcode.com/problems/random-pick-with-weight/ # You are given an array of positive integers w where", "it import random class Solution: def __init__(self, w): self.curSum = 0 self.values =", "from left to right until we are over that value and return it", "mid return lo # Your Solution object will be instantiated and called as", "left to right until we are over that value and return it import", "are over that value and return it import random class Solution: def __init__(self,", "pickIndex() should return the integer proportional to its weight in the w array.", "lo + (hi - lo) // 2 if ourPick > self.values[mid]: lo =", "-> int: if len(self.values) <= 1: return 0 weightedPick = random() * self.curSum", "to binary search through sorted numbers like the weighted sum (based off of", "cursum ourPick = random() * self.curSum lo, hi = 0, len(self.values) - 1", "across # from left to right until we are over that value and", "really easy # to binary search through sorted numbers like the weighted sum", "= lo + (hi - lo) // 2 if ourPick > self.values[mid]: lo", "self.values[mid]: lo = mid + 1 else: hi = mid return lo #", "numbers like the weighted sum (based off of cum sum) def pickIndex(self) ->", "picking index i is w[i] / sum(w). # This problem is actually quite", "in the w array. For example, for w = [1, 3], the probability", "sum (based off of cum sum) def pickIndex(self) -> int: if len(self.values) <=", "hi: mid = lo + (hi - lo) // 2 if ourPick >", "This problem is actually quite easy we keep a rolling total and then", "0 weightedPick = random() * self.curSum for i in range(len(self.values)): if self.values[i] >", "Your Solution object will be instantiated and called as such: # obj =", "the range [0, w.length - 1]. 
pickIndex() should return the integer proportional to", "then go across # from left to right until we are over that", "w: self.curSum += weight self.values.append(self.curSum) def pickIndex(self) -> int: if len(self.values) <= 1:", "mid = lo + (hi - lo) // 2 if ourPick > self.values[mid]:", "return it import random class Solution: def __init__(self, w): self.curSum = 0 self.values", "self.curSum = 0 self.values = [] for weight in w: self.curSum += weight", "if ourPick > self.values[mid]: lo = mid + 1 else: hi = mid", "for 0 .. curSum so lets use random to create a value that", "3) = 0.25 (i.e 25%) while the probability of picking the index 1", "random to create a value that is from 0 .. 1 and multiply", "w array. For example, for w = [1, 3], the probability of picking", "the probability of picking the index 0 is 1 / (1 + 3)", "+ 1 else: hi = mid return lo # Your Solution object will", "0 is 1 / (1 + 3) = 0.25 (i.e 25%) while the", "index 1 is 3 / (1 + 3) = 0.75 (i.e 75%). #", "for i in range(len(self.values)): if self.values[i] > weightedPick: return i # Now the", "/ sum(w). # This problem is actually quite easy we keep a rolling", "create a value that is from 0 .. 1 and multiply it by", "import random class Solution: def __init__(self, w): self.curSum = 0 self.values = []", "15 # Was the solution optimal? This is optimal # Were there any", "of picking the index 1 is 3 / (1 + 3) = 0.75", "with Weight: https://leetcode.com/problems/random-pick-with-weight/ # You are given an array of positive integers w", "len(self.values) - 1 while lo < hi: mid = lo + (hi -", "- 1]. pickIndex() should return the integer proportional to its weight in the", "sum) def pickIndex(self) -> int: if len(self.values) <= 1: return 0 # Create", "<= 1: return 0 # Create random num for 0 .. curSum so", "# You are given an array of positive integers w where w[i] describes", "the function pickIndex() which randomly returns an integer in the range [0, w.length", "1 is 3 / (1 + 3) = 0.75 (i.e 75%). 
# More", "> weightedPick: return i # Now the above runs in o(N) but we", "< hi: mid = lo + (hi - lo) // 2 if ourPick", "# param_1 = obj.pickIndex() # Score Card # Did I need hints? N", "0.75 (i.e 75%). # More formally, the probability of picking index i is", "of picking index i is w[i] / sum(w). # This problem is actually", "https://leetcode.com/problems/random-pick-with-weight/ # You are given an array of positive integers w where w[i]", "object will be instantiated and called as such: # obj = Solution(w) #", "sorted numbers like the weighted sum (based off of cum sum) def pickIndex(self)", "range(len(self.values)): if self.values[i] > weightedPick: return i # Now the above runs in", "such: # obj = Solution(w) # param_1 = obj.pickIndex() # Score Card #", "Random Pick with Weight: https://leetcode.com/problems/random-pick-with-weight/ # You are given an array of positive", "while the probability of picking the index 1 is 3 / (1 +", "You are given an array of positive integers w where w[i] describes the", "lo) // 2 if ourPick > self.values[mid]: lo = mid + 1 else:", "in O(nlogn) as it is really easy # to binary search through sorted", "[1, 3], the probability of picking the index 0 is 1 / (1", "weightedPick = random() * self.curSum for i in range(len(self.values)): if self.values[i] > weightedPick:", "weight of ith index (0-indexed). # We need to call the function pickIndex()", "// 2 if ourPick > self.values[mid]: lo = mid + 1 else: hi", "[] for weight in w: self.curSum += weight self.values.append(self.curSum) def pickIndex(self) -> int:", "off of cum sum) def pickIndex(self) -> int: if len(self.values) <= 1: return", "problem is actually quite easy we keep a rolling total and then go", "(i.e 25%) while the probability of picking the index 1 is 3 /", "called as such: # obj = Solution(w) # param_1 = obj.pickIndex() # Score", "within 30 min? 15 # Was the solution optimal? 
This is optimal #", "pickIndex(self) -> int: if len(self.values) <= 1: return 0 weightedPick = random() *", "pickIndex(self) -> int: if len(self.values) <= 1: return 0 # Create random num", "like the weighted sum (based off of cum sum) def pickIndex(self) -> int:", "sum(w). # This problem is actually quite easy we keep a rolling total", "ith index (0-indexed). # We need to call the function pickIndex() which randomly", "= mid return lo # Your Solution object will be instantiated and called", "return 0 weightedPick = random() * self.curSum for i in range(len(self.values)): if self.values[i]", "from 0 .. 1 and multiply it by cursum ourPick = random() *", "+ (hi - lo) // 2 if ourPick > self.values[mid]: lo = mid", "Score Card # Did I need hints? N # Did you finish within", "else: hi = mid return lo # Your Solution object will be instantiated", "lo # Your Solution object will be instantiated and called as such: #", "return 0 # Create random num for 0 .. curSum so lets use", "return i # Now the above runs in o(N) but we can do", "obj = Solution(w) # param_1 = obj.pickIndex() # Score Card # Did I", "(i.e 75%). # More formally, the probability of picking index i is w[i]", "hints? N # Did you finish within 30 min? 15 # Was the", "and multiply it by cursum ourPick = random() * self.curSum lo, hi =", "in o(N) but we can do this in O(nlogn) as it is really", "self.values = [] for weight in w: self.curSum += weight self.values.append(self.curSum) def pickIndex(self)", "Card # Did I need hints? N # Did you finish within 30", "# Was the solution optimal? This is optimal # Were there any bugs?", "array. For example, for w = [1, 3], the probability of picking the", "O(nlogn) as it is really easy # to binary search through sorted numbers", "that is from 0 .. 1 and multiply it by cursum ourPick =", "and then go across # from left to right until we are over", "w.length - 1]. 
pickIndex() should return the integer proportional to its weight in", "will be instantiated and called as such: # obj = Solution(w) # param_1", "30 min? 15 # Was the solution optimal? This is optimal # Were", "# Were there any bugs? No # 5 5 5 5 = 5", "binary search through sorted numbers like the weighted sum (based off of cum", "this in O(nlogn) as it is really easy # to binary search through", "- lo) // 2 if ourPick > self.values[mid]: lo = mid + 1", "weight in w: self.curSum += weight self.values.append(self.curSum) def pickIndex(self) -> int: if len(self.values)", "0 .. curSum so lets use random to create a value that is", "describes the weight of ith index (0-indexed). # We need to call the", "positive integers w where w[i] describes the weight of ith index (0-indexed). #", "a rolling total and then go across # from left to right until", "1: return 0 # Create random num for 0 .. curSum so lets", "lets use random to create a value that is from 0 .. 1", "0 # Create random num for 0 .. curSum so lets use random", "rolling total and then go across # from left to right until we", "integer in the range [0, w.length - 1]. pickIndex() should return the integer", "(1 + 3) = 0.75 (i.e 75%). 
# More formally, the probability of", "* self.curSum for i in range(len(self.values)): if self.values[i] > weightedPick: return i #", "0, len(self.values) - 1 while lo < hi: mid = lo + (hi", "over that value and return it import random class Solution: def __init__(self, w):", "probability of picking the index 1 is 3 / (1 + 3) =", "above runs in o(N) but we can do this in O(nlogn) as it", "that value and return it import random class Solution: def __init__(self, w): self.curSum", "in range(len(self.values)): if self.values[i] > weightedPick: return i # Now the above runs", "def pickIndex(self) -> int: if len(self.values) <= 1: return 0 weightedPick = random()", "picking the index 0 is 1 / (1 + 3) = 0.25 (i.e", "self.values.append(self.curSum) def pickIndex(self) -> int: if len(self.values) <= 1: return 0 weightedPick =", "is really easy # to binary search through sorted numbers like the weighted", "it by cursum ourPick = random() * self.curSum lo, hi = 0, len(self.values)", "def pickIndex(self) -> int: if len(self.values) <= 1: return 0 # Create random", "def __init__(self, w): self.curSum = 0 self.values = [] for weight in w:", "as it is really easy # to binary search through sorted numbers like", "and return it import random class Solution: def __init__(self, w): self.curSum = 0", "an array of positive integers w where w[i] describes the weight of ith", "self.curSum for i in range(len(self.values)): if self.values[i] > weightedPick: return i # Now", "weightedPick: return i # Now the above runs in o(N) but we can", "can do this in O(nlogn) as it is really easy # to binary", "is optimal # Were there any bugs? No # 5 5 5 5", "finish within 30 min? 15 # Was the solution optimal? 
This is optimal", "but we can do this in O(nlogn) as it is really easy #", "it is really easy # to binary search through sorted numbers like the", "ourPick > self.values[mid]: lo = mid + 1 else: hi = mid return", "= Solution(w) # param_1 = obj.pickIndex() # Score Card # Did I need", "the weight of ith index (0-indexed). # We need to call the function", "= 0.75 (i.e 75%). # More formally, the probability of picking index i", "is actually quite easy we keep a rolling total and then go across", "return the integer proportional to its weight in the w array. For example,", "need to call the function pickIndex() which randomly returns an integer in the", "__init__(self, w): self.curSum = 0 self.values = [] for weight in w: self.curSum", ".. 1 and multiply it by cursum ourPick = random() * self.curSum lo,", "Did you finish within 30 min? 15 # Was the solution optimal? This", "mid + 1 else: hi = mid return lo # Your Solution object", "the index 1 is 3 / (1 + 3) = 0.75 (i.e 75%).", "array of positive integers w where w[i] describes the weight of ith index", "random() * self.curSum lo, hi = 0, len(self.values) - 1 while lo <", "probability of picking index i is w[i] / sum(w). # This problem is", "the probability of picking index i is w[i] / sum(w). # This problem", "we keep a rolling total and then go across # from left to", "easy we keep a rolling total and then go across # from left", "range [0, w.length - 1]. pickIndex() should return the integer proportional to its", "# More formally, the probability of picking index i is w[i] / sum(w).", "1 while lo < hi: mid = lo + (hi - lo) //", "picking the index 1 is 3 / (1 + 3) = 0.75 (i.e", "Pick with Weight: https://leetcode.com/problems/random-pick-with-weight/ # You are given an array of positive integers", "3 / (1 + 3) = 0.75 (i.e 75%). # More formally, the", "optimal? This is optimal # Were there any bugs? No # 5 5", "return lo # Your Solution object will be instantiated and called as such:", "is from 0 .. 
1 and multiply it by cursum ourPick = random()", "to create a value that is from 0 .. 1 and multiply it", "of positive integers w where w[i] describes the weight of ith index (0-indexed).", "the weighted sum (based off of cum sum) def pickIndex(self) -> int: if", "w = [1, 3], the probability of picking the index 0 is 1", "the index 0 is 1 / (1 + 3) = 0.25 (i.e 25%)", "probability of picking the index 0 is 1 / (1 + 3) =", "# This problem is actually quite easy we keep a rolling total and", "in w: self.curSum += weight self.values.append(self.curSum) def pickIndex(self) -> int: if len(self.values) <=", "N # Did you finish within 30 min? 15 # Was the solution", "(based off of cum sum) def pickIndex(self) -> int: if len(self.values) <= 1:", "25%) while the probability of picking the index 1 is 3 / (1", "weight in the w array. For example, for w = [1, 3], the", "we are over that value and return it import random class Solution: def", "len(self.values) <= 1: return 0 weightedPick = random() * self.curSum for i in", "given an array of positive integers w where w[i] describes the weight of", "is 1 / (1 + 3) = 0.25 (i.e 25%) while the probability", "the w array. For example, for w = [1, 3], the probability of", "the above runs in o(N) but we can do this in O(nlogn) as", "index i is w[i] / sum(w). # This problem is actually quite easy", "index (0-indexed). 
# We need to call the function pickIndex() which randomly returns", "index 0 is 1 / (1 + 3) = 0.25 (i.e 25%) while", "for weight in w: self.curSum += weight self.values.append(self.curSum) def pickIndex(self) -> int: if", "(hi - lo) // 2 if ourPick > self.values[mid]: lo = mid +", "class Solution: def __init__(self, w): self.curSum = 0 self.values = [] for weight", "Solution: def __init__(self, w): self.curSum = 0 self.values = [] for weight in", "= 0, len(self.values) - 1 while lo < hi: mid = lo +", "3], the probability of picking the index 0 is 1 / (1 +", "int: if len(self.values) <= 1: return 0 # Create random num for 0", "integers w where w[i] describes the weight of ith index (0-indexed). # We", "of ith index (0-indexed). # We need to call the function pickIndex() which", "1 and multiply it by cursum ourPick = random() * self.curSum lo, hi", "(1 + 3) = 0.25 (i.e 25%) while the probability of picking the", "and called as such: # obj = Solution(w) # param_1 = obj.pickIndex() #", "where w[i] describes the weight of ith index (0-indexed). # We need to", "keep a rolling total and then go across # from left to right", "go across # from left to right until we are over that value", "# Random Pick with Weight: https://leetcode.com/problems/random-pick-with-weight/ # You are given an array of", "you finish within 30 min? 15 # Was the solution optimal? This is", "which randomly returns an integer in the range [0, w.length - 1]. pickIndex()", "Now the above runs in o(N) but we can do this in O(nlogn)", "while lo < hi: mid = lo + (hi - lo) // 2", "in the range [0, w.length - 1]. pickIndex() should return the integer proportional", "i in range(len(self.values)): if self.values[i] > weightedPick: return i # Now the above", "= obj.pickIndex() # Score Card # Did I need hints? N # Did", "do this in O(nlogn) as it is really easy # to binary search", "Create random num for 0 .. curSum so lets use random to create", "3) = 0.75 (i.e 75%). 
# More formally, the probability of picking index", "param_1 = obj.pickIndex() # Score Card # Did I need hints? N #", "i is w[i] / sum(w). # This problem is actually quite easy we", "self.curSum += weight self.values.append(self.curSum) def pickIndex(self) -> int: if len(self.values) <= 1: return", "[0, w.length - 1]. pickIndex() should return the integer proportional to its weight", "+ 3) = 0.25 (i.e 25%) while the probability of picking the index", "1: return 0 weightedPick = random() * self.curSum for i in range(len(self.values)): if", "# Now the above runs in o(N) but we can do this in", "optimal # Were there any bugs? No # 5 5 5 5 =", "0.25 (i.e 25%) while the probability of picking the index 1 is 3", "lo, hi = 0, len(self.values) - 1 while lo < hi: mid =", "be instantiated and called as such: # obj = Solution(w) # param_1 =", "total and then go across # from left to right until we are", "returns an integer in the range [0, w.length - 1]. pickIndex() should return", "* self.curSum lo, hi = 0, len(self.values) - 1 while lo < hi:", "Did I need hints? N # Did you finish within 30 min? 15", "= 0.25 (i.e 25%) while the probability of picking the index 1 is", "# Did I need hints? N # Did you finish within 30 min?", "call the function pickIndex() which randomly returns an integer in the range [0,", "curSum so lets use random to create a value that is from 0", "random class Solution: def __init__(self, w): self.curSum = 0 self.values = [] for", "solution optimal? This is optimal # Were there any bugs? No # 5", "<= 1: return 0 weightedPick = random() * self.curSum for i in range(len(self.values)):", "I need hints? N # Did you finish within 30 min? 15 #", "for w = [1, 3], the probability of picking the index 0 is", "the solution optimal? This is optimal # Were there any bugs? No #", "is w[i] / sum(w). # This problem is actually quite easy we keep", "to right until we are over that value and return it import random", "# Score Card # Did I need hints? 
N # Did you finish", "w[i] describes the weight of ith index (0-indexed). # We need to call", "lo = mid + 1 else: hi = mid return lo # Your", "to its weight in the w array. For example, for w = [1,", "so lets use random to create a value that is from 0 ..", "use random to create a value that is from 0 .. 1 and", "/ (1 + 3) = 0.75 (i.e 75%). # More formally, the probability", "random num for 0 .. curSum so lets use random to create a", "of cum sum) def pickIndex(self) -> int: if len(self.values) <= 1: return 0", "as such: # obj = Solution(w) # param_1 = obj.pickIndex() # Score Card", "1]. pickIndex() should return the integer proportional to its weight in the w", "if len(self.values) <= 1: return 0 # Create random num for 0 ..", "w where w[i] describes the weight of ith index (0-indexed). # We need", "easy # to binary search through sorted numbers like the weighted sum (based", "75%). # More formally, the probability of picking index i is w[i] /", ".. curSum so lets use random to create a value that is from", "its weight in the w array. For example, for w = [1, 3],", "# Your Solution object will be instantiated and called as such: # obj", "Solution(w) # param_1 = obj.pickIndex() # Score Card # Did I need hints?", "0 self.values = [] for weight in w: self.curSum += weight self.values.append(self.curSum) def", "= mid + 1 else: hi = mid return lo # Your Solution", "self.values[i] > weightedPick: return i # Now the above runs in o(N) but", "by cursum ourPick = random() * self.curSum lo, hi = 0, len(self.values) -", "pickIndex() which randomly returns an integer in the range [0, w.length - 1].", "len(self.values) <= 1: return 0 # Create random num for 0 .. curSum", "min? 15 # Was the solution optimal? This is optimal # Were there", "- 1 while lo < hi: mid = lo + (hi - lo)", "0 .. 
1 and multiply it by cursum ourPick = random() * self.curSum", "quite easy we keep a rolling total and then go across # from", "i # Now the above runs in o(N) but we can do this", "search through sorted numbers like the weighted sum (based off of cum sum)", "right until we are over that value and return it import random class", "function pickIndex() which randomly returns an integer in the range [0, w.length -", "= [] for weight in w: self.curSum += weight self.values.append(self.curSum) def pickIndex(self) ->", "the integer proportional to its weight in the w array. For example, for", "until we are over that value and return it import random class Solution:", "This is optimal # Were there any bugs? No # 5 5 5", "randomly returns an integer in the range [0, w.length - 1]. pickIndex() should", "runs in o(N) but we can do this in O(nlogn) as it is", "value and return it import random class Solution: def __init__(self, w): self.curSum =", "w): self.curSum = 0 self.values = [] for weight in w: self.curSum +=", "a value that is from 0 .. 1 and multiply it by cursum", "# from left to right until we are over that value and return", "-> int: if len(self.values) <= 1: return 0 # Create random num for", "Was the solution optimal? This is optimal # Were there any bugs? No", "are given an array of positive integers w where w[i] describes the weight", "we can do this in O(nlogn) as it is really easy # to", "+= weight self.values.append(self.curSum) def pickIndex(self) -> int: if len(self.values) <= 1: return 0", "# Create random num for 0 .. curSum so lets use random to", "multiply it by cursum ourPick = random() * self.curSum lo, hi = 0,", "the probability of picking the index 1 is 3 / (1 + 3)", "if self.values[i] > weightedPick: return i # Now the above runs in o(N)", "formally, the probability of picking index i is w[i] / sum(w). # This", "need hints? N # Did you finish within 30 min? 
15 # Was", "> self.values[mid]: lo = mid + 1 else: hi = mid return lo", "lo < hi: mid = lo + (hi - lo) // 2 if", "instantiated and called as such: # obj = Solution(w) # param_1 = obj.pickIndex()", "weight self.values.append(self.curSum) def pickIndex(self) -> int: if len(self.values) <= 1: return 0 weightedPick", "= random() * self.curSum lo, hi = 0, len(self.values) - 1 while lo", "= 0 self.values = [] for weight in w: self.curSum += weight self.values.append(self.curSum)", "random() * self.curSum for i in range(len(self.values)): if self.values[i] > weightedPick: return i", "should return the integer proportional to its weight in the w array. For", "integer proportional to its weight in the w array. For example, for w", "actually quite easy we keep a rolling total and then go across #", "We need to call the function pickIndex() which randomly returns an integer in", "through sorted numbers like the weighted sum (based off of cum sum) def", "to call the function pickIndex() which randomly returns an integer in the range", "For example, for w = [1, 3], the probability of picking the index", "hi = mid return lo # Your Solution object will be instantiated and", "= [1, 3], the probability of picking the index 0 is 1 /", "/ (1 + 3) = 0.25 (i.e 25%) while the probability of picking", "ourPick = random() * self.curSum lo, hi = 0, len(self.values) - 1 while", "of picking the index 0 is 1 / (1 + 3) = 0.25", "# obj = Solution(w) # param_1 = obj.pickIndex() # Score Card # Did", "w[i] / sum(w). # This problem is actually quite easy we keep a", "o(N) but we can do this in O(nlogn) as it is really easy", "weighted sum (based off of cum sum) def pickIndex(self) -> int: if len(self.values)", "+ 3) = 0.75 (i.e 75%). # More formally, the probability of picking", "proportional to its weight in the w array. For example, for w =" ]
[ "import forms from wagtail.wagtailimages.widgets import AdminImageChooser from .models import Author class AuthorAdminForm(forms.ModelForm): class", "Author # TODO: Ability to add author image exclude = ['image', 'slug', 'bio']", "from wagtail.wagtailimages.widgets import AdminImageChooser from .models import Author class AuthorAdminForm(forms.ModelForm): class Meta: model", "from .models import Author class AuthorAdminForm(forms.ModelForm): class Meta: model = Author # TODO:", "import Author class AuthorAdminForm(forms.ModelForm): class Meta: model = Author # TODO: Ability to", "from django import forms from wagtail.wagtailimages.widgets import AdminImageChooser from .models import Author class", "class AuthorAdminForm(forms.ModelForm): class Meta: model = Author # TODO: Ability to add author", "import AdminImageChooser from .models import Author class AuthorAdminForm(forms.ModelForm): class Meta: model = Author", "django import forms from wagtail.wagtailimages.widgets import AdminImageChooser from .models import Author class AuthorAdminForm(forms.ModelForm):", "Meta: model = Author # TODO: Ability to add author image exclude =", "forms from wagtail.wagtailimages.widgets import AdminImageChooser from .models import Author class AuthorAdminForm(forms.ModelForm): class Meta:", "= Author # TODO: Ability to add author image exclude = ['image', 'slug',", "wagtail.wagtailimages.widgets import AdminImageChooser from .models import Author class AuthorAdminForm(forms.ModelForm): class Meta: model =", "AuthorAdminForm(forms.ModelForm): class Meta: model = Author # TODO: Ability to add author image", "AdminImageChooser from .models import Author class AuthorAdminForm(forms.ModelForm): class Meta: model = Author #", ".models import Author class AuthorAdminForm(forms.ModelForm): class Meta: model = Author # TODO: Ability", "Author class AuthorAdminForm(forms.ModelForm): class Meta: model = Author # TODO: Ability to add", "model = Author # TODO: Ability to add author image 
exclude = ['image',", "class Meta: model = Author # TODO: Ability to add author image exclude" ]
[ "an EC2 resource, identified as above or below the configured threshold by Hyperglance", "== 0): res_id = resource['id'] raise RuntimeError(f'No tags to update on {res_id} because", "[ { \"name\": \"New Key\", \"type\": \"string\", \"default\": \"\" } ], \"permissions\": [", "its value\", \"resourceTypes\": [ \"Security Group\", \"EC2 Instance\", \"AMI\", \"Internet Gateway\", \"Network ACL\",", "Role exists. \"\"\" import processing.automation_utils as utils def hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\"", "of its tags matched the search criteria.') for old_key, value in matched_tag_attrs: #", "dict, automation_params=''): \"\"\" Attempts to Tag an EC2 Resource Parameters ---------- boto_session :", "do the destructive action 2nd!) utils.remove_tag(boto_session, old_key, resource) def info() -> dict: INFO", "= { \"displayName\": \"Update Tag\", \"description\": \"Replaces a tags key but keeps its", "\"string\", \"default\": \"\" } ], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\", \"sqs:TagQueue\", \"ec2:DeleteTags\", \"sns:UntagResource\", \"sqs:UntagQueue\"", "below the configured threshold by Hyperglance Rule(s) This automation will operate across accounts,", "in matched_tag_attrs: # tag might already be 'good' if old_key == new_key: continue", "def hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\" Attempts to Tag an EC2 Resource Parameters", "an EC2 Resource Parameters ---------- boto_session : object The boto session to use", "passed from the Hyperglance UI \"\"\" new_key = automation_params.get('New Key') matched_tag_attrs = [attr", "matched_tag_attrs: # tag might already be 'good' if old_key == new_key: continue ##", "RuntimeError(f'No tags to update on {res_id} because none of its tags matched the", "\"displayName\": \"Update Tag\", \"description\": \"Replaces a tags key but keeps its value\", \"resourceTypes\":", "\"AMI\", \"Internet Gateway\", \"Network ACL\", 
\"Network Interface\", \"Placement Group\", \"Route Table\", \"EC2 Snapshot\",", "EC2 Resource This automation attempts to fix a tag in for an EC2", "configured threshold by Hyperglance Rule(s) This automation will operate across accounts, where the", "update on {res_id} because none of its tags matched the search criteria.') for", "Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS Volume\", \"VPC\", \"SNS Topic\", \"SQS Queue\" ], \"params\":", "ACL\", \"Network Interface\", \"Placement Group\", \"Route Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS", "resource['matchedAttributes'].items() if attr[0] in resource['tags']] if (len(matched_tag_attrs) == 0): res_id = resource['id'] raise", "identified as above or below the configured threshold by Hyperglance Rule(s) This automation", "existing value utils.add_tag(boto_session, new_key, value, resource) ## Remove the old offending tag (we", "Parameters ---------- boto_session : object The boto session to use to invoke the", "Topic\", \"SQS Queue\" ], \"params\": [ { \"name\": \"New Key\", \"type\": \"string\", \"default\":", "## Create the new tag and retain existing value utils.add_tag(boto_session, new_key, value, resource)", "operate across accounts, where the appropriate IAM Role exists. \"\"\" import processing.automation_utils as", "import processing.automation_utils as utils def hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\" Attempts to Tag", "utils def hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\" Attempts to Tag an EC2 Resource", "Resource This automation attempts to fix a tag in for an EC2 resource,", "[attr for attr in resource['matchedAttributes'].items() if attr[0] in resource['tags']] if (len(matched_tag_attrs) == 0):", "\"SQS Queue\" ], \"params\": [ { \"name\": \"New Key\", \"type\": \"string\", \"default\": \"\"", "2nd!) 
utils.remove_tag(boto_session, old_key, resource) def info() -> dict: INFO = { \"displayName\": \"Update", "offending tag (we make sure to do the destructive action 2nd!) utils.remove_tag(boto_session, old_key,", "fix a tag in for an EC2 resource, identified as above or below", "Volume\", \"VPC\", \"SNS Topic\", \"SQS Queue\" ], \"params\": [ { \"name\": \"New Key\",", "use to invoke the automation resource: dict Dict of Resource attributes touse in", "the new tag and retain existing value utils.add_tag(boto_session, new_key, value, resource) ## Remove", "the destructive action 2nd!) utils.remove_tag(boto_session, old_key, resource) def info() -> dict: INFO =", "---------- boto_session : object The boto session to use to invoke the automation", "in the automation automation_params : str Automation parameters passed from the Hyperglance UI", "already be 'good' if old_key == new_key: continue ## Create the new tag", "resource) def info() -> dict: INFO = { \"displayName\": \"Update Tag\", \"description\": \"Replaces", "will operate across accounts, where the appropriate IAM Role exists. 
\"\"\" import processing.automation_utils", "old_key, value in matched_tag_attrs: # tag might already be 'good' if old_key ==", "to invoke the automation resource: dict Dict of Resource attributes touse in the", "might already be 'good' if old_key == new_key: continue ## Create the new", "because none of its tags matched the search criteria.') for old_key, value in", "to update on {res_id} because none of its tags matched the search criteria.')", "resource: dict, automation_params=''): \"\"\" Attempts to Tag an EC2 Resource Parameters ---------- boto_session", "\"EC2 Instance\", \"AMI\", \"Internet Gateway\", \"Network ACL\", \"Network Interface\", \"Placement Group\", \"Route Table\",", "automation_params=''): \"\"\" Attempts to Tag an EC2 Resource Parameters ---------- boto_session : object", "Group\", \"EC2 Instance\", \"AMI\", \"Internet Gateway\", \"Network ACL\", \"Network Interface\", \"Placement Group\", \"Route", "\"EBS Volume\", \"VPC\", \"SNS Topic\", \"SQS Queue\" ], \"params\": [ { \"name\": \"New", "touse in the automation automation_params : str Automation parameters passed from the Hyperglance", "the search criteria.') for old_key, value in matched_tag_attrs: # tag might already be", "\"\"\" Attempts to Tag an EC2 Resource Parameters ---------- boto_session : object The", "Hyperglance Rule(s) This automation will operate across accounts, where the appropriate IAM Role", "in resource['matchedAttributes'].items() if attr[0] in resource['tags']] if (len(matched_tag_attrs) == 0): res_id = resource['id']", "automation resource: dict Dict of Resource attributes touse in the automation automation_params :", "automation automation_params : str Automation parameters passed from the Hyperglance UI \"\"\" new_key", "{res_id} because none of its tags matched the search criteria.') for old_key, value", "Automation parameters passed from the Hyperglance UI \"\"\" new_key = automation_params.get('New Key') matched_tag_attrs", "resource) ## Remove the old offending 
tag (we make sure to do the", "\"Subnet\", \"EBS Snapshot\", \"EBS Volume\", \"VPC\", \"SNS Topic\", \"SQS Queue\" ], \"params\": [", "new_key: continue ## Create the new tag and retain existing value utils.add_tag(boto_session, new_key,", "\"Security Group\", \"EC2 Instance\", \"AMI\", \"Internet Gateway\", \"Network ACL\", \"Network Interface\", \"Placement Group\",", "\"Network ACL\", \"Network Interface\", \"Placement Group\", \"Route Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\",", "EC2 Resource Parameters ---------- boto_session : object The boto session to use to", "resource['tags']] if (len(matched_tag_attrs) == 0): res_id = resource['id'] raise RuntimeError(f'No tags to update", "resource: dict Dict of Resource attributes touse in the automation automation_params : str", "res_id = resource['id'] raise RuntimeError(f'No tags to update on {res_id} because none of", "destructive action 2nd!) utils.remove_tag(boto_session, old_key, resource) def info() -> dict: INFO = {", "parameters passed from the Hyperglance UI \"\"\" new_key = automation_params.get('New Key') matched_tag_attrs =", "(len(matched_tag_attrs) == 0): res_id = resource['id'] raise RuntimeError(f'No tags to update on {res_id}", "info() -> dict: INFO = { \"displayName\": \"Update Tag\", \"description\": \"Replaces a tags", "} ], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\", \"sqs:TagQueue\", \"ec2:DeleteTags\", \"sns:UntagResource\", \"sqs:UntagQueue\" ] } return", "on {res_id} because none of its tags matched the search criteria.') for old_key,", "if attr[0] in resource['tags']] if (len(matched_tag_attrs) == 0): res_id = resource['id'] raise RuntimeError(f'No", "\"Network Interface\", \"Placement Group\", \"Route Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS Volume\",", "Snapshot\", \"EBS Volume\", \"VPC\", \"SNS Topic\", \"SQS Queue\" ], \"params\": [ { \"name\":", "INFO = { \"displayName\": \"Update Tag\", \"description\": \"Replaces a tags key but 
keeps", "tags matched the search criteria.') for old_key, value in matched_tag_attrs: # tag might", "\"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS Volume\", \"VPC\", \"SNS Topic\", \"SQS Queue\" ],", "the appropriate IAM Role exists. \"\"\" import processing.automation_utils as utils def hyperglance_automation(boto_session, resource:", "but keeps its value\", \"resourceTypes\": [ \"Security Group\", \"EC2 Instance\", \"AMI\", \"Internet Gateway\",", "the configured threshold by Hyperglance Rule(s) This automation will operate across accounts, where", "UI \"\"\" new_key = automation_params.get('New Key') matched_tag_attrs = [attr for attr in resource['matchedAttributes'].items()", "'good' if old_key == new_key: continue ## Create the new tag and retain", "to use to invoke the automation resource: dict Dict of Resource attributes touse", "str Automation parameters passed from the Hyperglance UI \"\"\" new_key = automation_params.get('New Key')", "in resource['tags']] if (len(matched_tag_attrs) == 0): res_id = resource['id'] raise RuntimeError(f'No tags to", "Tag an EC2 Resource Parameters ---------- boto_session : object The boto session to", "Dict of Resource attributes touse in the automation automation_params : str Automation parameters", "exists. 
\"\"\" import processing.automation_utils as utils def hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\" Attempts", "in for an EC2 resource, identified as above or below the configured threshold", "automation_params : str Automation parameters passed from the Hyperglance UI \"\"\" new_key =", "\"\"\" new_key = automation_params.get('New Key') matched_tag_attrs = [attr for attr in resource['matchedAttributes'].items() if", "\"EBS Snapshot\", \"EBS Volume\", \"VPC\", \"SNS Topic\", \"SQS Queue\" ], \"params\": [ {", "\"type\": \"string\", \"default\": \"\" } ], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\", \"sqs:TagQueue\", \"ec2:DeleteTags\", \"sns:UntagResource\",", "\"name\": \"New Key\", \"type\": \"string\", \"default\": \"\" } ], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\",", "criteria.') for old_key, value in matched_tag_attrs: # tag might already be 'good' if", "Instance\", \"AMI\", \"Internet Gateway\", \"Network ACL\", \"Network Interface\", \"Placement Group\", \"Route Table\", \"EC2", "\"Internet Gateway\", \"Network ACL\", \"Network Interface\", \"Placement Group\", \"Route Table\", \"EC2 Snapshot\", \"Subnet\",", "tag and retain existing value utils.add_tag(boto_session, new_key, value, resource) ## Remove the old", "resource, identified as above or below the configured threshold by Hyperglance Rule(s) This", "This automation attempts to fix a tag in for an EC2 resource, identified", "resource['id'] raise RuntimeError(f'No tags to update on {res_id} because none of its tags", "\"default\": \"\" } ], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\", \"sqs:TagQueue\", \"ec2:DeleteTags\", \"sns:UntagResource\", \"sqs:UntagQueue\" ]", "make sure to do the destructive action 2nd!) 
utils.remove_tag(boto_session, old_key, resource) def info()", "\"\" } ], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\", \"sqs:TagQueue\", \"ec2:DeleteTags\", \"sns:UntagResource\", \"sqs:UntagQueue\" ] }", "\"description\": \"Replaces a tags key but keeps its value\", \"resourceTypes\": [ \"Security Group\",", "from the Hyperglance UI \"\"\" new_key = automation_params.get('New Key') matched_tag_attrs = [attr for", "tags to update on {res_id} because none of its tags matched the search", "automation_params.get('New Key') matched_tag_attrs = [attr for attr in resource['matchedAttributes'].items() if attr[0] in resource['tags']]", "attr in resource['matchedAttributes'].items() if attr[0] in resource['tags']] if (len(matched_tag_attrs) == 0): res_id =", "as above or below the configured threshold by Hyperglance Rule(s) This automation will", "Key\", \"type\": \"string\", \"default\": \"\" } ], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\", \"sqs:TagQueue\", \"ec2:DeleteTags\",", "the automation automation_params : str Automation parameters passed from the Hyperglance UI \"\"\"", "Attempts to Tag an EC2 Resource Parameters ---------- boto_session : object The boto", "\"Placement Group\", \"Route Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS Volume\", \"VPC\", \"SNS", "\"params\": [ { \"name\": \"New Key\", \"type\": \"string\", \"default\": \"\" } ], \"permissions\":", "value, resource) ## Remove the old offending tag (we make sure to do", "attempts to fix a tag in for an EC2 resource, identified as above", "continue ## Create the new tag and retain existing value utils.add_tag(boto_session, new_key, value,", "key but keeps its value\", \"resourceTypes\": [ \"Security Group\", \"EC2 Instance\", \"AMI\", \"Internet", "to Tag an EC2 Resource Parameters ---------- boto_session : object The boto session", "old_key == new_key: continue ## Create the new tag and retain existing value", "<filename>lambda/automations/update_tag.py 
\"\"\"Tags an EC2 Resource This automation attempts to fix a tag in", "if old_key == new_key: continue ## Create the new tag and retain existing", "by Hyperglance Rule(s) This automation will operate across accounts, where the appropriate IAM", "automation will operate across accounts, where the appropriate IAM Role exists. \"\"\" import", "Resource Parameters ---------- boto_session : object The boto session to use to invoke", "Gateway\", \"Network ACL\", \"Network Interface\", \"Placement Group\", \"Route Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS", "\"\"\"Tags an EC2 Resource This automation attempts to fix a tag in for", "search criteria.') for old_key, value in matched_tag_attrs: # tag might already be 'good'", "= resource['id'] raise RuntimeError(f'No tags to update on {res_id} because none of its", "Rule(s) This automation will operate across accounts, where the appropriate IAM Role exists.", "-> dict: INFO = { \"displayName\": \"Update Tag\", \"description\": \"Replaces a tags key", "{ \"name\": \"New Key\", \"type\": \"string\", \"default\": \"\" } ], \"permissions\": [ \"ec2:CreateTags\",", "[ \"Security Group\", \"EC2 Instance\", \"AMI\", \"Internet Gateway\", \"Network ACL\", \"Network Interface\", \"Placement", "\"Replaces a tags key but keeps its value\", \"resourceTypes\": [ \"Security Group\", \"EC2", "\"resourceTypes\": [ \"Security Group\", \"EC2 Instance\", \"AMI\", \"Internet Gateway\", \"Network ACL\", \"Network Interface\",", "matched the search criteria.') for old_key, value in matched_tag_attrs: # tag might already", "\"\"\" import processing.automation_utils as utils def hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\" Attempts to", "tag in for an EC2 resource, identified as above or below the configured", "sure to do the destructive action 2nd!) 
utils.remove_tag(boto_session, old_key, resource) def info() ->", "boto session to use to invoke the automation resource: dict Dict of Resource", "a tags key but keeps its value\", \"resourceTypes\": [ \"Security Group\", \"EC2 Instance\",", "its tags matched the search criteria.') for old_key, value in matched_tag_attrs: # tag", "of Resource attributes touse in the automation automation_params : str Automation parameters passed", "Key') matched_tag_attrs = [attr for attr in resource['matchedAttributes'].items() if attr[0] in resource['tags']] if", "Hyperglance UI \"\"\" new_key = automation_params.get('New Key') matched_tag_attrs = [attr for attr in", "\"VPC\", \"SNS Topic\", \"SQS Queue\" ], \"params\": [ { \"name\": \"New Key\", \"type\":", "matched_tag_attrs = [attr for attr in resource['matchedAttributes'].items() if attr[0] in resource['tags']] if (len(matched_tag_attrs)", "for old_key, value in matched_tag_attrs: # tag might already be 'good' if old_key", "Tag\", \"description\": \"Replaces a tags key but keeps its value\", \"resourceTypes\": [ \"Security", "# tag might already be 'good' if old_key == new_key: continue ## Create", "new tag and retain existing value utils.add_tag(boto_session, new_key, value, resource) ## Remove the", "IAM Role exists. 
\"\"\" import processing.automation_utils as utils def hyperglance_automation(boto_session, resource: dict, automation_params=''):", "Create the new tag and retain existing value utils.add_tag(boto_session, new_key, value, resource) ##", "for an EC2 resource, identified as above or below the configured threshold by", "attributes touse in the automation automation_params : str Automation parameters passed from the", "The boto session to use to invoke the automation resource: dict Dict of", "tag might already be 'good' if old_key == new_key: continue ## Create the", "processing.automation_utils as utils def hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\" Attempts to Tag an", "Resource attributes touse in the automation automation_params : str Automation parameters passed from", "across accounts, where the appropriate IAM Role exists. \"\"\" import processing.automation_utils as utils", "Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS Volume\", \"VPC\", \"SNS Topic\", \"SQS Queue\"", "\"New Key\", \"type\": \"string\", \"default\": \"\" } ], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\", \"sqs:TagQueue\",", "== new_key: continue ## Create the new tag and retain existing value utils.add_tag(boto_session,", "Group\", \"Route Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS Volume\", \"VPC\", \"SNS Topic\",", "new_key = automation_params.get('New Key') matched_tag_attrs = [attr for attr in resource['matchedAttributes'].items() if attr[0]", "an EC2 Resource This automation attempts to fix a tag in for an", "threshold by Hyperglance Rule(s) This automation will operate across accounts, where the appropriate", ": object The boto session to use to invoke the automation resource: dict", "utils.add_tag(boto_session, new_key, value, resource) ## Remove the old offending tag (we make sure", "where the appropriate IAM Role exists. 
\"\"\" import processing.automation_utils as utils def hyperglance_automation(boto_session,", "as utils def hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\" Attempts to Tag an EC2", "dict: INFO = { \"displayName\": \"Update Tag\", \"description\": \"Replaces a tags key but", "to fix a tag in for an EC2 resource, identified as above or", "Interface\", \"Placement Group\", \"Route Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS Volume\", \"VPC\",", "old offending tag (we make sure to do the destructive action 2nd!) utils.remove_tag(boto_session,", "{ \"displayName\": \"Update Tag\", \"description\": \"Replaces a tags key but keeps its value\",", "to do the destructive action 2nd!) utils.remove_tag(boto_session, old_key, resource) def info() -> dict:", "0): res_id = resource['id'] raise RuntimeError(f'No tags to update on {res_id} because none", "value in matched_tag_attrs: # tag might already be 'good' if old_key == new_key:", "Remove the old offending tag (we make sure to do the destructive action", "a tag in for an EC2 resource, identified as above or below the", "def info() -> dict: INFO = { \"displayName\": \"Update Tag\", \"description\": \"Replaces a", "the automation resource: dict Dict of Resource attributes touse in the automation automation_params", "= [attr for attr in resource['matchedAttributes'].items() if attr[0] in resource['tags']] if (len(matched_tag_attrs) ==", "], \"permissions\": [ \"ec2:CreateTags\", \"sns:TagResource\", \"sqs:TagQueue\", \"ec2:DeleteTags\", \"sns:UntagResource\", \"sqs:UntagQueue\" ] } return INFO", "accounts, where the appropriate IAM Role exists. \"\"\" import processing.automation_utils as utils def", "old_key, resource) def info() -> dict: INFO = { \"displayName\": \"Update Tag\", \"description\":", "session to use to invoke the automation resource: dict Dict of Resource attributes", "tag (we make sure to do the destructive action 2nd!) 
utils.remove_tag(boto_session, old_key, resource)", "if (len(matched_tag_attrs) == 0): res_id = resource['id'] raise RuntimeError(f'No tags to update on", "value\", \"resourceTypes\": [ \"Security Group\", \"EC2 Instance\", \"AMI\", \"Internet Gateway\", \"Network ACL\", \"Network", "automation attempts to fix a tag in for an EC2 resource, identified as", ": str Automation parameters passed from the Hyperglance UI \"\"\" new_key = automation_params.get('New", "attr[0] in resource['tags']] if (len(matched_tag_attrs) == 0): res_id = resource['id'] raise RuntimeError(f'No tags", "retain existing value utils.add_tag(boto_session, new_key, value, resource) ## Remove the old offending tag", "and retain existing value utils.add_tag(boto_session, new_key, value, resource) ## Remove the old offending", "\"Update Tag\", \"description\": \"Replaces a tags key but keeps its value\", \"resourceTypes\": [", "value utils.add_tag(boto_session, new_key, value, resource) ## Remove the old offending tag (we make", "new_key, value, resource) ## Remove the old offending tag (we make sure to", "\"Route Table\", \"EC2 Snapshot\", \"Subnet\", \"EBS Snapshot\", \"EBS Volume\", \"VPC\", \"SNS Topic\", \"SQS", "the old offending tag (we make sure to do the destructive action 2nd!)", "This automation will operate across accounts, where the appropriate IAM Role exists. 
\"\"\"", "hyperglance_automation(boto_session, resource: dict, automation_params=''): \"\"\" Attempts to Tag an EC2 Resource Parameters ----------", "EC2 resource, identified as above or below the configured threshold by Hyperglance Rule(s)", "utils.remove_tag(boto_session, old_key, resource) def info() -> dict: INFO = { \"displayName\": \"Update Tag\",", "none of its tags matched the search criteria.') for old_key, value in matched_tag_attrs:", "be 'good' if old_key == new_key: continue ## Create the new tag and", "keeps its value\", \"resourceTypes\": [ \"Security Group\", \"EC2 Instance\", \"AMI\", \"Internet Gateway\", \"Network", "the Hyperglance UI \"\"\" new_key = automation_params.get('New Key') matched_tag_attrs = [attr for attr", "for attr in resource['matchedAttributes'].items() if attr[0] in resource['tags']] if (len(matched_tag_attrs) == 0): res_id", "(we make sure to do the destructive action 2nd!) utils.remove_tag(boto_session, old_key, resource) def", "], \"params\": [ { \"name\": \"New Key\", \"type\": \"string\", \"default\": \"\" } ],", "or below the configured threshold by Hyperglance Rule(s) This automation will operate across", "appropriate IAM Role exists. 
\"\"\" import processing.automation_utils as utils def hyperglance_automation(boto_session, resource: dict,", "object The boto session to use to invoke the automation resource: dict Dict", "raise RuntimeError(f'No tags to update on {res_id} because none of its tags matched", "invoke the automation resource: dict Dict of Resource attributes touse in the automation", "= automation_params.get('New Key') matched_tag_attrs = [attr for attr in resource['matchedAttributes'].items() if attr[0] in", "boto_session : object The boto session to use to invoke the automation resource:", "tags key but keeps its value\", \"resourceTypes\": [ \"Security Group\", \"EC2 Instance\", \"AMI\",", "dict Dict of Resource attributes touse in the automation automation_params : str Automation", "\"SNS Topic\", \"SQS Queue\" ], \"params\": [ { \"name\": \"New Key\", \"type\": \"string\",", "Queue\" ], \"params\": [ { \"name\": \"New Key\", \"type\": \"string\", \"default\": \"\" }", "action 2nd!) utils.remove_tag(boto_session, old_key, resource) def info() -> dict: INFO = { \"displayName\":", "## Remove the old offending tag (we make sure to do the destructive", "above or below the configured threshold by Hyperglance Rule(s) This automation will operate" ]
[ "clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is not None \\ and cleaned_data['parent']", "self).__init__(*args, **kwargs) def clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is not None", "from .models import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class Meta(object): model =", "super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is not None \\ and cleaned_data['parent'] is not None:", "le dossier a un parent.' self.add_error('categorie', msg) self.add_error('parent', msg) evenements = cleaned_data.get('evenements') if", "if cleaned_data['statique']: if not evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] =", "is not None: initial = kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists() kwargs['initial'] = initial", "instance is not None: initial = kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists() kwargs['initial'] =", "cleaned_data['parent'] is not None: msg = 'Ne pas saisir de catégorie si le", "= kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists() kwargs['initial'] = initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def", "= initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie']", ".models import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class Meta(object): model = DossierDEvenements", "initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is", "import BooleanField, ModelForm 
from tree.forms import TreeChoiceField from .models import DossierDEvenements class DossierDEvenementsForm(ModelForm):", "django.forms import BooleanField, ModelForm from tree.forms import TreeChoiceField from .models import DossierDEvenements class", "kwargs.get('instance') if instance is not None: initial = kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists()", "= instance.evenements.exists() kwargs['initial'] = initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self): cleaned_data = super(DossierDEvenementsForm,", "de catégorie si le dossier a un parent.' self.add_error('categorie', msg) self.add_error('parent', msg) evenements", "{ 'parent': TreeChoiceField, } class Media(object): css = { 'all': ('css/custom_admin.css',), } def", "si le dossier a un parent.' self.add_error('categorie', msg) self.add_error('parent', msg) evenements = cleaned_data.get('evenements')", "not evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = [] if self.instance.pk", "exclude = () field_classes = { 'parent': TreeChoiceField, } class Media(object): css =", "not None \\ and cleaned_data['parent'] is not None: msg = 'Ne pas saisir", "__init__(self, *args, **kwargs): instance = kwargs.get('instance') if instance is not None: initial =", "evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = [] if self.instance.pk is", "catégorie si le dossier a un parent.' 
self.add_error('categorie', msg) self.add_error('parent', msg) evenements =", "tree.forms import TreeChoiceField from .models import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class", "TreeChoiceField, } class Media(object): css = { 'all': ('css/custom_admin.css',), } def __init__(self, *args,", "class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class Meta(object): model = DossierDEvenements exclude = ()", "cleaned_data.get('evenements') if cleaned_data['statique']: if not evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements']", "def clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is not None \\ and", "un parent.' self.add_error('categorie', msg) self.add_error('parent', msg) evenements = cleaned_data.get('evenements') if cleaned_data['statique']: if not", "from django.forms import BooleanField, ModelForm from tree.forms import TreeChoiceField from .models import DossierDEvenements", "'parent': TreeChoiceField, } class Media(object): css = { 'all': ('css/custom_admin.css',), } def __init__(self,", "if cleaned_data['categorie'] is not None \\ and cleaned_data['parent'] is not None: msg =", "\\ and cleaned_data['parent'] is not None: msg = 'Ne pas saisir de catégorie", "= 'Ne pas saisir de catégorie si le dossier a un parent.' 
self.add_error('categorie',", "BooleanField(required=False) class Meta(object): model = DossierDEvenements exclude = () field_classes = { 'parent':", "if not evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = [] if", "self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = [] if self.instance.pk is not None: self.instance.evenements.clear() return", "initial = kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists() kwargs['initial'] = initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs)", "{ 'all': ('css/custom_admin.css',), } def __init__(self, *args, **kwargs): instance = kwargs.get('instance') if instance", "from tree.forms import TreeChoiceField from .models import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False)", "= { 'parent': TreeChoiceField, } class Media(object): css = { 'all': ('css/custom_admin.css',), }", "dossier a un parent.' self.add_error('categorie', msg) self.add_error('parent', msg) evenements = cleaned_data.get('evenements') if cleaned_data['statique']:", "self.add_error('categorie', msg) self.add_error('parent', msg) evenements = cleaned_data.get('evenements') if cleaned_data['statique']: if not evenements: cleaned_data['evenements']", "TreeChoiceField from .models import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class Meta(object): model", "parent.' self.add_error('categorie', msg) self.add_error('parent', msg) evenements = cleaned_data.get('evenements') if cleaned_data['statique']: if not evenements:", "**kwargs): instance = kwargs.get('instance') if instance is not None: initial = kwargs.get('initial', {})", "a un parent.' 
self.add_error('categorie', msg) self.add_error('parent', msg) evenements = cleaned_data.get('evenements') if cleaned_data['statique']: if", "evenements = cleaned_data.get('evenements') if cleaned_data['statique']: if not evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements)", "ModelForm from tree.forms import TreeChoiceField from .models import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique =", "css = { 'all': ('css/custom_admin.css',), } def __init__(self, *args, **kwargs): instance = kwargs.get('instance')", "Media(object): css = { 'all': ('css/custom_admin.css',), } def __init__(self, *args, **kwargs): instance =", "**kwargs) def clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is not None \\", "DossierDEvenements exclude = () field_classes = { 'parent': TreeChoiceField, } class Media(object): css", "None: initial = kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists() kwargs['initial'] = initial super(DossierDEvenementsForm, self).__init__(*args,", "None: msg = 'Ne pas saisir de catégorie si le dossier a un", "if instance is not None: initial = kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists() kwargs['initial']", "saisir de catégorie si le dossier a un parent.' 
self.add_error('categorie', msg) self.add_error('parent', msg)", "self).clean() if cleaned_data['categorie'] is not None \\ and cleaned_data['parent'] is not None: msg", "() field_classes = { 'parent': TreeChoiceField, } class Media(object): css = { 'all':", "None \\ and cleaned_data['parent'] is not None: msg = 'Ne pas saisir de", "def __init__(self, *args, **kwargs): instance = kwargs.get('instance') if instance is not None: initial", "statique = BooleanField(required=False) class Meta(object): model = DossierDEvenements exclude = () field_classes =", "Meta(object): model = DossierDEvenements exclude = () field_classes = { 'parent': TreeChoiceField, }", "super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is not", "is not None: msg = 'Ne pas saisir de catégorie si le dossier", "= BooleanField(required=False) class Meta(object): model = DossierDEvenements exclude = () field_classes = {", "field_classes = { 'parent': TreeChoiceField, } class Media(object): css = { 'all': ('css/custom_admin.css',),", "'all': ('css/custom_admin.css',), } def __init__(self, *args, **kwargs): instance = kwargs.get('instance') if instance is", "initial['statique'] = instance.evenements.exists() kwargs['initial'] = initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self): cleaned_data =", "DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class Meta(object): model = DossierDEvenements exclude = () field_classes", "= cleaned_data.get('evenements') if cleaned_data['statique']: if not evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else:", "*args, **kwargs): instance = kwargs.get('instance') if instance is not None: initial = kwargs.get('initial',", "{}) initial['statique'] = instance.evenements.exists() kwargs['initial'] = initial 
super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self): cleaned_data", "self.add_error('parent', msg) evenements = cleaned_data.get('evenements') if cleaned_data['statique']: if not evenements: cleaned_data['evenements'] = \\", "pas saisir de catégorie si le dossier a un parent.' self.add_error('categorie', msg) self.add_error('parent',", "self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = [] if self.instance.pk is not None: self.instance.evenements.clear() return cleaned_data", "msg) evenements = cleaned_data.get('evenements') if cleaned_data['statique']: if not evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True)", "class Media(object): css = { 'all': ('css/custom_admin.css',), } def __init__(self, *args, **kwargs): instance", "= { 'all': ('css/custom_admin.css',), } def __init__(self, *args, **kwargs): instance = kwargs.get('instance') if", "is not None \\ and cleaned_data['parent'] is not None: msg = 'Ne pas", "class Meta(object): model = DossierDEvenements exclude = () field_classes = { 'parent': TreeChoiceField,", "DossierDEvenements class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class Meta(object): model = DossierDEvenements exclude =", "= DossierDEvenements exclude = () field_classes = { 'parent': TreeChoiceField, } class Media(object):", "and cleaned_data['parent'] is not None: msg = 'Ne pas saisir de catégorie si", "not None: initial = kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists() kwargs['initial'] = initial super(DossierDEvenementsForm,", "msg) self.add_error('parent', msg) evenements = cleaned_data.get('evenements') if cleaned_data['statique']: if not evenements: cleaned_data['evenements'] =", "model = DossierDEvenements exclude = () field_classes = { 'parent': TreeChoiceField, } class", "= kwargs.get('instance') if instance is not None: initial = kwargs.get('initial', {}) initial['statique'] =", 
"kwargs['initial'] = initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean() if", "import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class Meta(object): model = DossierDEvenements exclude", "kwargs.get('initial', {}) initial['statique'] = instance.evenements.exists() kwargs['initial'] = initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self):", "cleaned_data['categorie'] is not None \\ and cleaned_data['parent'] is not None: msg = 'Ne", "BooleanField, ModelForm from tree.forms import TreeChoiceField from .models import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique", "instance = kwargs.get('instance') if instance is not None: initial = kwargs.get('initial', {}) initial['statique']", "} class Media(object): css = { 'all': ('css/custom_admin.css',), } def __init__(self, *args, **kwargs):", "not None: msg = 'Ne pas saisir de catégorie si le dossier a", "= () field_classes = { 'parent': TreeChoiceField, } class Media(object): css = {", "\\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = [] if self.instance.pk is not None: self.instance.evenements.clear()", "msg = 'Ne pas saisir de catégorie si le dossier a un parent.'", "= super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is not None \\ and cleaned_data['parent'] is not", "cleaned_data['statique']: if not evenements: cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = []", "} def __init__(self, *args, **kwargs): instance = kwargs.get('instance') if instance is not None:", "cleaned_data['evenements'] = \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = [] if 
self.instance.pk is not", "import TreeChoiceField from .models import DossierDEvenements class DossierDEvenementsForm(ModelForm): statique = BooleanField(required=False) class Meta(object):", "'Ne pas saisir de catégorie si le dossier a un parent.' self.add_error('categorie', msg)", "cleaned_data = super(DossierDEvenementsForm, self).clean() if cleaned_data['categorie'] is not None \\ and cleaned_data['parent'] is", "= \\ self.instance.get_queryset(dynamic=True) self.instance.evenements.add(*evenements) else: cleaned_data['evenements'] = [] if self.instance.pk is not None:", "instance.evenements.exists() kwargs['initial'] = initial super(DossierDEvenementsForm, self).__init__(*args, **kwargs) def clean(self): cleaned_data = super(DossierDEvenementsForm, self).clean()", "('css/custom_admin.css',), } def __init__(self, *args, **kwargs): instance = kwargs.get('instance') if instance is not" ]
[ "make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification date try: (f, r) =", "{ \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", # \"name\" : \"ASP.NET Misconfiguration: Missing", "parse_datetime from xml.sax import make_parser from xml.sax.handler import ContentHandler import argparse import zipfile", "output') args = argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self): self.cwe = [] self.description_summary_tag =", "Custom Error Page\", # \"description_summary\" : \"An ASP .NET application must enable custom", "import argparse import zipfile import tempfile from lib.ProgressBar import progressbar from lib.Config import", "modification date try: (f, r) = Configuration.getFeedData('cwe') except Exception as e: print(e) sys.exit(\"Cannot", "xml and store in database parser.parse(f) cweList=[] for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \"", "python3 # # Import script of NIST CWE Common Weakness Enumeration. # #", "ASP .NET application must enable custom # error pages in order to prevent", "= parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if i is not None: if lastmodified", "argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self): self.cwe = [] self.description_summary_tag = False self.weakness_tag =", "\"\") elif name == 'Weakness': self.weakness_tag = False # make parser parser =", "# framework's built-in responses.\", \"status\" : \"Draft\", \"id\" : \"12\" } # #", "Weakness Enumeration database') argparser.add_argument('-v', action='store_true', help='verbose output') args = argparser.parse_args() class CWEHandler(ContentHandler): def", "# Import script of NIST CWE Common Weakness Enumeration. 
# # Until now,", "from xml.sax import make_parser from xml.sax.handler import ContentHandler import argparse import zipfile import", "= CWEHandler() parser.setContentHandler(ch) # check modification date try: (f, r) = Configuration.getFeedData('cwe') except", "from lib.Config import Configuration import lib.DatabaseLayer as db argparser = argparse.ArgumentParser(description='populate/update NIST CWE", "# The format is the following: # # { \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\"", "os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import parse as", "now, the import is only import Weakness description. # # The format is", "from dateutil.parser import parse as parse_datetime from xml.sax import make_parser from xml.sax.handler import", "= make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification date try: (f, r)", "cweList=[] for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v: print (cwe) cweList.append(cwe)", "from mining information from the # framework's built-in responses.An ASP .NET application must", "# Until now, the import is only import Weakness description. # # The", "check modification date try: (f, r) = Configuration.getFeedData('cwe') except Exception as e: print(e)", "help='verbose output') args = argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self): self.cwe = [] self.description_summary_tag", "Import script of NIST CWE Common Weakness Enumeration. 
# # Until now, the", "args = argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self): self.cwe = [] self.description_summary_tag = False", "the following: # # { \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", # \"name\"", "ch.replace(\" \", \"\") def endElement(self, name): if name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag", "and self.weakness_tag: self.description_summary_tag = False self.description_summary = self.description_summary + self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\",", "# # { \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", # \"name\" : \"ASP.NET", "prevent attackers from mining information from the # framework's built-in responses.\", \"status\" :", "import make_parser from xml.sax.handler import ContentHandler import argparse import zipfile import tempfile from", "self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif name == 'Weakness': self.weakness_tag = False #", "<gh_stars>100-1000 #!/usr/bin/env python3 # # Import script of NIST CWE Common Weakness Enumeration.", "= self.description_summary + self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif name == 'Weakness': self.weakness_tag", "db argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration database') argparser.add_argument('-v', action='store_true', help='verbose", "argparser.add_argument('-v', action='store_true', help='verbose output') args = argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self): self.cwe =", "# Copyright (c) 2013-2014 <NAME> - <EMAIL> # Copyright (c) 2015-2016 <NAME> -", "internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i = 
db.getLastModified('cwe') if i is not None:", "progressbar from lib.Config import Configuration import lib.DatabaseLayer as db argparser = argparse.ArgumentParser(description='populate/update NIST", "cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList) #update database info", "framework's built-in responses.\", \"status\" : \"Draft\", \"id\" : \"12\" } # # Software", "# # Import script of NIST CWE Common Weakness Enumeration. # # Until", "- <EMAIL> # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\"))", "license\" # # Copyright (c) 2013-2014 <NAME> - <EMAIL> # Copyright (c) 2015-2016", "self.description_summary_tag = False self.description_summary = self.description_summary + self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif", "make_parser from xml.sax.handler import ContentHandler import argparse import zipfile import tempfile from lib.ProgressBar", "self.description_summary_tag = False self.weakness_tag = False def startElement(self, name, attrs): if name ==", "ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", # \"name\" : \"ASP.NET Misconfiguration: Missing Custom Error Page\",", "CWE Common Weakness Enumeration. # # Until now, the import is only import", "'Description_Summary' and self.weakness_tag: self.description_summary_tag = False self.description_summary = self.description_summary + self.description_summary self.cwe[-1]['description_summary'] =", "\"Variant\", # \"name\" : \"ASP.NET Misconfiguration: Missing Custom Error Page\", # \"description_summary\" :", "is only import Weakness description. 
# # The format is the following: #", "Software is free software released under the \"Modified BSD license\" # # Copyright", ": \"12\" } # # Software is free software released under the \"Modified", "CWEHandler() parser.setContentHandler(ch) # check modification date try: (f, r) = Configuration.getFeedData('cwe') except Exception", "self.description_summary = \"\" def characters(self, ch): if self.description_summary_tag: self.description_summary += ch.replace(\" \", \"\")", "built-in responses.\", \"status\" : \"Draft\", \"id\" : \"12\" } # # Software is", "name): if name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = False self.description_summary = self.description_summary", "== 'Weakness': self.weakness_tag = False # make parser parser = make_parser() ch =", "'Description_Summary' and self.weakness_tag: self.description_summary_tag = True self.description_summary = \"\" def characters(self, ch): if", "Enumeration. # # Until now, the import is only import Weakness description. #", "db.getLastModified('cwe') if i is not None: if lastmodified == i: print(\"Not modified\") sys.exit(0)", "'Weakness': self.weakness_tag = False # make parser parser = make_parser() ch = CWEHandler()", "and store in database parser.parse(f) cweList=[] for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \")", "Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import", "True self.description_summary = \"\" def characters(self, ch): if self.description_summary_tag: self.description_summary += ch.replace(\" \",", "== 'Description_Summary' and self.weakness_tag: self.description_summary_tag = True self.description_summary = \"\" def characters(self, ch):", "print(e) sys.exit(\"Cannot open url %s. 
Bad URL or not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\")))", "i: print(\"Not modified\") sys.exit(0) # parse xml and store in database parser.parse(f) cweList=[]", ".NET application must enable custom # error pages in order to prevent attackers", "= False def startElement(self, name, attrs): if name == 'Weakness': self.weakness_tag = True", "following: # # { \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", # \"name\" :", "the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if i is not", "args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList) #update database info after successful program-run db.setColUpdate('cwe',", "argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration database') argparser.add_argument('-v', action='store_true', help='verbose output')", "as e: print(e) sys.exit(\"Cannot open url %s. 
Bad URL or not connected to", "self.status = attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs}) elif name", "is free software released under the \"Modified BSD license\" # # Copyright (c)", "# # Copyright (c) 2013-2014 <NAME> - <EMAIL> # Copyright (c) 2015-2016 <NAME>", "= attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs}) elif name ==", ": \"An ASP .NET application must enable custom error # pages in order", "} # # Software is free software released under the \"Modified BSD license\"", "print(\"Not modified\") sys.exit(0) # parse xml and store in database parser.parse(f) cweList=[] for", "class CWEHandler(ContentHandler): def __init__(self): self.cwe = [] self.description_summary_tag = False self.weakness_tag = False", "ASP .NET application must enable custom error # pages in order to prevent", "ignoretz=True) i = db.getLastModified('cwe') if i is not None: if lastmodified == i:", "sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import parse as parse_datetime from", "import is only import Weakness description. 
# # The format is the following:", "Misconfiguration: Missing Custom Error Page\", # \"description_summary\" : \"An ASP .NET application must", "enable custom # error pages in order to prevent attackers from mining information", "self.weakness_tag: self.description_summary_tag = False self.description_summary = self.description_summary + self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\")", "is not None: if lastmodified == i: print(\"Not modified\") sys.exit(0) # parse xml", "\"ASP.NET Misconfiguration: Missing Custom Error Page\", # \"description_summary\" : \"An ASP .NET application", "parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification date try:", "= attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs})", "cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList)", "application must enable custom error # pages in order to prevent attackers from", "Weakness description. 
# # The format is the following: # # { \"_id\"", "# pages in order to prevent attackers from mining information from the #", "<EMAIL> # Copyright (c) 2015-2016 <NAME> - <EMAIL> # Imports import os import", "xml.sax.handler import ContentHandler import argparse import zipfile import tempfile from lib.ProgressBar import progressbar", "for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe',", "information from the # framework's built-in responses.\", \"status\" : \"Draft\", \"id\" : \"12\"", "self.description_summary_tag = True self.description_summary = \"\" def characters(self, ch): if self.description_summary_tag: self.description_summary +=", "self.weaknessabs}) elif name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = True self.description_summary = \"\"", "connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if i", "url %s. 
Bad URL or not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'],", ": \"Variant\", # \"name\" : \"ASP.NET Misconfiguration: Missing Custom Error Page\", # \"description_summary\"", "must enable custom error # pages in order to prevent attackers from mining", "as parse_datetime from xml.sax import make_parser from xml.sax.handler import ContentHandler import argparse import", "Page\", # \"description_summary\" : \"An ASP .NET application must enable custom error #", "False self.description_summary = self.description_summary + self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif name ==", "name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = True self.description_summary = \"\" def characters(self,", "endElement(self, name): if name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = False self.description_summary =", "free software released under the \"Modified BSD license\" # # Copyright (c) 2013-2014", "dateutil.parser import parse as parse_datetime from xml.sax import make_parser from xml.sax.handler import ContentHandler", "= self.description_summary.replace(\"\\n\", \"\") elif name == 'Weakness': self.weakness_tag = False # make parser", "responses.\", \"status\" : \"Draft\", \"id\" : \"12\" } # # Software is free", "is the following: # # { \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", #", "Enumeration database') argparser.add_argument('-v', action='store_true', help='verbose output') args = argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self):", "# make parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification", "Until now, the import is only import Weakness description. 
# # The format", "startElement(self, name, attrs): if name == 'Weakness': self.weakness_tag = True self.statement = \"\"", "lib.Config import Configuration import lib.DatabaseLayer as db argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common", "i = db.getLastModified('cwe') if i is not None: if lastmodified == i: print(\"Not", "e: print(e) sys.exit(\"Cannot open url %s. Bad URL or not connected to the", "Copyright (c) 2015-2016 <NAME> - <EMAIL> # Imports import os import sys runPath", "= attrs.get('Name') self.idname = attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname, 'status':", "parser.parse(f) cweList=[] for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v: print (cwe)", "self.name, 'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs}) elif name == 'Description_Summary' and self.weakness_tag:", "self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs}) elif name == 'Description_Summary' and", "self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif name == 'Weakness': self.weakness_tag = False # make", "open url %s. 
Bad URL or not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified =", "# \"name\" : \"ASP.NET Misconfiguration: Missing Custom Error Page\", # \"description_summary\" : \"An", "if lastmodified == i: print(\"Not modified\") sys.exit(0) # parse xml and store in", "self.statement = \"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname = attrs.get('ID') self.status", "URL or not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i =", "= Configuration.getFeedData('cwe') except Exception as e: print(e) sys.exit(\"Cannot open url %s. Bad URL", "CWEHandler(ContentHandler): def __init__(self): self.cwe = [] self.description_summary_tag = False self.weakness_tag = False def", "Weakness Enumeration. # # Until now, the import is only import Weakness description.", ": \"ASP.NET Misconfiguration: Missing Custom Error Page\", # \"description_summary\" : \"An ASP .NET", "custom # error pages in order to prevent attackers from mining information from", "self.description_summary = self.description_summary + self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif name == 'Weakness':", "responses.An ASP .NET application must enable custom # error pages in order to", "<NAME> - <EMAIL> # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath,", "def startElement(self, name, attrs): if name == 'Weakness': self.weakness_tag = True self.statement =", "# Copyright (c) 2015-2016 <NAME> - <EMAIL> # Imports import os import sys", "\"An ASP .NET application must enable custom error # pages in order to", "r) = Configuration.getFeedData('cwe') except Exception as e: print(e) sys.exit(\"Cannot open url %s. 
Bad", "the # framework's built-in responses.An ASP .NET application must enable custom # error", "in database parser.parse(f) cweList=[] for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v:", "= argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration database') argparser.add_argument('-v', action='store_true', help='verbose output') args", "Configuration import lib.DatabaseLayer as db argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration", "format is the following: # # { \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\",", "sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import parse as parse_datetime from xml.sax import make_parser from", "(c) 2015-2016 <NAME> - <EMAIL> # Imports import os import sys runPath =", "\"\" def characters(self, ch): if self.description_summary_tag: self.description_summary += ch.replace(\" \", \"\") def endElement(self,", "prevent attackers from mining information from the # framework's built-in responses.An ASP .NET", "progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList) #update database", "= attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname = attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name': self.name,", "self.description_summary += ch.replace(\" \", \"\") def endElement(self, name): if name == 'Description_Summary' and", "script of NIST CWE Common Weakness Enumeration. 
# # Until now, the import", "parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if i is not None: if lastmodified ==", "def __init__(self): self.cwe = [] self.description_summary_tag = False self.weakness_tag = False def startElement(self,", "not None: if lastmodified == i: print(\"Not modified\") sys.exit(0) # parse xml and", "'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs}) elif name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag", "except Exception as e: print(e) sys.exit(\"Cannot open url %s. Bad URL or not", "self.idname = attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status, 'weaknessabs':", "error # pages in order to prevent attackers from mining information from the", "the # framework's built-in responses.\", \"status\" : \"Draft\", \"id\" : \"12\" } #", "def endElement(self, name): if name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = False self.description_summary", "argparse import zipfile import tempfile from lib.ProgressBar import progressbar from lib.Config import Configuration", "\"..\")) from dateutil.parser import parse as parse_datetime from xml.sax import make_parser from xml.sax.handler", "= True self.statement = \"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname =", "to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if i is", "modified\") sys.exit(0) # parse xml and store in database parser.parse(f) cweList=[] for cwe", "mining information from the # framework's built-in responses.An ASP .NET application must enable", "Bad URL or not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i", "lib.DatabaseLayer 
as db argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration database') argparser.add_argument('-v',", "+ self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif name == 'Weakness': self.weakness_tag = False", "# # The format is the following: # # { \"_id\" : ObjectId(\"52b70521b261026f36818515\"),", "self.weakness_tag: self.description_summary_tag = True self.description_summary = \"\" def characters(self, ch): if self.description_summary_tag: self.description_summary", "= \"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname = attrs.get('ID') self.status =", "characters(self, ch): if self.description_summary_tag: self.description_summary += ch.replace(\" \", \"\") def endElement(self, name): if", "application must enable custom # error pages in order to prevent attackers from", "in order to prevent attackers from mining information from the # framework's built-in", "CWE Common Weakness Enumeration database') argparser.add_argument('-v', action='store_true', help='verbose output') args = argparser.parse_args() class", "released under the \"Modified BSD license\" # # Copyright (c) 2013-2014 <NAME> -", "# # Until now, the import is only import Weakness description. # #", "Common Weakness Enumeration. 
# # Until now, the import is only import Weakness", "\" \") if args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList) #update database info after", "if i is not None: if lastmodified == i: print(\"Not modified\") sys.exit(0) #", "import tempfile from lib.ProgressBar import progressbar from lib.Config import Configuration import lib.DatabaseLayer as", "import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import parse as parse_datetime", "name, attrs): if name == 'Weakness': self.weakness_tag = True self.statement = \"\" self.weaknessabs", ": \"Draft\", \"id\" : \"12\" } # # Software is free software released", "import zipfile import tempfile from lib.ProgressBar import progressbar from lib.Config import Configuration import", "of NIST CWE Common Weakness Enumeration. # # Until now, the import is", "\"status\" : \"Draft\", \"id\" : \"12\" } # # Software is free software", "from xml.sax.handler import ContentHandler import argparse import zipfile import tempfile from lib.ProgressBar import", "\"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", # \"name\" : \"ASP.NET Misconfiguration: Missing Custom", "from mining information from the # framework's built-in responses.\", \"status\" : \"Draft\", \"id\"", "mining information from the # framework's built-in responses.\", \"status\" : \"Draft\", \"id\" :", "True self.statement = \"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname = attrs.get('ID')", "'Weakness': self.weakness_tag = True self.statement = \"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name = attrs.get('Name')", "'weaknessabs': self.weaknessabs}) elif name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = True self.description_summary =", "attrs): if name == 'Weakness': self.weakness_tag = True self.statement = \"\" self.weaknessabs =", "try: (f, 
r) = Configuration.getFeedData('cwe') except Exception as e: print(e) sys.exit(\"Cannot open url", "as db argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration database') argparser.add_argument('-v', action='store_true',", "False # make parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check", "BSD license\" # # Copyright (c) 2013-2014 <NAME> - <EMAIL> # Copyright (c)", "import progressbar from lib.Config import Configuration import lib.DatabaseLayer as db argparser = argparse.ArgumentParser(description='populate/update", "# \"description_summary\" : \"An ASP .NET application must enable custom error # pages", "the \"Modified BSD license\" # # Copyright (c) 2013-2014 <NAME> - <EMAIL> #", "os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import parse as parse_datetime from xml.sax import make_parser", "# Software is free software released under the \"Modified BSD license\" # #", "not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if", "custom error # pages in order to prevent attackers from mining information from", "only import Weakness description. # # The format is the following: # #", "Configuration.getFeedData('cwe') except Exception as e: print(e) sys.exit(\"Cannot open url %s. 
Bad URL or", "import Configuration import lib.DatabaseLayer as db argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness", "information from the # framework's built-in responses.An ASP .NET application must enable custom", "attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname = attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name': self.name, 'id':", "import lib.DatabaseLayer as db argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration database')", "self.status, 'weaknessabs': self.weaknessabs}) elif name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = True self.description_summary", "Common Weakness Enumeration database') argparser.add_argument('-v', action='store_true', help='verbose output') args = argparser.parse_args() class CWEHandler(ContentHandler):", "the import is only import Weakness description. # # The format is the", "zipfile import tempfile from lib.ProgressBar import progressbar from lib.Config import Configuration import lib.DatabaseLayer", "= False # make parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) #", "runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import parse as parse_datetime from xml.sax", "\"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname = attrs.get('ID') self.status = attrs.get('Status')", "make parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification date", "print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList) #update database info after successful program-run db.setColUpdate('cwe', lastmodified)", "order to prevent attackers from mining information from the # framework's built-in responses.An", "software released under the \"Modified BSD license\" # # Copyright (c) 2013-2014 
<NAME>", "= os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import parse as parse_datetime from xml.sax import", "name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = False self.description_summary = self.description_summary + self.description_summary", "parse xml and store in database parser.parse(f) cweList=[] for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\",", "from the # framework's built-in responses.An ASP .NET application must enable custom #", ".NET application must enable custom error # pages in order to prevent attackers", "lib.ProgressBar import progressbar from lib.Config import Configuration import lib.DatabaseLayer as db argparser =", "None: if lastmodified == i: print(\"Not modified\") sys.exit(0) # parse xml and store", "elif name == 'Weakness': self.weakness_tag = False # make parser parser = make_parser()", "sys.exit(0) # parse xml and store in database parser.parse(f) cweList=[] for cwe in", "framework's built-in responses.An ASP .NET application must enable custom # error pages in", "self.description_summary.replace(\"\\n\", \"\") elif name == 'Weakness': self.weakness_tag = False # make parser parser", "'status': self.status, 'weaknessabs': self.weaknessabs}) elif name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = True", "parse as parse_datetime from xml.sax import make_parser from xml.sax.handler import ContentHandler import argparse", "must enable custom # error pages in order to prevent attackers from mining", "ContentHandler import argparse import zipfile import tempfile from lib.ProgressBar import progressbar from lib.Config", "enable custom error # pages in order to prevent attackers from mining information", "argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration database') argparser.add_argument('-v', 
action='store_true', help='verbose output') args =", "False def startElement(self, name, attrs): if name == 'Weakness': self.weakness_tag = True self.statement", "if name == 'Weakness': self.weakness_tag = True self.statement = \"\" self.weaknessabs = attrs.get('Weakness_Abstraction')", "\"id\" : \"12\" } # # Software is free software released under the", "ch = CWEHandler() parser.setContentHandler(ch) # check modification date try: (f, r) = Configuration.getFeedData('cwe')", "description. # # The format is the following: # # { \"_id\" :", "parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification date try: (f,", "\", \"\") def endElement(self, name): if name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag =", "\"12\" } # # Software is free software released under the \"Modified BSD", "parser.setContentHandler(ch) # check modification date try: (f, r) = Configuration.getFeedData('cwe') except Exception as", "ch): if self.description_summary_tag: self.description_summary += ch.replace(\" \", \"\") def endElement(self, name): if name", "name == 'Weakness': self.weakness_tag = False # make parser parser = make_parser() ch", "- <EMAIL> # Copyright (c) 2015-2016 <NAME> - <EMAIL> # Imports import os", "\") if args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList) #update database info after successful", "# { \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", # \"name\" : \"ASP.NET Misconfiguration:", "= True self.description_summary = \"\" def characters(self, ch): if self.description_summary_tag: self.description_summary += ch.replace(\"", "under the \"Modified BSD license\" # # Copyright (c) 2013-2014 <NAME> - <EMAIL>", "to prevent attackers from mining information from the # framework's built-in responses.\", \"status\"", "\"description_summary\" : \"An ASP .NET application must enable custom error # pages in", "\"weaknessabs\" : \"Variant\", # \"name\" : 
\"ASP.NET Misconfiguration: Missing Custom Error Page\", #", "\"\") def endElement(self, name): if name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = False", ": ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" : \"Variant\", # \"name\" : \"ASP.NET Misconfiguration: Missing Custom Error", "to prevent attackers from mining information from the # framework's built-in responses.An ASP", "import parse as parse_datetime from xml.sax import make_parser from xml.sax.handler import ContentHandler import", "False self.weakness_tag = False def startElement(self, name, attrs): if name == 'Weakness': self.weakness_tag", "# error pages in order to prevent attackers from mining information from the", "(c) 2013-2014 <NAME> - <EMAIL> # Copyright (c) 2015-2016 <NAME> - <EMAIL> #", "xml.sax import make_parser from xml.sax.handler import ContentHandler import argparse import zipfile import tempfile", "self.description_summary + self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif name == 'Weakness': self.weakness_tag =", "== 'Description_Summary' and self.weakness_tag: self.description_summary_tag = False self.description_summary = self.description_summary + self.description_summary self.cwe[-1]['description_summary']", "attackers from mining information from the # framework's built-in responses.\", \"status\" : \"Draft\",", "self.weakness_tag = False # make parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch)", "= False self.weakness_tag = False def startElement(self, name, attrs): if name == 'Weakness':", "error pages in order to prevent attackers from mining information from the #", "The format is the following: # # { \"_id\" : ObjectId(\"52b70521b261026f36818515\"), \"weaknessabs\" :", "attrs.get('Name') self.idname = attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status,", 
"action='store_true', help='verbose output') args = argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self): self.cwe = []", "name == 'Weakness': self.weakness_tag = True self.statement = \"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name", "self.name = attrs.get('Name') self.idname = attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname,", "built-in responses.An ASP .NET application must enable custom # error pages in order", "pages in order to prevent attackers from mining information from the # framework's", "<EMAIL> # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from", "self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs}) elif name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag =", "= db.getLastModified('cwe') if i is not None: if lastmodified == i: print(\"Not modified\")", "if name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = False self.description_summary = self.description_summary +", "tempfile from lib.ProgressBar import progressbar from lib.Config import Configuration import lib.DatabaseLayer as db", "attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs}) elif", "# parse xml and store in database parser.parse(f) cweList=[] for cwe in progressbar(ch.cwe):", "# framework's built-in responses.An ASP .NET application must enable custom # error pages", "i is not None: if lastmodified == i: print(\"Not modified\") sys.exit(0) # parse", "== i: print(\"Not modified\") sys.exit(0) # parse xml and store in database parser.parse(f)", "database') argparser.add_argument('-v', action='store_true', help='verbose output') args = argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self): self.cwe", 
"NIST CWE Common Weakness Enumeration. # # Until now, the import is only", "Error Page\", # \"description_summary\" : \"An ASP .NET application must enable custom error", "lastmodified == i: print(\"Not modified\") sys.exit(0) # parse xml and store in database", "# check modification date try: (f, r) = Configuration.getFeedData('cwe') except Exception as e:", "= argparser.parse_args() class CWEHandler(ContentHandler): def __init__(self): self.cwe = [] self.description_summary_tag = False self.weakness_tag", "<NAME> - <EMAIL> # Copyright (c) 2015-2016 <NAME> - <EMAIL> # Imports import", "from the # framework's built-in responses.\", \"status\" : \"Draft\", \"id\" : \"12\" }", "self.description_summary_tag: self.description_summary += ch.replace(\" \", \"\") def endElement(self, name): if name == 'Description_Summary'", "database parser.parse(f) cweList=[] for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v: print", "\"Draft\", \"id\" : \"12\" } # # Software is free software released under", "self.weakness_tag = True self.statement = \"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname", "__init__(self): self.cwe = [] self.description_summary_tag = False self.weakness_tag = False def startElement(self, name,", "elif name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = True self.description_summary = \"\" def", "Exception as e: print(e) sys.exit(\"Cannot open url %s. 
Bad URL or not connected", "lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if i is not None: if", "if args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList) #update database info after successful program-run", "attrs.get('Status') self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs}) elif name == 'Description_Summary'", "self.weakness_tag = False def startElement(self, name, attrs): if name == 'Weakness': self.weakness_tag =", "== 'Weakness': self.weakness_tag = True self.statement = \"\" self.weaknessabs = attrs.get('Weakness_Abstraction') self.name =", "(f, r) = Configuration.getFeedData('cwe') except Exception as e: print(e) sys.exit(\"Cannot open url %s.", "\"Modified BSD license\" # # Copyright (c) 2013-2014 <NAME> - <EMAIL> # Copyright", "#!/usr/bin/env python3 # # Import script of NIST CWE Common Weakness Enumeration. #", "%s. Bad URL or not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True)", "+= ch.replace(\" \", \"\") def endElement(self, name): if name == 'Description_Summary' and self.weakness_tag:", "Missing Custom Error Page\", # \"description_summary\" : \"An ASP .NET application must enable", "# Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser", "[] self.description_summary_tag = False self.weakness_tag = False def startElement(self, name, attrs): if name", "date try: (f, r) = Configuration.getFeedData('cwe') except Exception as e: print(e) sys.exit(\"Cannot open", "store in database parser.parse(f) cweList=[] for cwe in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if", "or not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified = 
parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe')", "import Weakness description. # # The format is the following: # # {", "from lib.ProgressBar import progressbar from lib.Config import Configuration import lib.DatabaseLayer as db argparser", "attackers from mining information from the # framework's built-in responses.An ASP .NET application", "and self.weakness_tag: self.description_summary_tag = True self.description_summary = \"\" def characters(self, ch): if self.description_summary_tag:", "= \"\" def characters(self, ch): if self.description_summary_tag: self.description_summary += ch.replace(\" \", \"\") def", "2013-2014 <NAME> - <EMAIL> # Copyright (c) 2015-2016 <NAME> - <EMAIL> # Imports", "sys.exit(\"Cannot open url %s. Bad URL or not connected to the internet?\"%(Configuration.getFeedURL(\"cwe\"))) lastmodified", "if self.description_summary_tag: self.description_summary += ch.replace(\" \", \"\") def endElement(self, name): if name ==", "order to prevent attackers from mining information from the # framework's built-in responses.\",", "self.cwe = [] self.description_summary_tag = False self.weakness_tag = False def startElement(self, name, attrs):", "\"name\" : \"ASP.NET Misconfiguration: Missing Custom Error Page\", # \"description_summary\" : \"An ASP", "= [] self.description_summary_tag = False self.weakness_tag = False def startElement(self, name, attrs): if", "= False self.description_summary = self.description_summary + self.description_summary self.cwe[-1]['description_summary'] = self.description_summary.replace(\"\\n\", \"\") elif name", "# # Software is free software released under the \"Modified BSD license\" #", "self.weaknessabs = attrs.get('Weakness_Abstraction') self.name = attrs.get('Name') self.idname = attrs.get('ID') self.status = attrs.get('Status') self.cwe.append({'name':", "NIST CWE Common Weakness Enumeration database') argparser.add_argument('-v', action='store_true', help='verbose output') 
args = argparser.parse_args()", "in progressbar(ch.cwe): cwe['description_summary']=cwe['description_summary'].replace(\"\\t\\t\\t\\t\\t\", \" \") if args.v: print (cwe) cweList.append(cwe) db.bulkUpdate('cwe', cweList) #update", "import ContentHandler import argparse import zipfile import tempfile from lib.ProgressBar import progressbar from", "import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, \"..\")) from dateutil.parser import parse", "2015-2016 <NAME> - <EMAIL> # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__))", "Copyright (c) 2013-2014 <NAME> - <EMAIL> # Copyright (c) 2015-2016 <NAME> - <EMAIL>", "def characters(self, ch): if self.description_summary_tag: self.description_summary += ch.replace(\" \", \"\") def endElement(self, name):" ]
[ "shape of the returned tensor, but the last element of that shape. \"\"\"", "uses to represent each offset. This is `not` the shape of the returned", "to represent each offset. This is `not` the shape of the returned tensor,", "This is `not` the shape of the returned tensor, but the last element", "int: \"\"\" Returns the final output dimension that this ``OffsetEmbedder`` uses to represent", "the shape of the returned tensor, but the last element of that shape.", "import torch from allennlp.common import Registrable class OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation =", "Registrable class OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation = \"relative\" def get_output_dim(self) -> int:", "tensor, but the last element of that shape. \"\"\" raise NotImplementedError def is_additive(self)", "<reponame>DFKI-NLP/RelEx import torch from allennlp.common import Registrable class OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation", "output dimension that this ``OffsetEmbedder`` uses to represent each offset. This is `not`", "\"\"\" default_implementation = \"relative\" def get_output_dim(self) -> int: \"\"\" Returns the final output", "each offset. This is `not` the shape of the returned tensor, but the", "\"relative\" def get_output_dim(self) -> int: \"\"\" Returns the final output dimension that this", "OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation = \"relative\" def get_output_dim(self) -> int: \"\"\" Returns", "from allennlp.common import Registrable class OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation = \"relative\" def", "dimension that this ``OffsetEmbedder`` uses to represent each offset. This is `not` the", "offset. 
This is `not` the shape of the returned tensor, but the last", "is `not` the shape of the returned tensor, but the last element of", "\"\"\" \"\"\" default_implementation = \"relative\" def get_output_dim(self) -> int: \"\"\" Returns the final", "torch from allennlp.common import Registrable class OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation = \"relative\"", "that this ``OffsetEmbedder`` uses to represent each offset. This is `not` the shape", "-> int: \"\"\" Returns the final output dimension that this ``OffsetEmbedder`` uses to", "this ``OffsetEmbedder`` uses to represent each offset. This is `not` the shape of", "get_output_dim(self) -> int: \"\"\" Returns the final output dimension that this ``OffsetEmbedder`` uses", "the last element of that shape. \"\"\" raise NotImplementedError def is_additive(self) -> bool:", "returned tensor, but the last element of that shape. \"\"\" raise NotImplementedError def", "import Registrable class OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation = \"relative\" def get_output_dim(self) ->", "final output dimension that this ``OffsetEmbedder`` uses to represent each offset. This is", "def get_output_dim(self) -> int: \"\"\" Returns the final output dimension that this ``OffsetEmbedder``", "\"\"\" Returns the final output dimension that this ``OffsetEmbedder`` uses to represent each", "of the returned tensor, but the last element of that shape. \"\"\" raise", "but the last element of that shape. \"\"\" raise NotImplementedError def is_additive(self) ->", "element of that shape. \"\"\" raise NotImplementedError def is_additive(self) -> bool: raise NotImplementedError", "represent each offset. 
This is `not` the shape of the returned tensor, but", "Registrable): \"\"\" \"\"\" default_implementation = \"relative\" def get_output_dim(self) -> int: \"\"\" Returns the", "= \"relative\" def get_output_dim(self) -> int: \"\"\" Returns the final output dimension that", "Returns the final output dimension that this ``OffsetEmbedder`` uses to represent each offset.", "default_implementation = \"relative\" def get_output_dim(self) -> int: \"\"\" Returns the final output dimension", "``OffsetEmbedder`` uses to represent each offset. This is `not` the shape of the", "the final output dimension that this ``OffsetEmbedder`` uses to represent each offset. This", "allennlp.common import Registrable class OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation = \"relative\" def get_output_dim(self)", "`not` the shape of the returned tensor, but the last element of that", "class OffsetEmbedder(torch.nn.Module, Registrable): \"\"\" \"\"\" default_implementation = \"relative\" def get_output_dim(self) -> int: \"\"\"", "last element of that shape. \"\"\" raise NotImplementedError def is_additive(self) -> bool: raise", "the returned tensor, but the last element of that shape. \"\"\" raise NotImplementedError" ]
[ "django.contrib import admin # Register your models here. from history.models import HistoryModelFile admin.site.register(HistoryModelFile)", "from django.contrib import admin # Register your models here. from history.models import HistoryModelFile", "<gh_stars>0 from django.contrib import admin # Register your models here. from history.models import" ]
[ "median = sorted(random_list, reverse=True)[n // 2] end = time.time() print(median) print(end - start)", "random_list = random.sample(range(n * 10), n) start = time.time() median = sorted(random_list, reverse=True)[n", "* 10), n) start = time.time() median = sorted(random_list, reverse=True)[n // 2] end", "time n = 10000000 random_list = random.sample(range(n * 10), n) start = time.time()", "n = 10000000 random_list = random.sample(range(n * 10), n) start = time.time() median", "random.sample(range(n * 10), n) start = time.time() median = sorted(random_list, reverse=True)[n // 2]", "10), n) start = time.time() median = sorted(random_list, reverse=True)[n // 2] end =", "= time.time() median = sorted(random_list, reverse=True)[n // 2] end = time.time() print(median) print(end", "random import time n = 10000000 random_list = random.sample(range(n * 10), n) start", "n) start = time.time() median = sorted(random_list, reverse=True)[n // 2] end = time.time()", "time.time() median = sorted(random_list, reverse=True)[n // 2] end = time.time() print(median) print(end -", "import time n = 10000000 random_list = random.sample(range(n * 10), n) start =", "10000000 random_list = random.sample(range(n * 10), n) start = time.time() median = sorted(random_list,", "import random import time n = 10000000 random_list = random.sample(range(n * 10), n)", "= 10000000 random_list = random.sample(range(n * 10), n) start = time.time() median =", "= random.sample(range(n * 10), n) start = time.time() median = sorted(random_list, reverse=True)[n //", "start = time.time() median = sorted(random_list, reverse=True)[n // 2] end = time.time() print(median)" ]
[]
[ "more-company']/@href\").get() # next_page now has the form of '/companies?page=2' or None if next_page", "response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies: relative_link = '/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/')", "file. \"\"\" name = \"companies\" start_urls = [ 'https://itviec.com/companies', ] def parse(self, response):", "# next_page now has the form of '/companies?page=2' or None if next_page is", "spider wil crawl all the company link available in itviec and save it", "\"\"\" name = \"companies\" start_urls = [ 'https://itviec.com/companies', ] def parse(self, response): all_companies", "absolute_link } next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now has the form of", "] def parse(self, response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies: relative_link", "the form of '/companies?page=2' or None if next_page is not None: # makes", "next_page is not None: # makes absolute url next_page = response.urljoin(next_page) yield scrapy.Request(next_page,", "all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies: relative_link = '/'.join(company_link.split('/') [:-1]) company_name", "next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now has the form of '/companies?page=2' or", "if next_page is not None: # makes absolute url next_page = response.urljoin(next_page) yield", "= '/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/') [-2] absolute_link = response.urljoin(relative_link) yield {'company_name': company_name,", "company_link.split('/') [-2] absolute_link = response.urljoin(relative_link) yield 
{'company_name': company_name, 'url': absolute_link } next_page =", "CompaniesSpider(scrapy.Spider): \"\"\"This spider wil crawl all the company link available in itviec and", "wil crawl all the company link available in itviec and save it to", "'/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/') [-2] absolute_link = response.urljoin(relative_link) yield {'company_name': company_name, 'url':", "[-2] absolute_link = response.urljoin(relative_link) yield {'company_name': company_name, 'url': absolute_link } next_page = response.xpath(\"//a[@class='more-jobs-link", "for company_link in all_companies: relative_link = '/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/') [-2] absolute_link", "parse(self, response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies: relative_link = '/'.join(company_link.split('/')", "response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now has the form of '/companies?page=2' or None if", "all_companies: relative_link = '/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/') [-2] absolute_link = response.urljoin(relative_link) yield", "now has the form of '/companies?page=2' or None if next_page is not None:", "'/companies?page=2' or None if next_page is not None: # makes absolute url next_page", "\"\"\"This spider wil crawl all the company link available in itviec and save", "company link available in itviec and save it to a json line file.", "= response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies: relative_link = '/'.join(company_link.split('/') [:-1]) company_name =", "the company link available in itviec and save it to a json line", "next_page now has the form of '/companies?page=2' or None if next_page is not", "company_name, 'url': absolute_link 
} next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now has the", "= company_link.split('/') [-2] absolute_link = response.urljoin(relative_link) yield {'company_name': company_name, 'url': absolute_link } next_page", "import scrapy class CompaniesSpider(scrapy.Spider): \"\"\"This spider wil crawl all the company link available", "is not None: # makes absolute url next_page = response.urljoin(next_page) yield scrapy.Request(next_page, callback", "absolute_link = response.urljoin(relative_link) yield {'company_name': company_name, 'url': absolute_link } next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get()", "to a json line file. \"\"\" name = \"companies\" start_urls = [ 'https://itviec.com/companies',", "itviec and save it to a json line file. \"\"\" name = \"companies\"", "'https://itviec.com/companies', ] def parse(self, response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies:", "of '/companies?page=2' or None if next_page is not None: # makes absolute url", "available in itviec and save it to a json line file. \"\"\" name", "all the company link available in itviec and save it to a json", "= [ 'https://itviec.com/companies', ] def parse(self, response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link", "line file. \"\"\" name = \"companies\" start_urls = [ 'https://itviec.com/companies', ] def parse(self,", "save it to a json line file. \"\"\" name = \"companies\" start_urls =", "link available in itviec and save it to a json line file. \"\"\"", "in itviec and save it to a json line file. 
\"\"\" name =", "def parse(self, response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies: relative_link =", "= response.urljoin(relative_link) yield {'company_name': company_name, 'url': absolute_link } next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() #", "name = \"companies\" start_urls = [ 'https://itviec.com/companies', ] def parse(self, response): all_companies =", "None if next_page is not None: # makes absolute url next_page = response.urljoin(next_page)", "and save it to a json line file. \"\"\" name = \"companies\" start_urls", "json line file. \"\"\" name = \"companies\" start_urls = [ 'https://itviec.com/companies', ] def", "} next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now has the form of '/companies?page=2'", "yield {'company_name': company_name, 'url': absolute_link } next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now", "not None: # makes absolute url next_page = response.urljoin(next_page) yield scrapy.Request(next_page, callback =", "crawl all the company link available in itviec and save it to a", "[ 'https://itviec.com/companies', ] def parse(self, response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in", "start_urls = [ 'https://itviec.com/companies', ] def parse(self, response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for", "= response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now has the form of '/companies?page=2' or None", "response.urljoin(relative_link) yield {'company_name': company_name, 'url': absolute_link } next_page = response.xpath(\"//a[@class='more-jobs-link 
more-company']/@href\").get() # next_page", "None: # makes absolute url next_page = response.urljoin(next_page) yield scrapy.Request(next_page, callback = self.parse)", "companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies: relative_link = '/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/') [-2]", "relative_link = '/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/') [-2] absolute_link = response.urljoin(relative_link) yield {'company_name':", "or None if next_page is not None: # makes absolute url next_page =", "scrapy class CompaniesSpider(scrapy.Spider): \"\"\"This spider wil crawl all the company link available in", "company_name = company_link.split('/') [-2] absolute_link = response.urljoin(relative_link) yield {'company_name': company_name, 'url': absolute_link }", "a json line file. \"\"\" name = \"companies\" start_urls = [ 'https://itviec.com/companies', ]", "{'company_name': company_name, 'url': absolute_link } next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now has", "\"companies\" start_urls = [ 'https://itviec.com/companies', ] def parse(self, response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall()", "company_link in all_companies: relative_link = '/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/') [-2] absolute_link =", "'url': absolute_link } next_page = response.xpath(\"//a[@class='more-jobs-link more-company']/@href\").get() # next_page now has the form", "= \"companies\" start_urls = [ 'https://itviec.com/companies', ] def parse(self, response): all_companies = response.xpath(\"//div[@class='first-group", "it to a json line file. 
\"\"\" name = \"companies\" start_urls = [", "class CompaniesSpider(scrapy.Spider): \"\"\"This spider wil crawl all the company link available in itviec", "in all_companies: relative_link = '/'.join(company_link.split('/') [:-1]) company_name = company_link.split('/') [-2] absolute_link = response.urljoin(relative_link)", "[:-1]) company_name = company_link.split('/') [-2] absolute_link = response.urljoin(relative_link) yield {'company_name': company_name, 'url': absolute_link", "response): all_companies = response.xpath(\"//div[@class='first-group companies']/a[@class='featured-company']/@href\").getall() for company_link in all_companies: relative_link = '/'.join(company_link.split('/') [:-1])", "has the form of '/companies?page=2' or None if next_page is not None: #", "form of '/companies?page=2' or None if next_page is not None: # makes absolute" ]
[ "in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2]", ") comment = Comment('MATERIAL 1') Materials.append(comment) \"orientation files required if material zone technique", "of the grain material, and the atrributes are the parameter values\" \"iterate across", "in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60:", "range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n')", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\")", "mat_create(orien,const, diameter,statev): #rotating vectors using grain orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter to", "fsamp21.close() fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all", "tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) 
orien2.write(str(const[i,0])+'\\n')", "diameter=(2*diameter)/1000 #writing diameters to file printtofiletext(diameter,'diameters') #writing orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220", "#writing diameters to file printtofiletext(diameter,'diameters') #writing orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 +", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\")", "i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68:", "i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail =", "i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail =", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "#scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) 
grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct", "i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close()", "Materials = Element('Materials') comment = Comment('REFERENCE MATERIAL') Materials.append(comment) child = SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5',", "for each constant2\" for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain,", "comment = Comment('MATERIAL 1') Materials.append(comment) \"orientation files required if material zone technique is", "for i in range(0,statev): child_grain_tail = SubElement(child_grain, 'IntVar',Index=str(i+1), Type='Constant',Value='0.') tree = ElementTree(Materials) tree.write(\"fatemptzone2.xml\")", "open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt',", "\\tau #diameter currnetly in microns, convert to mm #need to add 17.9 and", "\"\"\" Created on Tue Nov 24 15:19:55 2020 @author: mi19356 \"\"\" import numpy", "#check to make sure the there are no #checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "#iterate across the required number of state vairables needed for i in range(0,statev):", "of state vairables needed for i in range(0,statev): child_grain_tail = SubElement(child_grain, 'IntVar',Index=str(i+1), Type='Constant',Value='0.')", "orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check to make sure the", "the there are 
no #checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0 Materials = Element('Materials') comment", "fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to include grain dependent info", "create subelelements for each constant2\" for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail", "in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien)", "different material constants to create subelelements for each constant2\" for i in range(0,(len(const))):", "orientations #update the value for tau0 elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1]", "fsamp23 = open('fsam23.txt', 'w') orien1 = open('orien1.txt', 'w') orien2 = open('orien2.txt', 'w') orien3", "material constants to create subelelements for each constant2\" for i in range(0,(len(const))): if", "orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2]) 
orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close() orien1.close() orien2.close()", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1, samp2, total_rot, orien \"\"\" create material file", "elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif", "if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file #vtkdatareso=reso_change(vtkdata)", "are the parameter values\" \"iterate across the different material constants to create subelelements", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the required number of state vairables needed", "excel const file. 
diameter=(2*diameter)/1000 #writing diameters to file printtofiletext(diameter,'diameters') #writing orientations to file", "#orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk", "rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i]))", "Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69:", "child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all the parameters required for the material\"", "to include grain dependent info #grain orientations #update the value for tau0 elif", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', 
File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\")", "File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2]", "for i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien)", "as np import os import pandas as pd from xml.etree.ElementTree import Element, SubElement,", "i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail =", "i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67:", "state vairables needed for i in range(0,statev): 
child_grain_tail = SubElement(child_grain, 'IntVar',Index=str(i+1), Type='Constant',Value='0.') tree", "fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all the parameters", "File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2])", "fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to include grain dependent info #grain orientations #update the", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the required", "elif i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail", "from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree import random import math from scrape", "include grain dependent info #grain orientations #update the value for tau0 elif i==98:", "required number of state vairables needed for i in range(0,statev): child_grain_tail = SubElement(child_grain,", "i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const", "Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), 
Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68:", "'w') fsamp21 = open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt', 'w')", "File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1]", "i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61:", "are no #checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0 Materials = Element('Materials') comment = Comment('REFERENCE", "'w') for numMat in range(1,len(orien)+1): for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n')", "Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98:", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\")", "technique is used in AMITEX\" fsamp1 = open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt', 'w')", "const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain,", "tau0 elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif 
i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n')", "fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n')", "#taud[checkgreater]=340.0 Materials = Element('Materials') comment = Comment('REFERENCE MATERIAL') Materials.append(comment) child = SubElement(Materials, 'Reference_Material',Lambda0=", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif", "comment = Comment('REFERENCE MATERIAL') Materials.append(comment) child = SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment", "array to include grain dependent info #grain orientations #update the value for tau0", "const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain,", "across the required number of state vairables needed for i in range(0,statev): child_grain_tail", "variable parameter for \\tau #diameter currnetly in microns, convert to mm #need to", "elif i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to include", "'w') tau01 = open('tau1.txt', 'w') tau02 = open('tau2.txt', 'w') for numMat in range(1,len(orien)+1):", "'w') orien1 = open('orien1.txt', 'w') orien2 = open('orien2.txt', 'w') orien3 = open('orien3.txt', 'w')", 
"child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the required number of state vairables", "const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain,", "orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio matrix \"\"\" def", "File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0]", "orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check to make sure the there are no", "sure the there are no #checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0 Materials = Element('Materials')", "orien2 = open('orien2.txt', 'w') orien3 = open('orien3.txt', 'w') tau01 = open('tau1.txt', 'w') tau02", "material zone technique is used in AMITEX\" fsamp1 = open('fsam1.txt', 'w') fsamp2 =", "all the parameters required for the material\" \"Coeff is the element of the", "@author: mi19356 \"\"\" import numpy as np import os import pandas as pd", "in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1, samp2, total_rot, orien \"\"\"", "i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close()", "else: child_grain_tail 
= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the required number of state", "if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail", "#diameter currnetly in microns, convert to mm #need to add 17.9 and 10", "stores all the parameters required for the material\" \"Coeff is the element of", "subelelements for each constant2\" for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail =", "orien=rotation_info(orien,grainids) #use the diameter to create a variable parameter for \\tau #diameter currnetly", "return vec1, vec2, samp1, samp2, total_rot, orien \"\"\" create material file for AMITEX", "import Element, SubElement, Comment, ElementTree import random import math from scrape import vtk_scrap", "import math from scrape import vtk_scrap from dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape", "dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\"", "const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain,", "convert to mm #need to add 17.9 and 10 to excel const file.", "'w') fsamp3 = open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt', 'w')", "numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all 
the parameters required for the material\" \"Coeff is", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif", "Comment('REFERENCE MATERIAL') Materials.append(comment) child = SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment = Comment('MATERIAL", "orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in", "each constant2\" for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\")", "in range(1,len(orien)+1): for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1]", "import data_reconstruct, reso_change, 
data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien)", "is used in AMITEX\" fsamp1 = open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt', 'w') fsamp3", "\"\"\" Create orientatio matrix \"\"\" def rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1] vec2=[0,1,0] #modify", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "#Defining local variables vec1=[0,0,1] vec2=[0,1,0] #modify the orientations orien=orien[1:,1:] #check to see if", "#check to see if there are missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i", "orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i", "rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1] 
vec2=[0,1,0] #modify the orientations orien=orien[1:,1:] #check to see", "info #grain orientations #update the value for tau0 elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif", "i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien)", "range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1]", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores", "#update the value for tau0 elif i==98: const[i,0]=taud[numMat-1] 
tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n')", "as pd from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree import random import math", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif", "dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata,", "17.9 and 10 to excel const file. diameter=(2*diameter)/1000 #writing diameters to file printtofiletext(diameter,'diameters')", "i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170:", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail", "const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to include grain dependent", "vec1, vec2, samp1, samp2, total_rot, orien \"\"\" create material 
file for AMITEX \"\"\"", "vec2, samp1, samp2, total_rot, orien \"\"\" create material file for AMITEX \"\"\" def", "Materials.append(comment) child = SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment = Comment('MATERIAL 1') Materials.append(comment)", "scrape import vtk_scrap from dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0", "const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain,", "elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail", "'w') orien2 = open('orien2.txt', 'w') orien3 = open('orien3.txt', 'w') tau01 = open('tau1.txt', 'w')", "numMat in range(1,len(orien)+1): for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60:", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "(17.9/((diameter)**0.5)) #check to make sure the there are no #checkgreater=np.where(taud>350)[0] #replace these values", "'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment = Comment('MATERIAL 1') Materials.append(comment) \"orientation files required if", "fsamp21 = open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt', 'w') orien1", "'w') tau02 = open('tau2.txt', 'w') for numMat in range(1,len(orien)+1): for i in range(0,(len(const))):", 
"i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail =", "fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS')", "orien3 = open('orien3.txt', 'w') tau01 = open('tau1.txt', 'w') tau02 = open('tau2.txt', 'w') for", "Nov 24 15:19:55 2020 @author: mi19356 \"\"\" import numpy as np import os", "open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt', 'w') orien1 = open('orien1.txt',", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "file for AMITEX \"\"\" def mat_create(orien,const, diameter,statev): #rotating vectors using grain orientations vec1,vec2,samp1,samp2,total_rot,", "= open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt', 'w') orien1 =", "in microns, convert to mm #need to add 17.9 and 10 to excel", "coding: utf-8 -*- \"\"\" Created on Tue Nov 24 15:19:55 2020 @author: mi19356", "if there are missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)])", "\"orientation files required if material zone technique is used in AMITEX\" fsamp1 =", "const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: 
const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2]", "matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1)", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif", "#checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0 Materials = Element('Materials') comment = Comment('REFERENCE MATERIAL') Materials.append(comment)", "import pandas as pd from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree import random", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 
'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\")", "File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1]", "MATERIAL') Materials.append(comment) child = SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment = Comment('MATERIAL 1')", "tau01 = open('tau1.txt', 'w') tau02 = open('tau2.txt', 'w') for numMat in range(1,len(orien)+1): for", "#grain orientations #update the value for tau0 elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114:", "data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else:", "tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n')", "Comment, ElementTree import random import math from scrape import vtk_scrap from dataconversions import", "fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all the", "value for tau0 elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168:", "reso_change, 
data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream)", "totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]])", "orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" Create", "for tau0 elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0])", "= open('orien3.txt', 'w') tau01 = open('tau1.txt', 'w') tau02 = open('tau2.txt', 'w') for numMat", "ElementTree import random import math from scrape import vtk_scrap from 
dataconversions import data_reconstruct,", "Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67:", "File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0])", "samp2, total_rot, orien \"\"\" create material file for AMITEX \"\"\" def mat_create(orien,const, diameter,statev):", "range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1, samp2, total_rot, orien \"\"\" create", "parameter for \\tau #diameter currnetly in microns, convert to mm #need to add", "import numpy as np import os import pandas as pd from xml.etree.ElementTree import", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1, samp2, total_rot, orien \"\"\" create material", "'2.0431e+5', Mu0='0.8756e+5' ) comment = Comment('MATERIAL 1') Materials.append(comment) \"orientation files required if material", "const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain,", "Element('Materials') comment = Comment('REFERENCE MATERIAL') Materials.append(comment) child = SubElement(Materials, 
'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' )", "fsamp2 = open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt', 'w') fsamp22", "from scrape import vtk_scrap from dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata')", "for AMITEX \"\"\" def mat_create(orien,const, diameter,statev): #rotating vectors using grain orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids)", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\")", "to mm #need to add 17.9 and 10 to excel const file. diameter=(2*diameter)/1000", "np import os import pandas as pd from xml.etree.ElementTree import Element, SubElement, Comment,", "printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check to make sure the there are no #checkgreater=np.where(taud>350)[0]", "total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2,", "to see if there are missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif", "= open('tau1.txt', 'w') tau02 = open('tau2.txt', 'w') for numMat in range(1,len(orien)+1): for i", 
"fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n')", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "mi19356 \"\"\" import numpy as np import os import pandas as pd from", "data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a", "'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the required number of state vairables needed for i", "const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2])", "total_rot, orien \"\"\" create material file for AMITEX \"\"\" def mat_create(orien,const, diameter,statev): #rotating", "open('orien1.txt', 'w') orien2 = open('orien2.txt', 'w') orien3 = open('orien3.txt', 'w') tau01 = open('tau1.txt',", "open('fsam23.txt', 'w') orien1 = open('orien1.txt', 'w') orien2 = open('orien2.txt', 'w') orien3 = open('orien3.txt',", "AMITEX\" fsamp1 = open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt', 'w')", "diameter,statev): #rotating vectors using grain orientations 
vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter to create", "const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0]))", "i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif", "Materials.append(comment) \"orientation files required if material zone technique is used in AMITEX\" fsamp1", "const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain,", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else:", "and the atrributes are the parameter values\" \"iterate across the different material constants", "Tue Nov 24 15:19:55 2020 @author: mi19356 \"\"\" import numpy as np import", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\")", "SubElement, Comment, ElementTree import random import math from scrape import vtk_scrap from dataconversions", "across the different material constants to create subelelements for each constant2\" for i", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 
'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\")", "elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif", "i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to include grain dependent info #grain orientations", "1') Materials.append(comment) \"orientation files required if material zone technique is used in AMITEX\"", "pandas as pd from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree import random import", "file printtofiletext(diameter,'diameters') #writing orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check to", "the atrributes are the parameter values\" \"iterate across the different material constants to", "i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials,", "grain material, and the atrributes are the parameter values\" \"iterate across the different", "diameters to file printtofiletext(diameter,'diameters') #writing orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5))", "\"iterate across the different material constants to create subelelements for each constant2\" for", "the grain material, and the atrributes are the parameter values\" \"iterate across the", "Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the required number", "#need 
to add 17.9 and 10 to excel const file. diameter=(2*diameter)/1000 #writing diameters", "pd from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree import random import math from", "the different material constants to create subelelements for each constant2\" for i in", "there are no #checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0 Materials = Element('Materials') comment =", "no #checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0 Materials = Element('Materials') comment = Comment('REFERENCE MATERIAL')", "material file for AMITEX \"\"\" def mat_create(orien,const, diameter,statev): #rotating vectors using grain orientations", "fsamp3 = open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt', 'w') fsamp23", "Law='UMATBCCGDGS') \"This stores all the parameters required for the material\" \"Coeff is the", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "vtk_scrap from dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1:", "a variable parameter for \\tau #diameter currnetly in microns, convert to mm #need", "i in range(0,statev): child_grain_tail = SubElement(child_grain, 'IntVar',Index=str(i+1), Type='Constant',Value='0.') tree = ElementTree(Materials) tree.write(\"fatemptzone2.xml\") mat_create(orien,const,diameter,900)", "elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close()", "#replace these values #taud[checkgreater]=340.0 
Materials = Element('Materials') comment = Comment('REFERENCE MATERIAL') Materials.append(comment) child", "= SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment = Comment('MATERIAL 1') Materials.append(comment) \"orientation files", "#construct a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio matrix \"\"\" def rotation_info(orien,grainids): #Defining", "see if there are missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)):", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to include grain dependent info #grain orientations #update", "i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to include grain", "import random import math from scrape import vtk_scrap from dataconversions import data_reconstruct, reso_change,", "const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: 
const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\")", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "vec2=[0,1,0] #modify the orientations orien=orien[1:,1:] #check to see if there are missing orientations", "if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]])", "Mu0='0.8756e+5' ) comment = Comment('MATERIAL 1') Materials.append(comment) \"orientation files required if material zone", "+ (17.9/((diameter)**0.5)) #check to make sure the there are no #checkgreater=np.where(taud>350)[0] #replace these", "grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio", "needed for i in range(0,statev): child_grain_tail = SubElement(child_grain, 'IntVar',Index=str(i+1), Type='Constant',Value='0.') tree = ElementTree(Materials)", "const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain,", "\"\"\" def rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1] vec2=[0,1,0] #modify the 
orientations orien=orien[1:,1:] #check", "i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail =", "samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1, samp2, total_rot, orien \"\"\" create material file for", "samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1,", "constant2\" for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio matrix", "vairables needed for i in range(0,statev): child_grain_tail = SubElement(child_grain, 'IntVar',Index=str(i+1), Type='Constant',Value='0.') tree =", "fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This", "parameter values\" \"iterate across the different material constants to create subelelements for each", "orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close()", "len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)): 
orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]])", "on Tue Nov 24 15:19:55 2020 @author: mi19356 \"\"\" import numpy as np", "open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt',", "\"\"\" create material file for AMITEX \"\"\" def mat_create(orien,const, diameter,statev): #rotating vectors using", "'w') fsamp2 = open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt', 'w')", "there are missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3)", "15:19:55 2020 @author: mi19356 \"\"\" import numpy as np import os import pandas", "fsamp1 = open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt', 'w') fsamp21", "open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt',", "xml.etree.ElementTree import Element, SubElement, Comment, ElementTree import random import math from scrape import", "for \\tau #diameter currnetly in microns, convert to mm #need to add 17.9", "elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: 
const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close()", "child = SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment = Comment('MATERIAL 1') Materials.append(comment) \"orientation", "= open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt', 'w') orien1 = open('orien1.txt', 'w') orien2 =", "const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1])", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif", "to excel const file. 
diameter=(2*diameter)/1000 #writing diameters to file printtofiletext(diameter,'diameters') #writing orientations to", "the value for tau0 elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif", "dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file", "material\" \"Coeff is the element of the grain material, and the atrributes are", "number of state vairables needed for i in range(0,statev): child_grain_tail = SubElement(child_grain, 'IntVar',Index=str(i+1),", "open('orien2.txt', 'w') orien3 = open('orien3.txt', 'w') tau01 = open('tau1.txt', 'w') tau02 = open('tau2.txt',", "= open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt', 'w') fsamp22 =", "\"Coeff is the element of the grain material, and the atrributes are the", "'w') orien3 = open('orien3.txt', 'w') tau01 = open('tau1.txt', 'w') tau02 = open('tau2.txt', 'w')", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif", "to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check to make sure the there", "data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream) grainids=data_reconstruct(vtkdata, vtkdataPoints,1,orien) else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) 
grainids,diameter=data_reconstruct_dream(vtkdata,orien)", "material, and the atrributes are the parameter values\" \"iterate across the different material", "constants to create subelelements for each constant2\" for i in range(0,(len(const))): if i==59:", "a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio matrix \"\"\" def rotation_info(orien,grainids): #Defining local", "elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to include grain dependent info #grain", "create material file for AMITEX \"\"\" def mat_create(orien,const, diameter,statev): #rotating vectors using grain", "are missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct", "make sure the there are no #checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0 Materials =", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\")", "microns, convert to mm #need to add 17.9 and 10 to excel const", "const array to include grain dependent info #grain orientations #update the value for", "= Element('Materials') comment = Comment('REFERENCE MATERIAL') Materials.append(comment) child = SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5'", "atrributes are the parameter values\" \"iterate across the different material constants to create", "to make sure the there are no 
#checkgreater=np.where(taud>350)[0] #replace these values #taud[checkgreater]=340.0 Materials", "const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close() orien1.close()", "orientations orien=orien[1:,1:] #check to see if there are missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien)", "orien=orien[1:,1:] #check to see if there are missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for", "const file. diameter=(2*diameter)/1000 #writing diameters to file printtofiletext(diameter,'diameters') #writing orientations to file orienprint=list(orien)", "elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail", "files required if material zone technique is used in AMITEX\" fsamp1 = open('fsam1.txt',", "add 17.9 and 10 to excel const file. 
diameter=(2*diameter)/1000 #writing diameters to file", "orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all the parameters required for", "= open('fsam3.txt', 'w') fsamp21 = open('fsam21.txt', 'w') fsamp22 = open('fsam22.txt', 'w') fsamp23 =", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across", "File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1])", "File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1]", "= open('tau2.txt', 'w') for numMat in range(1,len(orien)+1): for i in range(0,(len(const))): if i==59:", "#writing orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check to make sure", "open('tau2.txt', 'w') for numMat in range(1,len(orien)+1): for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0]", "the orientations orien=orien[1:,1:] #check to see if there are missing orientations if len(orien)<len(grainids):", "AMITEX \"\"\" def mat_create(orien,const, diameter,statev): #rotating vectors using grain orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use", "#modify the orientations orien=orien[1:,1:] #check to see if there are missing orientations if", "for the material\" \"Coeff is the element of the grain material, and the", "using grain 
orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter to create a variable parameter", "matrix \"\"\" def rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1] vec2=[0,1,0] #modify the orientations orien=orien[1:,1:]", "elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif", "the parameters required for the material\" \"Coeff is the element of the grain", "fsamp22 = open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt', 'w') orien1 = open('orien1.txt', 'w') orien2", "orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all the parameters required", "elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail", "= Comment('REFERENCE MATERIAL') Materials.append(comment) child = SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment =", "if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif", "orientatio matrix \"\"\" def rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1] vec2=[0,1,0] #modify the orientations", "else: orien,vtkdata,const=vtk_scrap('vtkupdate','graindata',dream) grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" 
Create orientatio matrix \"\"\"", "the diameter to create a variable parameter for \\tau #diameter currnetly in microns,", "the parameter values\" \"iterate across the different material constants to create subelelements for", "vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio matrix \"\"\" def rotation_info(orien,grainids): #Defining local variables", "Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114:", "Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170:", "elif i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close()", "file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check to make sure the there are", "grain dependent info #grain orientations #update the value for tau0 elif i==98: const[i,0]=taud[numMat-1]", "\"This stores all the parameters required for the material\" \"Coeff is the element", "#use the diameter to create a variable parameter for \\tau #diameter currnetly in", "= Comment('MATERIAL 1') Materials.append(comment) \"orientation files required if material zone technique is used", "and 10 to excel const file. 
diameter=(2*diameter)/1000 #writing diameters to file printtofiletext(diameter,'diameters') #writing", "elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2] child_grain_tail", "open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt', 'w') orien1 = open('orien1.txt', 'w') orien2 = open('orien2.txt',", "in AMITEX\" fsamp1 = open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt',", "range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for", "required if material zone technique is used in AMITEX\" fsamp1 = open('fsam1.txt', 'w')", "to add 17.9 and 10 to excel const file. 
diameter=(2*diameter)/1000 #writing diameters to", "= open('orien2.txt', 'w') orien3 = open('orien3.txt', 'w') tau01 = open('tau1.txt', 'w') tau02 =", "to create subelelements for each constant2\" for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0]", "orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter to create a variable parameter for \\tau", "the required number of state vairables needed for i in range(0,statev): child_grain_tail =", "i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail =", "# -*- coding: utf-8 -*- \"\"\" Created on Tue Nov 24 15:19:55 2020", "diameter to create a variable parameter for \\tau #diameter currnetly in microns, convert", "i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail =", "samp1, samp2, total_rot, orien \"\"\" create material file for AMITEX \"\"\" def mat_create(orien,const,", "Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail", "import vtk_scrap from dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if", "elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail", "fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: 
const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust const array to", "Element, SubElement, Comment, ElementTree import random import math from scrape import vtk_scrap from", "missing orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation", "'w') fsamp22 = open('fsam22.txt', 'w') fsamp23 = open('fsam23.txt', 'w') orien1 = open('orien1.txt', 'w')", "mm #need to add 17.9 and 10 to excel const file. diameter=(2*diameter)/1000 #writing", "elif i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail", "= open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt', 'w') fsamp3 = open('fsam3.txt', 'w') fsamp21 =", "currnetly in microns, convert to mm #need to add 17.9 and 10 to", "numpy as np import os import pandas as pd from xml.etree.ElementTree import Element,", "xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return", 
"'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif", "24 15:19:55 2020 @author: mi19356 \"\"\" import numpy as np import os import", "#contruct rotation matrix zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)):", "i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail =", "grainids,diameter=data_reconstruct_dream(vtkdata,orien) #construct a vtk file #vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio matrix \"\"\" def rotation_info(orien,grainids):", "create a variable parameter for \\tau #diameter currnetly in microns, convert to mm", "orien1 = open('orien1.txt', 'w') orien2 = open('orien2.txt', 'w') orien3 = open('orien3.txt', 'w') tau01", "#adjust const array to include grain dependent info #grain orientations #update the value", "Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168: const[i,0]=(orien[numMat-1,0]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), 
Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169:", "dependent info #grain orientations #update the value for tau0 elif i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n')", "orientations if len(orien)<len(grainids): totaldif=len(grainids)-len(orien) for i in range(0,int(totaldif)): orien=np.append(orien,[random.uniform(0,2*math.pi),random.uniform(0,2*math.pi),random.uniform(0,2*math.pi)]) orien=orien.reshape(int(len(orien)/3),3) #contruct rotation matrix", "values\" \"iterate across the different material constants to create subelelements for each constant2\"", "fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n')", "i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "tau02 = open('tau2.txt', 'w') for numMat in range(1,len(orien)+1): for i in range(0,(len(const))): if", "const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: const[i,0]=samp2[numMat-1][1]", "elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail", "const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() 
fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material',", "taud=220 + (17.9/((diameter)**0.5)) #check to make sure the there are no #checkgreater=np.where(taud>350)[0] #replace", "File=\"MAT/Coeff/fsam21.txt\") elif i==68: const[i,0]=samp2[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam22.txt\") elif i==69: const[i,0]=samp2[numMat-1][2]", "-*- \"\"\" Created on Tue Nov 24 15:19:55 2020 @author: mi19356 \"\"\" import", "vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter to create a variable parameter for \\tau #diameter", "printtofiletext(diameter,'diameters') #writing orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check to make", "variables vec1=[0,0,1] vec2=[0,1,0] #modify the orientations orien=orien[1:,1:] #check to see if there are", "Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau2.txt\") elif i==168:", "SubElement(Materials, 'Reference_Material',Lambda0= '2.0431e+5', Mu0='0.8756e+5' ) comment = Comment('MATERIAL 1') Materials.append(comment) \"orientation files required", "i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail =", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] 
child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "values #taud[checkgreater]=340.0 Materials = Element('Materials') comment = Comment('REFERENCE MATERIAL') Materials.append(comment) child = SubElement(Materials,", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif i==114: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the required number of state vairables needed for", "File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant',Value=str(const[i,0])) #iterate across the required number of", "for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif", "i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69:", "these values #taud[checkgreater]=340.0 Materials = Element('Materials') comment = Comment('REFERENCE MATERIAL') Materials.append(comment) child =", "for numMat in range(1,len(orien)+1): for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif", "\"\"\" def mat_create(orien,const, diameter,statev): #rotating vectors using grain orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the", "= SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: 
const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone',", "is the element of the grain material, and the atrributes are the parameter", "from dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data #orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00189000','graindata') dream=0 if dream==1: orien,vtkdata,vtkdataPoints,const=vtk_scrap('PF_00130000','graindata',dream)", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61: const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif", "orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all the parameters required for the", "if material zone technique is used in AMITEX\" fsamp1 = open('fsam1.txt', 'w') fsamp2", "for i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1, samp2, total_rot,", "'w') fsamp23 = open('fsam23.txt', 'w') orien1 = open('orien1.txt', 'w') orien2 = open('orien2.txt', 'w')", "Created on Tue Nov 24 15:19:55 2020 @author: mi19356 \"\"\" import numpy as", "const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==61: const[i,0]=samp1[numMat-1][2] fsamp3.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==67: const[i,0]=samp2[numMat-1][0]", "Type='Constant',Value=str(const[i,0])) #iterate across the required number of state vairables needed for i in", "open('orien3.txt', 'w') tau01 = open('tau1.txt', 'w') tau02 = open('tau2.txt', 'w') 
for numMat in", "i==98: const[i,0]=taud[numMat-1] tau01.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==114: const[i,0]=taud[numMat-1] tau02.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==168: const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169:", "the element of the grain material, and the atrributes are the parameter values\"", "10 to excel const file. diameter=(2*diameter)/1000 #writing diameters to file printtofiletext(diameter,'diameters') #writing orientations", "-*- coding: utf-8 -*- \"\"\" Created on Tue Nov 24 15:19:55 2020 @author:", "Type='Constant_Zone', File=\"MAT/Coeff/fsam1.txt\") elif i==60: const[i,0]=samp1[numMat-1][1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam2.txt\") elif i==61:", "file #vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio matrix \"\"\" def rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1]", "import os import pandas as pd from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree", "const[i,0]=samp1[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam3.txt\") elif i==67: const[i,0]=samp2[numMat-1][0] child_grain_tail = SubElement(child_grain,", "local variables vec1=[0,0,1] vec2=[0,1,0] #modify the orientations orien=orien[1:,1:] #check to see if there", "= open('fsam23.txt', 'w') orien1 = open('orien1.txt', 'w') orien2 = open('orien2.txt', 'w') orien3 =", "file. 
diameter=(2*diameter)/1000 #writing diameters to file printtofiletext(diameter,'diameters') #writing orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations')", "zrot=np.array([[np.cos((orien[:,0])),np.sin((orien[:,0])),np.zeros(len(orien))],[-np.sin((orien[:,0])),np.cos((orien[:,0])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) xrot=np.array([[np.ones(len(orien)),np.zeros(len(orien)),np.zeros(len(orien))],[np.zeros(len(orien)),np.cos((orien[:,1])),np.sin((orien[:,1]))],[np.zeros(len(orien)),-np.sin((orien[:,1])),np.cos((orien[:,1]))]]) zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2)", "open('tau1.txt', 'w') tau02 = open('tau2.txt', 'w') for numMat in range(1,len(orien)+1): for i in", "child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien1.txt\") elif i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1),", "parameters required for the material\" \"Coeff is the element of the grain material,", "elif i==67: const[i,0]=samp2[numMat-1][0] fsamp21.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==68: const[i,0]=samp2[numMat-1][1] fsamp22.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==69: const[i,0]=samp2[numMat-1][2] fsamp23.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') #adjust", "required for the material\" \"Coeff is the element of the grain material, and", "#rotating vectors using grain orientations 
vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter to create a", "Comment('MATERIAL 1') Materials.append(comment) \"orientation files required if material zone technique is used in", "grain orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter to create a variable parameter for", "to file printtofiletext(diameter,'diameters') #writing orientations to file orienprint=list(orien) printtofiletext(orienprint,'orientations') taud=220 + (17.9/((diameter)**0.5)) #check", "Create orientatio matrix \"\"\" def rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1] vec2=[0,1,0] #modify the", "'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/tau1.txt\") elif", "zone technique is used in AMITEX\" fsamp1 = open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt',", "vectors using grain orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter to create a variable", "used in AMITEX\" fsamp1 = open('fsam1.txt', 'w') fsamp2 = open('fsam2.txt', 'w') fsamp3 =", "2020 @author: mi19356 \"\"\" import numpy as np import os import pandas as", "def mat_create(orien,const, diameter,statev): #rotating vectors using grain orientations vec1,vec2,samp1,samp2,total_rot, orien=rotation_info(orien,grainids) #use the diameter", "orien \"\"\" create material file for AMITEX \"\"\" def mat_create(orien,const, diameter,statev): #rotating vectors", "orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close() fsamp21.close() fsamp22.close() fsamp23.close() orien1.close() orien2.close() orien3.close() child_grain=SubElement(Materials, 'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so',", "vec1=[0,0,1] vec2=[0,1,0] #modify the 
orientations orien=orien[1:,1:] #check to see if there are missing", "samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1, samp2,", "math from scrape import vtk_scrap from dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext #scrape data", "i in range(0,len(orien)): total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1, vec2, samp1, samp2, total_rot, orien", "const[i,0]=(orien[numMat-1,0]) orien1.write(str(const[i,0])+'\\n') elif i==169: const[i,0]=(orien[numMat-1,1]) orien2.write(str(const[i,0])+'\\n') elif i==170: const[i,0]=(orien[numMat-1,2]) orien3.write(str(const[i,0])+'\\n') fsamp1.close() fsamp2.close() fsamp3.close()", "to create a variable parameter for \\tau #diameter currnetly in microns, convert to", "File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail =", "\"\"\" import numpy as np import os import pandas as pd from xml.etree.ElementTree", "def rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1] vec2=[0,1,0] #modify the orientations orien=orien[1:,1:] #check to", "element of the grain material, and the atrributes are the parameter values\" \"iterate", "zrot2=np.array([[np.cos((orien[:,2])),np.sin((orien[:,2])),np.zeros(len(orien))],[-np.sin((orien[:,2])),np.cos((orien[:,2])),np.zeros(len(orien))],[np.zeros(len(orien)),np.zeros(len(orien)),np.ones(len(orien))]]) total_rot=[[]*len(orien)]*len(orien) samp1=[[]*len(orien)]*len(orien) samp2=[[]*len(orien)]*len(orien) for i in range(0,len(orien)): 
total_rot[i]=np.transpose(np.dot(np.dot(zrot2[:,:,i],xrot[:,:,i]),zrot[:,:,i])) samp1[i]=np.dot(total_rot[i],vec1) samp2[i]=np.dot(total_rot[i],vec2) return vec1,", "= open('orien1.txt', 'w') orien2 = open('orien2.txt', 'w') orien3 = open('orien3.txt', 'w') tau01 =", "#vtkdatareso=reso_change(vtkdata) \"\"\" Create orientatio matrix \"\"\" def rotation_info(orien,grainids): #Defining local variables vec1=[0,0,1] vec2=[0,1,0]", "range(1,len(orien)+1): for i in range(0,(len(const))): if i==59: const[i,0]=samp1[numMat-1][0] fsamp1.write(str(\"{:.16f}\".format(const[i,0]))+'\\n') elif i==60: const[i,0]=samp1[numMat-1][1] fsamp2.write(str(\"{:.16f}\".format(const[i,0]))+'\\n')", "the material\" \"Coeff is the element of the grain material, and the atrributes", "'Material', numM=\"1\",Lib='/mnt/storage/home/mi19356/amitex_fftp-v8.17.1/Grainsize/UMAT/libUmatAmitex.so', Law='UMATBCCGDGS') \"This stores all the parameters required for the material\" \"Coeff", "random import math from scrape import vtk_scrap from dataconversions import data_reconstruct, reso_change, data_reconstruct_dream,sphericaldiam,printtofiletext", "i==169: const[i,0]=(orien[numMat-1,1]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien2.txt\") elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail =", "os import pandas as pd from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree import", "const[i,0]=samp2[numMat-1][2] child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/fsam23.txt\") elif i==98: const[i,0]=taud[numMat-1] child_grain_tail = SubElement(child_grain,", "elif i==170: const[i,0]=(orien[numMat-1,2]) child_grain_tail = SubElement(child_grain, 'Coeff',Index=str(i+1), Type='Constant_Zone', File=\"MAT/Coeff/orien3.txt\") else: child_grain_tail = SubElement(child_grain,", "utf-8 -*- \"\"\" Created on Tue Nov 24 15:19:55 2020 @author: mi19356 \"\"\"" ]
[ "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,", "SOFTWARE. \"\"\" from enum import Enum from .message import Message class NumberingSchemes(Enum): \"\"\"Numbering", "Raw payload that was resulted upon requesting phonebook entries Returns: PhoneBookEntry: A phonebook", "idx (int): Index of the entry in the phonebook number (str): The phonebook", "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "phonebook entry number scheme (NumberingSchemes): Numbering scheme used (National or International) contact_name (str):", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED", "Index of the entry in the phonebook number (str): The phonebook entry number", "this software and associated documentation files (the \"Software\"), to deal in the Software", "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "number: str, scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor not recommended to called directly Args:", "contact_name (str): The name the entry has been saved as \"\"\" self.index =", "entry \"\"\" def __init__(self, idx: int, number: str, scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor", "A stringified version of the object. \"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod", "OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "the Software without restriction, including without limitation the rights to use, copy, modify,", "person obtaining a copy of this software and associated documentation files (the \"Software\"),", "the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "IN THE SOFTWARE. 
\"\"\" from enum import Enum from .message import Message class", "without restriction, including without limitation the rights to use, copy, modify, merge, publish,", "merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit", "the entry in the phonebook number (str): The phonebook entry number scheme (NumberingSchemes):", "= 0 INTERNATIONAL = 1 class PhoneBookEntry: \"\"\"A Phonebook entry \"\"\" def __init__(self,", "the entry has been saved as \"\"\" self.index = idx self.number = number", "called directly Args: idx (int): Index of the entry in the phonebook number", "= number self.scheme = scheme self.contact_name = contact_name def __repr__(self): \"\"\"`__repr__` magicmethod to", "\"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str): \"\"\"Creates a phonebook", "in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED", "of the object. \"\"\" return str(self) def __str__(self): \"\"\"`__str__` magicmethod to be used", "The phonebook entry number scheme (NumberingSchemes): Numbering scheme used (National or International) contact_name", "sublicense, and/or sell copies of the Software, and to permit persons to whom", "this permission notice shall be included in all copies or substantial portions of", "modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to", "ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "\"\"\" params = Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL if params[2] == 129", "of the entry in the phonebook number (str): The phonebook entry number scheme", "def __str__(self): \"\"\"`__str__` magicmethod to be used in repr(). 
Returns: str: A stringified", "WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "schemes implemented in phonebooks \"\"\" NATIONAL = 0 INTERNATIONAL = 1 class PhoneBookEntry:", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A", "notice and this permission notice shall be included in all copies or substantial", "Phonebook entry \"\"\" def __init__(self, idx: int, number: str, scheme: NumberingSchemes, contact_name: str):", "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE", "charge, to any person obtaining a copy of this software and associated documentation", "def from_payload(payload: str): \"\"\"Creates a phonebook entry from a payload. Args: payload (str):", "KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "str): \"\"\"Creates a phonebook entry from a payload. Args: payload (str): Raw payload", "number self.scheme = scheme self.contact_name = contact_name def __repr__(self): \"\"\"`__repr__` magicmethod to be", "\"\"\"`__str__` magicmethod to be used in repr(). Returns: str: A stringified version of", "phonebook entry from a payload. Args: payload (str): Raw payload that was resulted", "the object. \"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str): \"\"\"Creates", "a payload. 
Args: payload (str): Raw payload that was resulted upon requesting phonebook", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION", "persons to whom the Software is furnished to do so, subject to the", "Software is furnished to do so, subject to the following conditions: The above", "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "upon requesting phonebook entries Returns: PhoneBookEntry: A phonebook entry \"\"\" params = Message.from_payload(payload).parameters", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND", "\"\"\"`__repr__` magicmethod to be used in repr(). Returns: str: Representation of the object.", "__repr__(self): \"\"\"`__repr__` magicmethod to be used in repr(). Returns: str: Representation of the", "Enum from .message import Message class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in phonebooks \"\"\"", "to deal in the Software without restriction, including without limitation the rights to", "import Enum from .message import Message class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in phonebooks", "to whom the Software is furnished to do so, subject to the following", "phonebooks \"\"\" NATIONAL = 0 INTERNATIONAL = 1 class PhoneBookEntry: \"\"\"A Phonebook entry", "documentation files (the \"Software\"), to deal in the Software without restriction, including without", "has been saved as \"\"\" self.index = idx self.number = number self.scheme =", "NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in phonebooks \"\"\" NATIONAL = 0 INTERNATIONAL = 1", "from .message import Message class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in phonebooks \"\"\" NATIONAL", "files (the \"Software\"), to deal in the Software without restriction, including without limitation", "Software without restriction, including without limitation the rights to use, copy, modify, merge,", "\"\"\" NATIONAL = 0 INTERNATIONAL = 1 class 
PhoneBookEntry: \"\"\"A Phonebook entry \"\"\"", "\"\"\" from enum import Enum from .message import Message class NumberingSchemes(Enum): \"\"\"Numbering schemes", "to do so, subject to the following conditions: The above copyright notice and", "in the Software without restriction, including without limitation the rights to use, copy,", "PhoneBookEntry: \"\"\"A Phonebook entry \"\"\" def __init__(self, idx: int, number: str, scheme: NumberingSchemes,", "that was resulted upon requesting phonebook entries Returns: PhoneBookEntry: A phonebook entry \"\"\"", "the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "to any person obtaining a copy of this software and associated documentation files", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from enum", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from", "str: A stringified version of the object. \"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\"", "scheme used (National or International) contact_name (str): The name the entry has been", "NATIONAL = 0 INTERNATIONAL = 1 class PhoneBookEntry: \"\"\"A Phonebook entry \"\"\" def", "to called directly Args: idx (int): Index of the entry in the phonebook", "a copy of this software and associated documentation files (the \"Software\"), to deal", "Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF", "params = Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL if params[2] == 129 else", "f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str): \"\"\"Creates a phonebook entry from", "str(self) def __str__(self): \"\"\"`__str__` magicmethod to be used in repr(). Returns: str: A", "\"\"\"Creates a phonebook entry from a payload. Args: payload (str): Raw payload that", "enum import Enum from .message import Message class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in", "be used in repr(). Returns: str: A stringified version of the object. \"\"\"", "free of charge, to any person obtaining a copy of this software and", "and this permission notice shall be included in all copies or substantial portions", "and to permit persons to whom the Software is furnished to do so,", "in repr(). Returns: str: Representation of the object. 
\"\"\" return str(self) def __str__(self):", "str, scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor not recommended to called directly Args: idx", "rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "str): \"\"\"Constructor not recommended to called directly Args: idx (int): Index of the", "number (str): The phonebook entry number scheme (NumberingSchemes): Numbering scheme used (National or", "EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "directly Args: idx (int): Index of the entry in the phonebook number (str):", "= idx self.number = number self.scheme = scheme self.contact_name = contact_name def __repr__(self):", "phonebook entry \"\"\" params = Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL if params[2]", "scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str): \"\"\"Creates a phonebook entry from a payload.", "Representation of the object. \"\"\" return str(self) def __str__(self): \"\"\"`__str__` magicmethod to be", "associated documentation files (the \"Software\"), to deal in the Software without restriction, including", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE", "used in repr(). Returns: str: A stringified version of the object. \"\"\" return", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,", "2021 <NAME> Permission is hereby granted, free of charge, to any person obtaining", "from a payload. Args: payload (str): Raw payload that was resulted upon requesting", "Returns: str: A stringified version of the object. 
\"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme},", "notice shall be included in all copies or substantial portions of the Software.", "payload (str): Raw payload that was resulted upon requesting phonebook entries Returns: PhoneBookEntry:", "Numbering scheme used (National or International) contact_name (str): The name the entry has", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT", "copy of this software and associated documentation files (the \"Software\"), to deal in", "substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "Returns: PhoneBookEntry: A phonebook entry \"\"\" params = Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1],", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION", "obtaining a copy of this software and associated documentation files (the \"Software\"), to", "INTERNATIONAL = 1 class PhoneBookEntry: \"\"\"A Phonebook entry \"\"\" def __init__(self, idx: int,", "TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", ".message import Message class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in phonebooks \"\"\" NATIONAL =", "implemented in phonebooks \"\"\" NATIONAL = 0 INTERNATIONAL = 1 class PhoneBookEntry: \"\"\"A", "OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "self.index = idx self.number = number self.scheme = scheme self.contact_name = contact_name def", "scheme (NumberingSchemes): Numbering scheme used (National or International) contact_name (str): The name the", "used in repr(). Returns: str: Representation of the object. 
\"\"\" return str(self) def", "\"\"\"A Phonebook entry \"\"\" def __init__(self, idx: int, number: str, scheme: NumberingSchemes, contact_name:", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR", "idx: int, number: str, scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor not recommended to called", "OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from enum import", "number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str): \"\"\"Creates a phonebook entry from a", "OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons", "contact_name: str): \"\"\"Constructor not recommended to called directly Args: idx (int): Index of", "including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,", "Args: payload (str): Raw payload that was resulted upon requesting phonebook entries Returns:", "or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "(str): The phonebook entry number scheme (NumberingSchemes): Numbering scheme used (National or International)", "or International) contact_name (str): The name the entry has been saved as \"\"\"", "requesting phonebook entries Returns: PhoneBookEntry: A phonebook entry \"\"\" params = Message.from_payload(payload).parameters return", "The name the entry has been saved as \"\"\" self.index = idx self.number", "all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED \"AS", "SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "entries Returns: PhoneBookEntry: A phonebook entry \"\"\" params = Message.from_payload(payload).parameters return PhoneBookEntry( params[0],", "not recommended to called directly Args: idx (int): Index of the entry in", "number scheme (NumberingSchemes): Numbering scheme used (National or International) contact_name (str): The name", "OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from enum import Enum", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN", "be used in repr(). Returns: str: Representation of the object. \"\"\" return str(self)", "ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "to be used in repr(). Returns: str: Representation of the object. \"\"\" return", "(str): Raw payload that was resulted upon requesting phonebook entries Returns: PhoneBookEntry: A", "OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "above copyright notice and this permission notice shall be included in all copies", "the phonebook number (str): The phonebook entry number scheme (NumberingSchemes): Numbering scheme used", "= scheme self.contact_name = contact_name def __repr__(self): \"\"\"`__repr__` magicmethod to be used in", "WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "return str(self) def __str__(self): \"\"\"`__str__` magicmethod to be used in repr(). Returns: str:", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS", "permission notice shall be included in all copies or substantial portions of the", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS", "@staticmethod def from_payload(payload: str): \"\"\"Creates a phonebook entry from a payload. Args: payload", "recommended to called directly Args: idx (int): Index of the entry in the", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH", "the following conditions: The above copyright notice and this permission notice shall be", "to be used in repr(). Returns: str: A stringified version of the object.", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT", "= 1 class PhoneBookEntry: \"\"\"A Phonebook entry \"\"\" def __init__(self, idx: int, number:", "furnished to do so, subject to the following conditions: The above copyright notice", "OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from enum import Enum from .message", "return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL if params[2] == 129 else NumberingSchemes.INTERNATIONAL, params[3] )", "in repr(). Returns: str: A stringified version of the object. \"\"\" return f\"<PhoneBookEntry<index={self.index},", "permit persons to whom the Software is furnished to do so, subject to", "any person obtaining a copy of this software and associated documentation files (the", "1 class PhoneBookEntry: \"\"\"A Phonebook entry \"\"\" def __init__(self, idx: int, number: str,", "copies of the Software, and to permit persons to whom the Software is", "import Message class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in phonebooks \"\"\" NATIONAL = 0", "a phonebook entry from a payload. Args: payload (str): Raw payload that was", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", "included in all copies or substantial portions of the Software. 
THE SOFTWARE IS", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and", "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "(c) 2021 <NAME> Permission is hereby granted, free of charge, to any person", "\"\"\" return str(self) def __str__(self): \"\"\"`__str__` magicmethod to be used in repr(). Returns:", "the Software, and to permit persons to whom the Software is furnished to", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,", "following conditions: The above copyright notice and this permission notice shall be included", "(National or International) contact_name (str): The name the entry has been saved as", "International) contact_name (str): The name the entry has been saved as \"\"\" self.index", "copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\",", "return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str): \"\"\"Creates a phonebook entry", "NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "0 INTERNATIONAL = 1 class PhoneBookEntry: \"\"\"A Phonebook entry \"\"\" def __init__(self, idx:", "The above copyright notice and this permission notice shall be included in all", "\"Software\"), to deal in the Software without restriction, including without limitation the rights", "deal in the Software without restriction, including without limitation the rights to use,", "DEALINGS IN THE SOFTWARE. 
\"\"\" from enum import Enum from .message import Message", "granted, free of charge, to any person obtaining a copy of this software", "limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "def __init__(self, idx: int, number: str, scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor not recommended", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "def __repr__(self): \"\"\"`__repr__` magicmethod to be used in repr(). Returns: str: Representation of", "str: Representation of the object. \"\"\" return str(self) def __str__(self): \"\"\"`__str__` magicmethod to", "of this software and associated documentation files (the \"Software\"), to deal in the", "the object. \"\"\" return str(self) def __str__(self): \"\"\"`__str__` magicmethod to be used in", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO", "sell copies of the Software, and to permit persons to whom the Software", "(int): Index of the entry in the phonebook number (str): The phonebook entry", "scheme self.contact_name = contact_name def __repr__(self): \"\"\"`__repr__` magicmethod to be used in repr().", "Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL if params[2] == 129 else NumberingSchemes.INTERNATIONAL, params[3]", "do so, subject to the following conditions: The above copyright notice and this", "\"\"\" self.index = idx self.number = number self.scheme = scheme self.contact_name = contact_name", "MIT License Copyright (c) 2021 <NAME> Permission is hereby granted, free of charge,", "class PhoneBookEntry: \"\"\"A Phonebook entry \"\"\" def __init__(self, idx: int, number: str, scheme:", "is furnished to do so, subject to the following conditions: The above copyright", "repr(). Returns: str: A stringified version of the object. 
\"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}',", "\"\"\"Numbering schemes implemented in phonebooks \"\"\" NATIONAL = 0 INTERNATIONAL = 1 class", "saved as \"\"\" self.index = idx self.number = number self.scheme = scheme self.contact_name", "so, subject to the following conditions: The above copyright notice and this permission", "phonebook number (str): The phonebook entry number scheme (NumberingSchemes): Numbering scheme used (National", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR", "of the Software, and to permit persons to whom the Software is furnished", "self.contact_name = contact_name def __repr__(self): \"\"\"`__repr__` magicmethod to be used in repr(). Returns:", "and/or sell copies of the Software, and to permit persons to whom the", "magicmethod to be used in repr(). Returns: str: Representation of the object. \"\"\"", "of charge, to any person obtaining a copy of this software and associated", "(the \"Software\"), to deal in the Software without restriction, including without limitation the", "class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in phonebooks \"\"\" NATIONAL = 0 INTERNATIONAL =", "(NumberingSchemes): Numbering scheme used (National or International) contact_name (str): The name the entry", "copyright notice and this permission notice shall be included in all copies or", "version of the object. 
\"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload:", "to permit persons to whom the Software is furnished to do so, subject", "= Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL if params[2] == 129 else NumberingSchemes.INTERNATIONAL,", "from enum import Enum from .message import Message class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented", "resulted upon requesting phonebook entries Returns: PhoneBookEntry: A phonebook entry \"\"\" params =", "conditions: The above copyright notice and this permission notice shall be included in", "THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "(str): The name the entry has been saved as \"\"\" self.index = idx", "NumberingSchemes, contact_name: str): \"\"\"Constructor not recommended to called directly Args: idx (int): Index", "was resulted upon requesting phonebook entries Returns: PhoneBookEntry: A phonebook entry \"\"\" params", "idx self.number = number self.scheme = scheme self.contact_name = contact_name def __repr__(self): \"\"\"`__repr__`", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER", "Permission is hereby granted, free of charge, to any person obtaining a copy", "stringified version of the object. \"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def", "be included in all copies or substantial portions of the Software. THE SOFTWARE", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "entry from a payload. 
Args: payload (str): Raw payload that was resulted upon", "whom the Software is furnished to do so, subject to the following conditions:", "self.scheme = scheme self.contact_name = contact_name def __repr__(self): \"\"\"`__repr__` magicmethod to be used", "PhoneBookEntry: A phonebook entry \"\"\" params = Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL", "self.number = number self.scheme = scheme self.contact_name = contact_name def __repr__(self): \"\"\"`__repr__` magicmethod", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\"", "THE SOFTWARE. \"\"\" from enum import Enum from .message import Message class NumberingSchemes(Enum):", "USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from enum import Enum from", "name the entry has been saved as \"\"\" self.index = idx self.number =", "FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "of the object. \"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str):", "entry has been saved as \"\"\" self.index = idx self.number = number self.scheme", "__str__(self): \"\"\"`__str__` magicmethod to be used in repr(). Returns: str: A stringified version", "portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "payload that was resulted upon requesting phonebook entries Returns: PhoneBookEntry: A phonebook entry", "\"\"\" MIT License Copyright (c) 2021 <NAME> Permission is hereby granted, free of", "DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "<filename>src/PhoneBook.py \"\"\" MIT License Copyright (c) 2021 <NAME> Permission is hereby granted, free", "distribute, sublicense, and/or sell copies of the Software, and to permit persons to", "of the Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "\"\"\" def __init__(self, idx: int, number: str, scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor not", "software and associated documentation files (the \"Software\"), to deal in the Software without", "repr(). Returns: str: Representation of the object. \"\"\" return str(self) def __str__(self): \"\"\"`__str__`", "magicmethod to be used in repr(). Returns: str: A stringified version of the", "Args: idx (int): Index of the entry in the phonebook number (str): The", "shall be included in all copies or substantial portions of the Software. THE", "in the phonebook number (str): The phonebook entry number scheme (NumberingSchemes): Numbering scheme", "entry \"\"\" params = Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL if params[2] ==", "NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "phonebook entries Returns: PhoneBookEntry: A phonebook entry \"\"\" params = Message.from_payload(payload).parameters return PhoneBookEntry(", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "A phonebook entry \"\"\" params = Message.from_payload(payload).parameters return PhoneBookEntry( params[0], params[1], NumberingSchemes.NATIONAL if", "object. \"\"\" return str(self) def __str__(self): \"\"\"`__str__` magicmethod to be used in repr().", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "__init__(self, idx: int, number: str, scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor not recommended to", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT", "the Software is furnished to do so, subject to the following conditions: The", "contact_name def __repr__(self): \"\"\"`__repr__` magicmethod to be used in repr(). 
Returns: str: Representation", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING", "been saved as \"\"\" self.index = idx self.number = number self.scheme = scheme", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT", "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS", "subject to the following conditions: The above copyright notice and this permission notice", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE", "used (National or International) contact_name (str): The name the entry has been saved", "object. \"\"\" return f\"<PhoneBookEntry<index={self.index}, number='{self.number}', scheme={self.scheme}, contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str): \"\"\"Creates a", "Message class NumberingSchemes(Enum): \"\"\"Numbering schemes implemented in phonebooks \"\"\" NATIONAL = 0 INTERNATIONAL", "as \"\"\" self.index = idx self.number = number self.scheme = scheme self.contact_name =", "is hereby granted, free of charge, to any person obtaining a copy of", "and associated documentation files (the \"Software\"), to deal in the Software without restriction,", "FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "hereby granted, free of charge, to any person obtaining a copy of this", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE", "\"\"\"Constructor not recommended to called directly Args: idx (int): Index of the entry", "scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor not recommended to called directly Args: idx (int):", "Returns: str: Representation of the object. 
\"\"\" return str(self) def __str__(self): \"\"\"`__str__` magicmethod", "int, number: str, scheme: NumberingSchemes, contact_name: str): \"\"\"Constructor not recommended to called directly", "restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute,", "in phonebooks \"\"\" NATIONAL = 0 INTERNATIONAL = 1 class PhoneBookEntry: \"\"\"A Phonebook", "entry number scheme (NumberingSchemes): Numbering scheme used (National or International) contact_name (str): The", "OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR", "entry in the phonebook number (str): The phonebook entry number scheme (NumberingSchemes): Numbering", "= contact_name def __repr__(self): \"\"\"`__repr__` magicmethod to be used in repr(). Returns: str:", "payload. Args: payload (str): Raw payload that was resulted upon requesting phonebook entries", "License Copyright (c) 2021 <NAME> Permission is hereby granted, free of charge, to", "to the following conditions: The above copyright notice and this permission notice shall", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "Software, and to permit persons to whom the Software is furnished to do", "contactname='{self.contact_name}'>\" @staticmethod def from_payload(payload: str): \"\"\"Creates a phonebook entry from a payload. Args:", "OTHER DEALINGS IN THE SOFTWARE. \"\"\" from enum import Enum from .message import", "<NAME> Permission is hereby granted, free of charge, to any person obtaining a", "Copyright (c) 2021 <NAME> Permission is hereby granted, free of charge, to any", "from_payload(payload: str): \"\"\"Creates a phonebook entry from a payload. Args: payload (str): Raw" ]
[ "<reponame>twobackfromtheend/challenger<filename>challenger_bot/car_controllers.py from enum import Enum class CarController(Enum): AGENT = 0 DS4 = 1" ]
[ "<filename>scripts/bulkLoadGeojson2elasticsearch.py # ================================================================= # # Authors: <NAME> <<EMAIL>> # # ================================================================= import os", "#,'r1','r2']: for view in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view',", "================================================================= # # Authors: <NAME> <<EMAIL>> # # ================================================================= import os for eqScenario", "# # ================================================================= import os for eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']:", "in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading:", "'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: '+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix, 'view':view})) os.system('python", "# Authors: <NAME> <<EMAIL>> # # ================================================================= import os for eqScenario in ['sim6p8_cr2022_rlz_1']:", "'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: 
'+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix, 'view':view})) os.system('python load_es_data.py dsra_{eqScenario}_{retrofitPrefix}_{view}.json", "<NAME> <<EMAIL>> # # ================================================================= import os for eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix", "in ['b0']: #,'r1','r2']: for view in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view',", "<<EMAIL>> # # ================================================================= import os for eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in", "'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: '+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix, 'view':view})) os.system('python load_es_data.py dsra_{eqScenario}_{retrofitPrefix}_{view}.json \"Sauid\"'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix,", "Authors: <NAME> <<EMAIL>> # # ================================================================= import os for eqScenario in ['sim6p8_cr2022_rlz_1']: for", "for retrofitPrefix in ['b0']: #,'r1','r2']: for view in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view',", "'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: '+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix, 'view':view}))", "for view in ['casualties_agg_view', 
'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view',", "'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: '+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix, 'view':view})) os.system('python load_es_data.py dsra_{eqScenario}_{retrofitPrefix}_{view}.json \"Sauid\"'.format(**{'eqScenario':eqScenario,", "import os for eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']: #,'r1','r2']: for view", "in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']: #,'r1','r2']: for view in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view',", "['b0']: #,'r1','r2']: for view in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view',", "'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: '+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix, 'view':view})) os.system('python load_es_data.py dsra_{eqScenario}_{retrofitPrefix}_{view}.json \"Sauid\"'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix, 'view':view}))", "for eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']: #,'r1','r2']: for view in ['casualties_agg_view',", "'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: 
'+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix, 'view':view})) os.system('python load_es_data.py", "view in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']:", "# ================================================================= # # Authors: <NAME> <<EMAIL>> # # ================================================================= import os for", "['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: '+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario,", "retrofitPrefix in ['b0']: #,'r1','r2']: for view in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view',", "# ================================================================= import os for eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']: #,'r1','r2']:", "================================================================= import os for eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']: #,'r1','r2']: for", "'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view', 'functional_state_agg_view', 'partial_retrofit_agg_view', 'recovery_agg_view', 'scenario_hazard_agg_view', 'scenario_hazard_threat_agg_view', 'scenario_rupture_agg_view', 'social_disruption_agg_view']: print('loading: 
'+'dsra_{eqScenario}_{retrofitPrefix}_{view}.json'.format(**{'eqScenario':eqScenario, 'retrofitPrefix':retrofitPrefix,", "# # Authors: <NAME> <<EMAIL>> # # ================================================================= import os for eqScenario in", "os for eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']: #,'r1','r2']: for view in", "eqScenario in ['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']: #,'r1','r2']: for view in ['casualties_agg_view', 'damage_state_agg_view',", "['sim6p8_cr2022_rlz_1']: for retrofitPrefix in ['b0']: #,'r1','r2']: for view in ['casualties_agg_view', 'damage_state_agg_view', 'economic_loss_agg_view', 'full_retrofit_agg_view'," ]
[ "print \"\\n=== Test ADD/DEL %s user objects ===\\n\" % num avg_add = Decimal(\"0.0\")", "difference in execution time for bulk # creation of user objects. This will", "in host: host = \"ldap://%s\" % host ldb_options = [\"modules:paged_searches\"] ldb = SamDB(host,", "running information if not \"://\" in host: host = \"ldap://%s\" % host ldb_options", "PURPOSE. See the # GNU General Public License for more details. # #", "import delete_force from subunit.run import SubunitTestRunner import unittest parser = optparse.OptionParser(\"speedtest.py [options] <host>\")", "import gensec, sd_utils from samba.samdb import SamDB from samba.credentials import Credentials import samba.tests", "self.ldb_admin = ldb self.base_dn = ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print", "%.3fs\" % ( x, float(res_add) ) # start = time.time() self.remove_bundle(num) res_del =", "# -*- coding: utf-8 -*- # # Unix SMB/CIFS implementation. # This speed", "user objects created ===\\n\" % num print \"\\n=== Test search on %s user", "of user objects. 
This will help us compare # Samba4 vs MS Active", "\"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\"", "self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\"))", "===\\n\" % num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for i in", "def create_user(self, user_dn): ldif = \"\"\" dn: \"\"\" + user_dn + \"\"\" sAMAccountName:", "will be filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn,", "test should be enabled preferably against MS Active Directory. It takes quite the", "-*- # # Unix SMB/CIFS implementation. # This speed test aims to show", "% float( Decimal(avg_add) / Decimal(\"3.0\") ) print \"Average DEL: %.3fs\" % float( Decimal(avg_del)", "= \"ldap://%s\" % host ldb_options = [\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds, session_info=system_session(), lp=lp,", "x, float(res_add) ) # start = time.time() self.remove_bundle(num) res_del = Decimal( str(time.time() -", "host: host = \"ldap://%s\" % host ldb_options = [\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds,", "parser.parse_args() if len(args) < 1: parser.print_usage() sys.exit(1) host = args[0] lp = sambaopts.get_loadparm()", "(name, self.base_dn) def get_ldb_connection(self, target_username, target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm())", "samba.tests from samba.tests import delete_force from subunit.run import SubunitTestRunner import unittest parser =", "group_dn 
+ \"\"\" objectClass: group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4", "= [\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner = SubunitTestRunner() rc", "= self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb): print", "= self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def", "def create_bundle(self, count): for i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_bundle(self,", ") self.remove_bundle(num) def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" % (name, self.base_dn) def get_ldb_connection(self, target_username,", "should have received a copy of the GNU General Public License # along", "the GNU General Public License # along with this program. 
If not, see", "= parser.parse_args() if len(args) < 1: parser.print_usage() sys.exit(1) host = args[0] lp =", "class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user =", "objects ===\\n\" % num avg_add = Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for x in", "rc = 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc = 1 if not runner.run(unittest.makeSuite(AclSearchSpeedTest)).wasSuccessful(): rc", "%s user objects ===\\n\" % num avg_add = Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for", "class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def", "in str(item.dn)] for dn in dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel,", "samba.samdb import SamDB from samba.credentials import Credentials import samba.tests from samba.tests import delete_force", "without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "time for bulk # creation of user objects. This will help us compare", "for bulk # creation of user objects. 
This will help us compare #", "time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time() - start) )", "optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command line creds", "super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb): print \"\\n=== Creating %s user", "ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin = ldb self.base_dn = ldb.domain_dn() self.domain_sid =", "ldif = \"\"\" dn: \"\"\" + user_dn + \"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:]", "scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time() - start) ) avg_search += res_search print \"", "see objects but not attributes, all attributes will be filtered out mod =", "= \"<PASSWORD>@\" print \"baseDN: %s\" % self.base_dn def create_user(self, user_dn): ldif = \"\"\"", "AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\",", "% float( Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" %", "test users from previous test \"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100)", "def create_group(self, group_dn, desc=None): ldif = \"\"\" dn: \"\"\" + group_dn + \"\"\"", "user_dn): ldif = \"\"\" dn: \"\"\" + user_dn + \"\"\" sAMAccountName: \"\"\" +", "if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc = 1 if not 
runner.run(unittest.makeSuite(AclSearchSpeedTest)).wasSuccessful(): rc = 1 sys.exit(rc)", "self.base_dn)) def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn", "start = time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time() -", "You should have received a copy of the GNU General Public License #", "self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): # allow the user to see objects but not", "===\\n\" % num avg_search = Decimal(\"0.0\") for x in [1, 2, 3]: start", "\"\"\" groupType: 4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for i in", "import unittest parser = optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) #", "samba.auth import system_session from samba import gensec, sd_utils from samba.samdb import SamDB from", "res_add = Decimal( str(time.time() - start) ) avg_add += res_add print \" Attempt", "\"\\n=== Test ADD/DEL %s user objects ===\\n\" % num avg_add = Decimal(\"0.0\") avg_del", "time.time() self.create_bundle(num) res_add = Decimal( str(time.time() - start) ) avg_add += res_add print", "get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" % (name, self.base_dn) def get_ldb_connection(self, target_username, target_password): creds_tmp =", "= SubunitTestRunner() rc = 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc = 1 if not", "This speed test aims to show difference in execution time for bulk #", "details. # # You should have received a copy of the GNU General", "Samba4 (1-2 days). 
\"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\")", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "\"<PASSWORD>@\" print \"baseDN: %s\" % self.base_dn def create_user(self, user_dn): ldif = \"\"\" dn:", "mod) print \"\\n=== %s user objects created ===\\n\" % num print \"\\n=== Test", "self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn for item in res if \"speedtestuser\" in", "that it will be useful, # but WITHOUT ANY WARRANTY; without even the", "options from ldb import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr", "str(time.time() - start) ) avg_del += res_del print \" Attempt %s DEL: %.3fs\"", "implementation. # This speed test aims to show difference in execution time for", "ndr_unpack from samba.dcerpc import security from samba.auth import system_session from samba import gensec,", "float( Decimal(avg_add) / Decimal(\"3.0\") ) print \"Average DEL: %.3fs\" % float( Decimal(avg_del) /", ") print \"\" def test_00000(self): \"\"\" Remove possibly undeleted test users from previous", "< 1: parser.print_usage() sys.exit(1) host = args[0] lp = sambaopts.get_loadparm() creds = credopts.get_credentials(lp)", "\"subunit/python\") import samba.getopt as options from ldb import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT,", "the terms of the GNU General Public License as published by # the", "\"Average DEL: %.3fs\" % float( Decimal(avg_del) / Decimal(\"3.0\") ) print \"\" def test_00000(self):", "creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target def test_search_01000(self):", "Test ADD/DEL %s user objects 
===\\n\" % num avg_add = Decimal(\"0.0\") avg_del =", "filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000,", "\"\"\" This test should be enabled preferably against MS Active Directory. It takes", "3]: start = time.time() self.create_bundle(num) res_add = Decimal( str(time.time() - start) ) avg_add", "self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\" This test should be enabled preferably", "%s ADD: %.3fs\" % ( x, float(res_add) ) # start = time.time() self.remove_bundle(num)", "4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\"", "\"\\n=== Test search on %s user objects ===\\n\" % num avg_search = Decimal(\"0.0\")", "www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None): ldif = \"\"\" dn: \"\"\" +", "% self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None): ldif", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "len(args) < 1: parser.print_usage() sys.exit(1) host = args[0] lp = sambaopts.get_loadparm() creds =", "url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None): ldif = \"\"\" dn: \"\"\"", "this program. If not, see <http://www.gnu.org/licenses/>. 
# import optparse import sys import time", "num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\"", "res_search = Decimal( str(time.time() - start) ) avg_search += res_search print \" Attempt", "setUp(self): super(SpeedTest, self).setUp() self.ldb_admin = ldb self.base_dn = ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass", "- start) ) avg_search += res_search print \" Attempt %s SEARCH: %.3fs\" %", "the hope that it will be useful, # but WITHOUT ANY WARRANTY; without", "# start = time.time() self.remove_bundle(num) res_del = Decimal( str(time.time() - start) ) avg_del", "= [item.dn for item in res if \"speedtestuser\" in str(item.dn)] for dn in", "\"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod)", "% ( x, float(res_del) ) print \"Average ADD: %.3fs\" % float( Decimal(avg_add) /", "ADD/DEL %s user objects ===\\n\" % num avg_add = Decimal(\"0.0\") avg_del = Decimal(\"0.0\")", "creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target", "SamDB from samba.credentials import Credentials import samba.tests from samba.tests import delete_force from subunit.run", "self.ldb_user) # Important unit running information if not \"://\" in host: host =", "\"\"\" dn: \"\"\" + group_dn + \"\"\" objectClass: group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:]", "see <http://www.gnu.org/licenses/>. 
# import optparse import sys import time import base64 from decimal", "created ===\\n\" % num print \"\\n=== Test search on %s user objects ===\\n\"", "self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb):", "ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print \"baseDN: %s\" % self.base_dn def", "str(self.user_sid)) for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod) print \"\\n=== %s", "creds if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args = parser.parse_args() if len(args)", "% (name, self.base_dn) def get_ldb_connection(self, target_username, target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain())", "create_group(self, group_dn, desc=None): ldif = \"\"\" dn: \"\"\" + group_dn + \"\"\" objectClass:", "# You should have received a copy of the GNU General Public License", "previous test \"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000)", "(str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) # Important unit running information", "float(res_add) ) # start = time.time() self.remove_bundle(num) res_del = Decimal( str(time.time() - start)", "str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) # Important unit running information if", "+ user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user unicodePwd:: \"\"\" + 
base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) +", "def test_search2_01000(self): # allow the user to see objects but not attributes, all", "\"\\n=== Creating %s user objects ===\\n\" % num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" %", "test aims to show difference in execution time for bulk # creation of", "Decimal(\"3.0\") ) print \"Average DEL: %.3fs\" % float( Decimal(avg_del) / Decimal(\"3.0\") ) print", "credentials=creds_tmp, lp=lp) return ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): # allow the", "creation of user objects. This will help us compare # Samba4 vs MS", "SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner = SubunitTestRunner() rc = 0 if not", "def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn for", "version. # # This program is distributed in the hope that it will", "start) ) avg_search += res_search print \" Attempt %s SEARCH: %.3fs\" % (", "- start) ) avg_del += res_del print \" Attempt %s DEL: %.3fs\" %", "takes quite the time against Samba4 (1-2 days). 
\"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def", "# import optparse import sys import time import base64 from decimal import Decimal", "[options] <host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command line creds if", "= args[0] lp = sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # #", "[1, 2, 3]: start = time.time() self.create_bundle(num) res_add = Decimal( str(time.time() - start)", "from ldb import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import", "a copy of the GNU General Public License # along with this program.", "undeleted test users from previous test \"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10) def test_00100(self):", "(C) <NAME> <<EMAIL>> 2010 # # This program is free software; you can", "(i+1, self.base_dn)) def remove_bundle(self, count): for i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1,", "3 of the License, or # (at your option) any later version. #", "A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details.", "= Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target = SamDB(url=host,", "self.base_dn = ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print \"baseDN: %s\" %", "Attempt %s SEARCH: %.3fs\" % ( x, float(res_search) ) print \"Average Search: %.3fs\"", "Decimal(avg_del) / Decimal(\"3.0\") ) print \"\" def test_00000(self): \"\"\" Remove possibly undeleted test", "sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # # Tests start here #", "def run_bundle(self, num): print \"\\n=== Test ADD/DEL %s user objects ===\\n\" % num", "+ user_dn + \"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user unicodePwd::", "setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self, num): print \"\\n=== Test ADD/DEL %s user objects", "%.3fs\" % float( Decimal(avg_del) / Decimal(\"3.0\") ) print \"\" def test_00000(self): \"\"\" Remove", "GNU General Public License as published by # the Free Software Foundation; either", "This test should be enabled preferably against MS Active Directory. 
It takes quite", "SEARCH: %.3fs\" % ( x, float(res_search) ) print \"Average Search: %.3fs\" % float(", "2, 3]: start = time.time() self.create_bundle(num) res_add = Decimal( str(time.time() - start) )", "self.run_bundle(1000) def _test_10000(self): \"\"\" This test should be enabled preferably against MS Active", "ldb_options = [\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner = SubunitTestRunner()", "the Free Software Foundation; either version 3 of the License, or # (at", "super(SpeedTest, self).setUp() self.ldb_admin = ldb self.base_dn = ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass =", "creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # # Tests start here # class SpeedTest(samba.tests.TestCase): def find_domain_sid(self,", "start) ) avg_del += res_del print \" Attempt %s DEL: %.3fs\" % (", "res_del print \" Attempt %s DEL: %.3fs\" % ( x, float(res_del) ) print", "option) any later version. # # This program is distributed in the hope", "sys import time import base64 from decimal import Decimal sys.path.insert(0, \"bin/python\") import samba", "if \"speedtestuser\" in str(item.dn)] for dn in dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def", "# GNU General Public License for more details. 
# # You should have", "import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as options from ldb import", "self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def", "%s user objects created ===\\n\" % num print \"\\n=== Test search on %s", "creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp)", "parser.add_option_group(options.VersionOptions(parser)) # use command line creds if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts,", "ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): # allow the user to see", "objects but not attributes, all attributes will be filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\"", "sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def", "days). 
\"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils =", "import samba.tests from samba.tests import delete_force from subunit.run import SubunitTestRunner import unittest parser", "for i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_bundle(self, count): for i", "= ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn for item in res", "SubunitTestRunner() rc = 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc = 1 if not runner.run(unittest.makeSuite(AclSearchSpeedTest)).wasSuccessful():", "res_del = Decimal( str(time.time() - start) ) avg_del += res_del print \" Attempt", "from samba.tests import delete_force from subunit.run import SubunitTestRunner import unittest parser = optparse.OptionParser(\"speedtest.py", "self.create_bundle(num) res_add = Decimal( str(time.time() - start) ) avg_add += res_add print \"", "i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_bundle(self, count): for i in", "% num avg_add = Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for x in [1, 2,", "def test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\" This test should be", "print \"\\n=== Test search on %s user objects ===\\n\" % num avg_search =", "time import base64 from decimal import Decimal sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\")", "for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod) print \"\\n=== %s user", "# (at your option) any later version. 
# # This program is distributed", "published by # the Free Software Foundation; either version 3 of the License,", "gensec.FEATURE_SEAL) # # Tests start here # class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res", "for x in [1, 2, 3]: start = time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\",", "self.run_bundle(10) def test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\" This test should", "it and/or modify # it under the terms of the GNU General Public", "dn_list = [item.dn for item in res if \"speedtestuser\" in str(item.dn)] for dn", "objectClass: group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4 url: www.example.com \"\"\"", "x in [1, 2, 3]: start = time.time() self.create_bundle(num) res_add = Decimal( str(time.time()", "will help us compare # Samba4 vs MS Active Directory performance. # Copyright", "float( Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" % (name,", "Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp,", "def find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest,", "target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | 
gensec.FEATURE_SEAL) ldb_target", "for dn in dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp() def", "[1, 2, 3]: start = time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search =", "% host ldb_options = [\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner", "self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" %", "# it under the terms of the GNU General Public License as published", "performance. # Copyright (C) <NAME> <<EMAIL>> 2010 # # This program is free", "%s\" % self.base_dn def create_user(self, user_dn): ldif = \"\"\" dn: \"\"\" + user_dn", "Decimal( str(time.time() - start) ) avg_add += res_add print \" Attempt %s ADD:", "as published by # the Free Software Foundation; either version 3 of the", "create_bundle(self, count): for i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_bundle(self, count):", "+ group_dn + \"\"\" objectClass: group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] + \"\"\" groupType:", "\"\"\" dn: \"\"\" + user_dn + \"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] + \"\"\"", "objects ===\\n\" % num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for i", "% (str(self.user_sid), str(self.user_sid)) for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod) print", "SMB/CIFS implementation. 
# This speed test aims to show difference in execution time", "+ base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn,", "Test search on %s user objects ===\\n\" % num avg_search = Decimal(\"0.0\") for", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General", "License, or # (at your option) any later version. # # This program", "num): print \"\\n=== Test ADD/DEL %s user objects ===\\n\" % num avg_add =", "Decimal( str(time.time() - start) ) avg_search += res_search print \" Attempt %s SEARCH:", "objects. This will help us compare # Samba4 vs MS Active Directory performance.", "for x in [1, 2, 3]: start = time.time() self.create_bundle(num) res_add = Decimal(", "self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb): print \"\\n=== Creating %s user objects", "optparse import sys import time import base64 from decimal import Decimal sys.path.insert(0, \"bin/python\")", "= time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time() - start)", "| gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin)", "# Important unit running information if not \"://\" in host: host = \"ldap://%s\"", "= options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args = parser.parse_args() if len(args) < 1: parser.print_usage() sys.exit(1)", "i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\"", "for item in res if \"speedtestuser\" in str(item.dn)] for dn in dn_list: delete_force(self.ldb_admin,", "( x, float(res_search) ) print \"Average Search: %.3fs\" 
% float( Decimal(avg_search) / Decimal(\"3.0\")", ") print \"Average ADD: %.3fs\" % float( Decimal(avg_add) / Decimal(\"3.0\") ) print \"Average", "\"\"\" objectClass: group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4 url: www.example.com", "host ldb_options = [\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner =", "the GNU General Public License as published by # the Free Software Foundation;", "us compare # Samba4 vs MS Active Directory performance. # Copyright (C) <NAME>", "res_add print \" Attempt %s ADD: %.3fs\" % ( x, float(res_add) ) #", "self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\"))", "avg_search = Decimal(\"0.0\") for x in [1, 2, 3]: start = time.time() res", "\"\"\" objectClass: user unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com", "objects created ===\\n\" % num print \"\\n=== Test search on %s user objects", "unittest parser = optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use", "%s user objects ===\\n\" % num avg_search = Decimal(\"0.0\") for x in [1,", "Search: %.3fs\" % float( Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self, name): return", "setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid", "self.base_dn), mod) print \"\\n=== %s user objects created ===\\n\" % num 
print \"\\n===", "# Copyright (C) <NAME> <<EMAIL>> 2010 # # This program is free software;", "- start) ) avg_add += res_add print \" Attempt %s ADD: %.3fs\" %", "num, _ldb): print \"\\n=== Creating %s user objects ===\\n\" % num self.create_bundle(num) mod", "utf-8 -*- # # Unix SMB/CIFS implementation. # This speed test aims to", "+= res_add print \" Attempt %s ADD: %.3fs\" % ( x, float(res_add) )", "super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid =", "import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import ndr_pack, ndr_unpack", "/ Decimal(\"3.0\") ) print \"\" def test_00000(self): \"\"\" Remove possibly undeleted test users", "credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args = parser.parse_args() if len(args) < 1: parser.print_usage()", "= Decimal(\"0.0\") for x in [1, 2, 3]: start = time.time() res =", "# Tests start here # class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn,", "2, 3]: start = time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal(", "\"baseDN: %s\" % self.base_dn def create_user(self, user_dn): ldif = \"\"\" dn: \"\"\" +", "python # -*- coding: utf-8 -*- # # Unix SMB/CIFS implementation. # This", "License # along with this program. If not, see <http://www.gnu.org/licenses/>. 
# import optparse", "Credentials import samba.tests from samba.tests import delete_force from subunit.run import SubunitTestRunner import unittest", "sys.exit(1) host = args[0] lp = sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL)", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #", "\"Average Search: %.3fs\" % float( Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self, name):", "# use command line creds if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args", "group_dn, desc=None): ldif = \"\"\" dn: \"\"\" + group_dn + \"\"\" objectClass: group", "group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif)", "def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" % (name, self.base_dn) def get_ldb_connection(self, target_username, target_password): creds_tmp", "parser.print_usage() sys.exit(1) host = args[0] lp = sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() |", "show difference in execution time for bulk # creation of user objects. 
This", "%.3fs\" % ( x, float(res_search) ) print \"Average Search: %.3fs\" % float( Decimal(avg_search)", "software; you can redistribute it and/or modify # it under the terms of", "res = ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn for item in", "x in [1, 2, 3]: start = time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE)", "\"\"\" + user_dn + \"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user", "Decimal( str(time.time() - start) ) avg_del += res_del print \" Attempt %s DEL:", "self).setUp() self.ldb_admin = ldb self.base_dn = ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\"", "from samba import gensec, sd_utils from samba.samdb import SamDB from samba.credentials import Credentials", "2010 # # This program is free software; you can redistribute it and/or", "the License, or # (at your option) any later version. 
# # This", "name): return \"CN=%s,CN=Users,%s\" % (name, self.base_dn) def get_ldb_connection(self, target_username, target_password): creds_tmp = Credentials()", "ldif = \"\"\" dn: \"\"\" + group_dn + \"\"\" objectClass: group sAMAccountName: \"\"\"", "user objects ===\\n\" % num avg_add = Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for x", "ldb self.base_dn = ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print \"baseDN: %s\"", "return \"CN=%s,CN=Users,%s\" % (name, self.base_dn) def get_ldb_connection(self, target_username, target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username)", "avg_add += res_add print \" Attempt %s ADD: %.3fs\" % ( x, float(res_add)", "avg_add = Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for x in [1, 2, 3]: start", "ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin = ldb self.base_dn", "lp = sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # # Tests start", "user objects ===\\n\" % num avg_search = Decimal(\"0.0\") for x in [1, 2,", "credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # # Tests start here # class SpeedTest(samba.tests.TestCase): def", "sd_utils from samba.samdb import SamDB from samba.credentials import Credentials import samba.tests from samba.tests", "parser.add_option_group(credopts) opts, args = parser.parse_args() if len(args) < 1: parser.print_usage() sys.exit(1) host =", "be enabled preferably against MS Active Directory. It takes quite the time against", "against Samba4 (1-2 days). 
\"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\",", "\"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as options from ldb", "class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self, num): print \"\\n=== Test ADD/DEL", "super(SpeedTestAddDel, self).setUp() def run_bundle(self, num): print \"\\n=== Test ADD/DEL %s user objects ===\\n\"", "= Decimal(\"0.0\") for x in [1, 2, 3]: start = time.time() self.create_bundle(num) res_add", "gensec, sd_utils from samba.samdb import SamDB from samba.credentials import Credentials import samba.tests from", "be filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod)", "it will be useful, # but WITHOUT ANY WARRANTY; without even the implied", "# This program is free software; you can redistribute it and/or modify #", "ADD: %.3fs\" % float( Decimal(avg_add) / Decimal(\"3.0\") ) print \"Average DEL: %.3fs\" %", "import SamDB from samba.credentials import Credentials import samba.tests from samba.tests import delete_force from", "self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) # Important unit running information if not \"://\" in", "ADD: %.3fs\" % ( x, float(res_add) ) # start = time.time() self.remove_bundle(num) res_del", "self.remove_test_users() def test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\"", "avg_del += res_del print \" Attempt %s DEL: %.3fs\" % ( x, float(res_del)", "from samba.ndr import ndr_pack, ndr_unpack from samba.dcerpc import security from samba.auth import system_session", "General Public License for more details. 
# # You should have received a", "args = parser.parse_args() if len(args) < 1: parser.print_usage() sys.exit(1) host = args[0] lp", "item in res if \"speedtestuser\" in str(item.dn)] for dn in dn_list: delete_force(self.ldb_admin, dn)", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "\"\"\" + group_dn + \"\"\" objectClass: group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] + \"\"\"", "samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as options from ldb import (", ") avg_del += res_del print \" Attempt %s DEL: %.3fs\" % ( x,", "quite the time against Samba4 (1-2 days). \"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self):", "dn: \"\"\" + group_dn + \"\"\" objectClass: group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] +", "print \"Average DEL: %.3fs\" % float( Decimal(avg_del) / Decimal(\"3.0\") ) print \"\" def", "# This speed test aims to show difference in execution time for bulk", "here # class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return", "time against Samba4 (1-2 days). 
\"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp()", "\"Average ADD: %.3fs\" % float( Decimal(avg_add) / Decimal(\"3.0\") ) print \"Average DEL: %.3fs\"", "% float( Decimal(avg_del) / Decimal(\"3.0\") ) print \"\" def test_00000(self): \"\"\" Remove possibly", "ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn for item in res if", "def run_search_bundle(self, num, _ldb): print \"\\n=== Creating %s user objects ===\\n\" % num", "samba import gensec, sd_utils from samba.samdb import SamDB from samba.credentials import Credentials import", "= SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): #", "line creds if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args = parser.parse_args() if", "<http://www.gnu.org/licenses/>. 
# import optparse import sys import time import base64 from decimal import", "# class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0])", "% ( x, float(res_add) ) # start = time.time() self.remove_bundle(num) res_del = Decimal(", "%s DEL: %.3fs\" % ( x, float(res_del) ) print \"Average ADD: %.3fs\" %", "( x, float(res_add) ) # start = time.time() self.remove_bundle(num) res_del = Decimal( str(time.time()", "creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp) return", "gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def", "\"\"\" + group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self,", "the # GNU General Public License for more details. # # You should", "creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target =", "% (i+1, self.base_dn)) def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list", "This will help us compare # Samba4 vs MS Active Directory performance. #", "or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License", "expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin = ldb self.base_dn =", "for more details. # # You should have received a copy of the", "self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user", "not, see <http://www.gnu.org/licenses/>. # import optparse import sys import time import base64 from", "#!/usr/bin/env python # -*- coding: utf-8 -*- # # Unix SMB/CIFS implementation. #", "find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp()", "range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" % self.base_dn,", "range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod) print \"\\n=== %s user objects created ===\\n\"", "( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import ndr_pack, ndr_unpack from", "samba.ndr import ndr_pack, ndr_unpack from samba.dcerpc import security from samba.auth import system_session from", "1: parser.print_usage() sys.exit(1) host = args[0] lp = sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features()", "in [1, 2, 3]: start = time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search", "for i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, 
self.base_dn)) def remove_test_users(self): res =", "/ Decimal(\"3.0\") ) print \"Average DEL: %.3fs\" % float( Decimal(avg_del) / Decimal(\"3.0\") )", "expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time() - start) ) avg_search += res_search print", ") # start = time.time() self.remove_bundle(num) res_del = Decimal( str(time.time() - start) )", "or # (at your option) any later version. # # This program is", "Unix SMB/CIFS implementation. # This speed test aims to show difference in execution", "more details. # # You should have received a copy of the GNU", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU", "\"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) # Important unit", "GNU General Public License for more details. # # You should have received", "under the terms of the GNU General Public License as published by #", "sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin,", "creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target def", "from samba.dcerpc import security from samba.auth import system_session from samba import gensec, sd_utils", "SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self):", "def setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self, num): print 
\"\\n=== Test ADD/DEL %s user", "run_bundle(self, num): print \"\\n=== Test ADD/DEL %s user objects ===\\n\" % num avg_add", "(i+1, self.base_dn)) def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list =", "program is distributed in the hope that it will be useful, # but", "# creation of user objects. This will help us compare # Samba4 vs", "self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb): print \"\\n=== Creating %s user objects ===\\n\" %", "distributed in the hope that it will be useful, # but WITHOUT ANY", "self.user_pass = \"<PASSWORD>@\" print \"baseDN: %s\" % self.base_dn def create_user(self, user_dn): ldif =", "= sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown()", "def _test_10000(self): \"\"\" This test should be enabled preferably against MS Active Directory.", "either version 3 of the License, or # (at your option) any later", "self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb): print \"\\n===", "\"ldap://%s\" % host ldb_options = [\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options)", "SubunitTestRunner import unittest parser = optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser))", "= Decimal( str(time.time() - start) ) avg_del += res_del print \" Attempt %s", "import SubunitTestRunner import unittest parser = optparse.OptionParser(\"speedtest.py [options] 
<host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts)", "% self.base_dn def create_user(self, user_dn): ldif = \"\"\" dn: \"\"\" + user_dn +", "sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\" %", "# the Free Software Foundation; either version 3 of the License, or #", "\"\"\" + user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le'))", "range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_bundle(self, count): for i in range(count): delete_force(self.ldb_admin,", "command line creds if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args = parser.parse_args()", "any later version. # # This program is distributed in the hope that", "Free Software Foundation; either version 3 of the License, or # (at your", "def get_ldb_connection(self, target_username, target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features()", "test \"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def", "import system_session from samba import gensec, sd_utils from samba.samdb import SamDB from samba.credentials", "def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin = ldb self.base_dn = ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid())", "Directory. It takes quite the time against Samba4 (1-2 days). 
\"\"\" self.run_bundle(10000) class", "Creating %s user objects ===\\n\" % num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid),", "it under the terms of the GNU General Public License as published by", "\"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None): ldif = \"\"\" dn:", "MS Active Directory. It takes quite the time against Samba4 (1-2 days). \"\"\"", "i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod) print \"\\n=== %s user objects", "bulk # creation of user objects. This will help us compare # Samba4", "res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time() - start) ) avg_search", "in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod) print \"\\n=== %s user objects created", "user to see objects but not attributes, all attributes will be filtered out", "= ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print \"baseDN: %s\" % self.base_dn", "url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" %", "% self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) # Important unit running information if not \"://\"", "you can redistribute it and/or modify # it under the terms of the", "from samba.credentials import Credentials import samba.tests from samba.tests import delete_force from subunit.run import", "group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for", "+ \"\"\" groupType: 4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for i", "of the GNU General Public License as published by # the 
Free Software", "===\\n\" % num avg_add = Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for x in [1,", "create_user(self, user_dn): ldif = \"\"\" dn: \"\"\" + user_dn + \"\"\" sAMAccountName: \"\"\"", "credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner = SubunitTestRunner() rc = 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful():", "of the License, or # (at your option) any later version. # #", "self.ldb_admin) def test_search2_01000(self): # allow the user to see objects but not attributes,", "PARTICULAR PURPOSE. See the # GNU General Public License for more details. #", "General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. #", "subunit.run import SubunitTestRunner import unittest parser = optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts = options.SambaOptions(parser)", "\" Attempt %s ADD: %.3fs\" % ( x, float(res_add) ) # start =", "= optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command line", "Important unit running information if not \"://\" in host: host = \"ldap://%s\" %", "\"\"\" Remove possibly undeleted test users from previous test \"\"\" self.remove_test_users() def test_00010(self):", "% self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn for item in res if \"speedtestuser\"", "print \"\\n=== %s user objects created ===\\n\" % num print \"\\n=== Test search", "float(res_search) ) print \"Average Search: %.3fs\" % float( Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num)", "session_info=system_session(), lp=lp, options=ldb_options) runner = SubunitTestRunner() rc = 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc", "self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for i in range(count): 
self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def", "[item.dn for item in res if \"speedtestuser\" in str(item.dn)] for dn in dn_list:", "\"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self):", "(i+1, self.base_dn), mod) print \"\\n=== %s user objects created ===\\n\" % num print", "hope that it will be useful, # but WITHOUT ANY WARRANTY; without even", "import optparse import sys import time import base64 from decimal import Decimal sys.path.insert(0,", "self.remove_bundle(num) res_del = Decimal( str(time.time() - start) ) avg_del += res_del print \"", "def test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\" This test should be enabled preferably against", "import time import base64 from decimal import Decimal sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\",", "samba.getopt as options from ldb import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS)", "in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" %", "self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None): ldif =", "parser = optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command", "user objects ===\\n\" % num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for", "LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr 
import ndr_pack, ndr_unpack from samba.dcerpc import security", "\"\" def test_00000(self): \"\"\" Remove possibly undeleted test users from previous test \"\"\"", "\"://\" in host: host = \"ldap://%s\" % host ldb_options = [\"modules:paged_searches\"] ldb =", "def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb): print \"\\n=== Creating", "mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) #", "Active Directory. It takes quite the time against Samba4 (1-2 days). \"\"\" self.run_bundle(10000)", "free software; you can redistribute it and/or modify # it under the terms", "% num avg_search = Decimal(\"0.0\") for x in [1, 2, 3]: start =", "(at your option) any later version. # # This program is distributed in", "preferably against MS Active Directory. 
It takes quite the time against Samba4 (1-2", "This program is distributed in the hope that it will be useful, #", "Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for x in [1, 2, 3]: start = time.time()", "===\\n\" % num print \"\\n=== Test search on %s user objects ===\\n\" %", "samba.tests import delete_force from subunit.run import SubunitTestRunner import unittest parser = optparse.OptionParser(\"speedtest.py [options]", "<host>\") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command line creds if available", "import Credentials import samba.tests from samba.tests import delete_force from subunit.run import SubunitTestRunner import", "useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of #", "General Public License as published by # the Free Software Foundation; either version", "import sys import time import base64 from decimal import Decimal sys.path.insert(0, \"bin/python\") import", "received a copy of the GNU General Public License # along with this", "Tests start here # class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\",", "compare # Samba4 vs MS Active Directory performance. 
# Copyright (C) <NAME> <<EMAIL>>", "\"speedtestuser\" in str(item.dn)] for dn in dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def setUp(self):", "3]: start = time.time() res = _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time()", "x, float(res_search) ) print \"Average Search: %.3fs\" % float( Decimal(avg_search) / Decimal(\"3.0\") )", "samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as options from ldb import ( SCOPE_BASE,", "get_ldb_connection(self, target_username, target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() |", "= options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command line creds if available credopts =", "self).setUp() def run_bundle(self, num): print \"\\n=== Test ADD/DEL %s user objects ===\\n\" %", "by # the Free Software Foundation; either version 3 of the License, or", "ldb import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import ndr_pack,", "= security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print \"baseDN: %s\" % self.base_dn def create_user(self, user_dn):", "If not, see <http://www.gnu.org/licenses/>. 
# import optparse import sys import time import base64", "runner = SubunitTestRunner() rc = 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc = 1 if", "dn: \"\"\" + user_dn + \"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] + \"\"\" objectClass:", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args = parser.parse_args() if len(args) < 1: parser.print_usage() sys.exit(1) host", "on %s user objects ===\\n\" % num avg_search = Decimal(\"0.0\") for x in", "_test_10000(self): \"\"\" This test should be enabled preferably against MS Active Directory. It", "if not \"://\" in host: host = \"ldap://%s\" % host ldb_options = [\"modules:paged_searches\"]", ") print \"Average Search: %.3fs\" % float( Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num) def", "attributes will be filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" %", "= SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner = SubunitTestRunner() rc = 0 if", "self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest,", "Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import", "( x, float(res_del) ) print \"Average ADD: %.3fs\" % float( Decimal(avg_add) / Decimal(\"3.0\")", "Active Directory performance. # Copyright (C) <NAME> <<EMAIL>> 2010 # # This program", "copy of the GNU General Public License # along with this program. 
If", "return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin = ldb self.base_dn = ldb.domain_dn() self.domain_sid", "self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod) print \"\\n=== %s user objects created ===\\n\" %", "along with this program. If not, see <http://www.gnu.org/licenses/>. # import optparse import sys", "base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None):", "\"CN=%s,CN=Users,%s\" % (name, self.base_dn) def get_ldb_connection(self, target_username, target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>)", "desc=None): ldif = \"\"\" dn: \"\"\" + group_dn + \"\"\" objectClass: group sAMAccountName:", "self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_bundle(self, count): for i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\"", "# # This program is distributed in the hope that it will be", "import security from samba.auth import system_session from samba import gensec, sd_utils from samba.samdb", "samba.credentials import Credentials import samba.tests from samba.tests import delete_force from subunit.run import SubunitTestRunner", "Attempt %s ADD: %.3fs\" % ( x, float(res_add) ) # start = time.time()", "\"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num,", "\"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as options from ldb import ( SCOPE_BASE, SCOPE_SUBTREE,", "<NAME> <<EMAIL>> 2010 # # This program is free software; you can redistribute", 
"= ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin = ldb", "delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self, num): print \"\\n===", "Decimal(avg_add) / Decimal(\"3.0\") ) print \"Average DEL: %.3fs\" % float( Decimal(avg_del) / Decimal(\"3.0\")", "print \" Attempt %s DEL: %.3fs\" % ( x, float(res_del) ) print \"Average", "self.base_dn) def get_ldb_connection(self, target_username, target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation())", "coding: utf-8 -*- # # Unix SMB/CIFS implementation. # This speed test aims", "mod) self.run_search_bundle(1000, self.ldb_user) # Important unit running information if not \"://\" in host:", "License as published by # the Free Software Foundation; either version 3 of", "avg_del = Decimal(\"0.0\") for x in [1, 2, 3]: start = time.time() self.create_bundle(num)", "be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of", "ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import ndr_pack, ndr_unpack from samba.dcerpc import security from", "+ \"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None): ldif = \"\"\"", "base64 from decimal import Decimal sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\")", "def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): # allow the user to see objects", "print \" Attempt %s ADD: %.3fs\" 
% ( x, float(res_add) ) # start", "test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): # allow the user to see objects but", "in res if \"speedtestuser\" in str(item.dn)] for dn in dn_list: delete_force(self.ldb_admin, dn) class", "% ( x, float(res_search) ) print \"Average Search: %.3fs\" % float( Decimal(avg_search) /", "/ Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" % (name, self.base_dn) def", "\"\\n=== %s user objects created ===\\n\" % num print \"\\n=== Test search on", "avg_search += res_search print \" Attempt %s SEARCH: %.3fs\" % ( x, float(res_search)", "to show difference in execution time for bulk # creation of user objects.", "is distributed in the hope that it will be useful, # but WITHOUT", "program. If not, see <http://www.gnu.org/licenses/>. # import optparse import sys import time import", "SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import ndr_pack, ndr_unpack from samba.dcerpc", "run_search_bundle(self, num, _ldb): print \"\\n=== Creating %s user objects ===\\n\" % num self.create_bundle(num)", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public", "in the hope that it will be useful, # but WITHOUT ANY WARRANTY;", ") print \"Average DEL: %.3fs\" % float( Decimal(avg_del) / Decimal(\"3.0\") ) print \"\"", "FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for", "DEL: %.3fs\" % ( x, float(res_del) ) print \"Average ADD: %.3fs\" % float(", "dn in dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self,", "use command line creds if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args =", "[\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner = SubunitTestRunner() rc =", "%.3fs\" % float( Decimal(avg_add) / Decimal(\"3.0\") ) print \"Average DEL: %.3fs\" % float(", "and/or modify # it under the terms of the GNU General Public License", "It takes quite the time against Samba4 (1-2 days). \"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest):", "GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>.", "float(res_del) ) print \"Average ADD: %.3fs\" % float( Decimal(avg_add) / Decimal(\"3.0\") ) print", "% (i+1, self.base_dn), mod) print \"\\n=== %s user objects created ===\\n\" % num", "lp=lp) return ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): # allow the user", "should be enabled preferably against MS Active Directory. 
It takes quite the time", "<<EMAIL>> 2010 # # This program is free software; you can redistribute it", "as options from ldb import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from", "\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE)", "# # Tests start here # class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res =", "= Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for x in [1, 2, 3]: start =", "help us compare # Samba4 vs MS Active Directory performance. # Copyright (C)", "users from previous test \"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100) def", "\"\"\" + base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self,", "= time.time() self.create_bundle(num) res_add = Decimal( str(time.time() - start) ) avg_add += res_add", "start = time.time() self.remove_bundle(num) res_del = Decimal( str(time.time() - start) ) avg_del +=", "num print \"\\n=== Test search on %s user objects ===\\n\" % num avg_search", "system_session from samba import gensec, sd_utils from samba.samdb import SamDB from samba.credentials import", "= \"\"\" dn: \"\"\" + group_dn + \"\"\" objectClass: group sAMAccountName: \"\"\" +", "www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1,", "start = time.time() self.create_bundle(num) res_add = Decimal( str(time.time() - start) ) avg_add +=", "the user to see objects but not attributes, all attributes will be filtered", "Decimal(\"3.0\") ) print \"\" def test_00000(self): \"\"\" Remove possibly undeleted test users from", 
"scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin = ldb self.base_dn = ldb.domain_dn()", "can redistribute it and/or modify # it under the terms of the GNU", "from decimal import Decimal sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import", "samba.dcerpc import security from samba.auth import system_session from samba import gensec, sd_utils from", "num avg_search = Decimal(\"0.0\") for x in [1, 2, 3]: start = time.time()", "= _ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time() - start) ) avg_search +=", "import Decimal sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as", "# This program is distributed in the hope that it will be useful,", "# along with this program. If not, see <http://www.gnu.org/licenses/>. 
# import optparse import", "but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or", "test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\" This test should be enabled", "all attributes will be filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\"", "test_00000(self): \"\"\" Remove possibly undeleted test users from previous test \"\"\" self.remove_test_users() def", "samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as options from ldb import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError,", "host = \"ldap://%s\" % host ldb_options = [\"modules:paged_searches\"] ldb = SamDB(host, credentials=creds, session_info=system_session(),", "target_username, target_password): creds_tmp = Credentials() creds_tmp.set_username(target_username) creds_tmp.set_password(<PASSWORD>) creds_tmp.set_domain(creds.get_domain()) creds_tmp.set_realm(creds.get_realm()) creds_tmp.set_workstation(creds.get_workstation()) creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL)", "execution time for bulk # creation of user objects. This will help us", "version 3 of the License, or # (at your option) any later version.", "(str(self.user_sid), str(self.user_sid)) for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn), mod) print \"\\n===", "attributes, all attributes will be filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid))", "# # Unix SMB/CIFS implementation. 
# This speed test aims to show difference", "% (i+1, self.base_dn)) def remove_bundle(self, count): for i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" %", "def test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\" This", "= \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn),", "ndr_pack, ndr_unpack from samba.dcerpc import security from samba.auth import system_session from samba import", "expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn for item in res if \"speedtestuser\" in str(item.dn)]", "terms of the GNU General Public License as published by # the Free", "import base64 from decimal import Decimal sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\",", "self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None): ldif = \"\"\" dn: \"\"\" + group_dn +", "not attributes, all attributes will be filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid),", "_ldb): print \"\\n=== Creating %s user objects ===\\n\" % num self.create_bundle(num) mod =", "of the GNU General Public License # along with this program. If not,", "not \"://\" in host: host = \"ldap://%s\" % host ldb_options = [\"modules:paged_searches\"] ldb", "License for more details. 
# # You should have received a copy of", "available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args = parser.parse_args() if len(args) < 1:", "aims to show difference in execution time for bulk # creation of user", "user_dn + \"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user unicodePwd:: \"\"\"", "security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print \"baseDN: %s\" % self.base_dn def create_user(self, user_dn): ldif", "\" Attempt %s DEL: %.3fs\" % ( x, float(res_del) ) print \"Average ADD:", "sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command line creds if available credopts", "delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb): print \"\\n=== Creating %s user objects ===\\n\"", "with this program. If not, see <http://www.gnu.org/licenses/>. 
# import optparse import sys import", "redistribute it and/or modify # it under the terms of the GNU General", "allow the user to see objects but not attributes, all attributes will be", "modify # it under the terms of the GNU General Public License as", "Foundation; either version 3 of the License, or # (at your option) any", "is free software; you can redistribute it and/or modify # it under the", "= ldb self.base_dn = ldb.domain_dn() self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print \"baseDN:", "print \"\" def test_00000(self): \"\"\" Remove possibly undeleted test users from previous test", "test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\" This test", "num avg_add = Decimal(\"0.0\") avg_del = Decimal(\"0.0\") for x in [1, 2, 3]:", "+ group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count):", "print \"\\n=== Creating %s user objects ===\\n\" % num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\"", "str(time.time() - start) ) avg_search += res_search print \" Attempt %s SEARCH: %.3fs\"", "user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\"", "opts, args = parser.parse_args() if len(args) < 1: parser.print_usage() sys.exit(1) host = args[0]", "\" Attempt %s SEARCH: %.3fs\" % ( x, float(res_search) ) print \"Average Search:", "security from samba.auth import system_session from samba import gensec, sd_utils from samba.samdb import", "decimal import Decimal sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt", "in execution time for bulk # creation of user objects. 
This will help", "print \"baseDN: %s\" % self.base_dn def create_user(self, user_dn): ldif = \"\"\" dn: \"\"\"", "in dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self, num):", "test_search2_01000(self): # allow the user to see objects but not attributes, all attributes", "# allow the user to see objects but not attributes, all attributes will", "self.remove_bundle(num) def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" % (name, self.base_dn) def get_ldb_connection(self, target_username, target_password):", "groupType: 4 url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for i in range(count):", "search on %s user objects ===\\n\" % num avg_search = Decimal(\"0.0\") for x", "ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self):", "from samba.auth import system_session from samba import gensec, sd_utils from samba.samdb import SamDB", "self.get_ldb_connection(\"acltestuser\", \"samba123@\") self.user_sid = self.sd_utils.get_object_sid(self.get_user_dn(\"acltestuser\")) def tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self,", "res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin =", "remove_bundle(self, count): for i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_test_users(self):", "= sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # # Tests start here", "# Samba4 vs MS Active Directory 
performance. # Copyright (C) <NAME> <<EMAIL>> 2010", "See the # GNU General Public License for more details. # # You", "= \"\"\" dn: \"\"\" + user_dn + \"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] +", "+ \"\"\" objectClass: user unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\" url:", "str(item.dn)] for dn in dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp()", "\"\"\" self.ldb_admin.add_ldif(ldif) def create_bundle(self, count): for i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn))", "user unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif)", "dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self, num): print", "but not attributes, all attributes will be filtered out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" %", "+ \"\"\" objectClass: group sAMAccountName: \"\"\" + group_dn.split(\",\")[0][3:] + \"\"\" groupType: 4 url:", "def test_00000(self): \"\"\" Remove possibly undeleted test users from previous test \"\"\" self.remove_test_users()", "= \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) # Important", "_ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_SUBTREE) res_search = Decimal( str(time.time() - start) ) avg_search += res_search", "SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self, num): print \"\\n=== Test ADD/DEL %s", "your option) any later version. 
# # This program is distributed in the", "delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\",", "Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" % (name, self.base_dn) def get_ldb_connection(self,", "= Decimal( str(time.time() - start) ) avg_add += res_add print \" Attempt %s", "% num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for i in range(num):", "ldb = SamDB(host, credentials=creds, session_info=system_session(), lp=lp, options=ldb_options) runner = SubunitTestRunner() rc = 0", "information if not \"://\" in host: host = \"ldap://%s\" % host ldb_options =", "out mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user)", "delete_force from subunit.run import SubunitTestRunner import unittest parser = optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts", "+ \"\"\" sAMAccountName: \"\"\" + user_dn.split(\",\")[0][3:] + \"\"\" objectClass: user unicodePwd:: \"\"\" +", "against MS Active Directory. 
It takes quite the time against Samba4 (1-2 days).", "ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE) return ndr_unpack(security.dom_sid,res[0][\"objectSid\"][0]) def setUp(self): super(SpeedTest, self).setUp() self.ldb_admin", "speed test aims to show difference in execution time for bulk # creation", "float( Decimal(avg_del) / Decimal(\"3.0\") ) print \"\" def test_00000(self): \"\"\" Remove possibly undeleted", "%s user objects ===\\n\" % num self.create_bundle(num) mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid))", "% (str(self.user_sid), str(self.user_sid)) self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) # Important unit running", "+= res_del print \" Attempt %s DEL: %.3fs\" % ( x, float(res_del) )", "DEL: %.3fs\" % float( Decimal(avg_del) / Decimal(\"3.0\") ) print \"\" def test_00000(self): \"\"\"", "dn) class SpeedTestAddDel(SpeedTest): def setUp(self): super(SpeedTestAddDel, self).setUp() def run_bundle(self, num): print \"\\n=== Test", "from samba.samdb import SamDB from samba.credentials import Credentials import samba.tests from samba.tests import", "objects ===\\n\" % num avg_search = Decimal(\"0.0\") for x in [1, 2, 3]:", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "later version. # # This program is distributed in the hope that it", "parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command line creds if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts)", "Attempt %s DEL: %.3fs\" % ( x, float(res_del) ) print \"Average ADD: %.3fs\"", "enabled preferably against MS Active Directory. 
It takes quite the time against Samba4", "sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as options from", "res if \"speedtestuser\" in str(item.dn)] for dn in dn_list: delete_force(self.ldb_admin, dn) class SpeedTestAddDel(SpeedTest):", "self.run_search_bundle(1000, self.ldb_user) # Important unit running information if not \"://\" in host: host", "Public License as published by # the Free Software Foundation; either version 3", "unit running information if not \"://\" in host: host = \"ldap://%s\" % host", "def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin) self.ldb_user = self.get_ldb_connection(\"acltestuser\", \"samba123@\")", "\"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils = sd_utils.SDUtils(self.ldb_admin)", "count): for i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_test_users(self): res", "the time against Samba4 (1-2 days). 
\"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest,", "WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS", "if len(args) < 1: parser.print_usage() sys.exit(1) host = args[0] lp = sambaopts.get_loadparm() creds", "host = args[0] lp = sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) #", "= Decimal( str(time.time() - start) ) avg_search += res_search print \" Attempt %s", "self.base_dn def create_user(self, user_dn): ldif = \"\"\" dn: \"\"\" + user_dn + \"\"\"", "= 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc = 1 if not runner.run(unittest.makeSuite(AclSearchSpeedTest)).wasSuccessful(): rc =", "# # You should have received a copy of the GNU General Public", "return ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): # allow the user to", "self.domain_sid = security.dom_sid(ldb.get_domain_sid()) self.user_pass = \"<PASSWORD>@\" print \"baseDN: %s\" % self.base_dn def create_user(self,", "Decimal(\"0.0\") for x in [1, 2, 3]: start = time.time() self.create_bundle(num) res_add =", "from previous test \"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100) def test_01000(self):", "Samba4 vs MS Active Directory performance. # Copyright (C) <NAME> <<EMAIL>> 2010 #", "%s SEARCH: %.3fs\" % ( x, float(res_search) ) print \"Average Search: %.3fs\" %", "possibly undeleted test users from previous test \"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10) def", "FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more", "test_01000(self): self.run_bundle(1000) def _test_10000(self): \"\"\" This test should be enabled preferably against MS", "creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # # Tests start here # class", "from subunit.run import SubunitTestRunner import unittest parser = optparse.OptionParser(\"speedtest.py [options] <host>\") sambaopts =", "mod = \"(A;;LC;;;%s)(D;;RP;;;%s)\" % (str(self.user_sid), str(self.user_sid)) for i in range(num): self.sd_utils.dacl_add_ace(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1,", ") avg_add += res_add print \" Attempt %s ADD: %.3fs\" % ( x,", "have received a copy of the GNU General Public License # along with", "import samba.getopt as options from ldb import ( SCOPE_BASE, SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM,", "+= res_search print \" Attempt %s SEARCH: %.3fs\" % ( x, float(res_search) )", "SCOPE_SUBTREE, LdbError, ERR_NO_SUCH_OBJECT, ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import ndr_pack, ndr_unpack from samba.dcerpc import", "ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import ndr_pack, ndr_unpack from samba.dcerpc import security from samba.auth import", "| gensec.FEATURE_SEAL) # # Tests start here # class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb):", "Public License for more details. # # You should have received a copy", "options.SambaOptions(parser) parser.add_option_group(sambaopts) parser.add_option_group(options.VersionOptions(parser)) # use command line creds if available credopts = options.CredentialsOptions(parser)", "user objects. 
This will help us compare # Samba4 vs MS Active Directory", "creds_tmp.set_gensec_features(creds_tmp.get_gensec_features() | gensec.FEATURE_SEAL) ldb_target = SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target def test_search_01000(self): self.run_search_bundle(1000,", "lp=lp, options=ldb_options) runner = SubunitTestRunner() rc = 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc =", "options=ldb_options) runner = SubunitTestRunner() rc = 0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc = 1", "def remove_bundle(self, count): for i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def", "(1-2 days). \"\"\" self.run_bundle(10000) class AclSearchSpeedTest(SpeedTest): def setUp(self): super(AclSearchSpeedTest, self).setUp() self.ldb_admin.newuser(\"acltestuser\", \"samba123@\") self.sd_utils", "unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com \"\"\" self.ldb_admin.add_ldif(ldif) def", "Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\" % (name, self.base_dn)", "Remove possibly undeleted test users from previous test \"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10)", "%.3fs\" % float( Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self, name): return \"CN=%s,CN=Users,%s\"", "count): for i in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_bundle(self, count): for", ") avg_search += res_search print \" Attempt %s SEARCH: %.3fs\" % ( x,", "res_search print \" Attempt %s SEARCH: %.3fs\" % ( x, float(res_search) ) print", "vs MS Active Directory performance. 
# Copyright (C) <NAME> <<EMAIL>> 2010 # #", "if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) opts, args = parser.parse_args() if len(args) <", "str(time.time() - start) ) avg_add += res_add print \" Attempt %s ADD: %.3fs\"", "# Unix SMB/CIFS implementation. # This speed test aims to show difference in", "\"\"\" self.remove_test_users() def test_00010(self): self.run_bundle(10) def test_00100(self): self.run_bundle(100) def test_01000(self): self.run_bundle(1000) def _test_10000(self):", "tearDown(self): super(AclSearchSpeedTest, self).tearDown() delete_force(self.ldb_admin, self.get_user_dn(\"acltestuser\")) def run_search_bundle(self, num, _ldb): print \"\\n=== Creating %s", "in range(count): self.create_user(\"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn)) def remove_bundle(self, count): for i in range(count):", "start) ) avg_add += res_add print \" Attempt %s ADD: %.3fs\" % (", "print \" Attempt %s SEARCH: %.3fs\" % ( x, float(res_search) ) print \"Average", "print \"Average Search: %.3fs\" % float( Decimal(avg_search) / Decimal(\"3.0\") ) self.remove_bundle(num) def get_user_dn(self,", "Decimal(\"0.0\") for x in [1, 2, 3]: start = time.time() res = _ldb.search(base=self.base_dn,", "program is free software; you can redistribute it and/or modify # it under", "This program is free software; you can redistribute it and/or modify # it", "%.3fs\" % ( x, float(res_del) ) print \"Average ADD: %.3fs\" % float( Decimal(avg_add)", "Directory performance. 
# Copyright (C) <NAME> <<EMAIL>> 2010 # # This program is", "0 if not runner.run(unittest.makeSuite(SpeedTestAddDel)).wasSuccessful(): rc = 1 if not runner.run(unittest.makeSuite(AclSearchSpeedTest)).wasSuccessful(): rc = 1", "in [1, 2, 3]: start = time.time() self.create_bundle(num) res_add = Decimal( str(time.time() -", "SamDB(url=host, credentials=creds_tmp, lp=lp) return ldb_target def test_search_01000(self): self.run_search_bundle(1000, self.ldb_admin) def test_search2_01000(self): # allow", "will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty", "ERR_UNWILLING_TO_PERFORM, ERR_INSUFFICIENT_ACCESS_RIGHTS) from samba.ndr import ndr_pack, ndr_unpack from samba.dcerpc import security from samba.auth", "% num print \"\\n=== Test search on %s user objects ===\\n\" % num", "Copyright (C) <NAME> <<EMAIL>> 2010 # # This program is free software; you", "-*- coding: utf-8 -*- # # Unix SMB/CIFS implementation. # This speed test", "to see objects but not attributes, all attributes will be filtered out mod", "args[0] lp = sambaopts.get_loadparm() creds = credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # # Tests", "# # This program is free software; you can redistribute it and/or modify", "start here # class SpeedTest(samba.tests.TestCase): def find_domain_sid(self, ldb): res = ldb.search(base=self.base_dn, expression=\"(objectClass=*)\", scope=SCOPE_BASE)", "= time.time() self.remove_bundle(num) res_del = Decimal( str(time.time() - start) ) avg_del += res_del", "time.time() self.remove_bundle(num) res_del = Decimal( str(time.time() - start) ) avg_del += res_del print", "print \"Average ADD: %.3fs\" % float( Decimal(avg_add) / Decimal(\"3.0\") ) print \"Average DEL:", "= credopts.get_credentials(lp) creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL) # # Tests start here # class SpeedTest(samba.tests.TestCase):", "# but WITHOUT ANY WARRANTY; without even the implied 
warranty of # MERCHANTABILITY", "Software Foundation; either version 3 of the License, or # (at your option)", "\"\"\" self.ldb_admin.add_ldif(ldif) def create_group(self, group_dn, desc=None): ldif = \"\"\" dn: \"\"\" + group_dn", "scope=SCOPE_SUBTREE) dn_list = [item.dn for item in res if \"speedtestuser\" in str(item.dn)] for", "objectClass: user unicodePwd:: \"\"\" + base64.b64encode((\"\\\"%s\\\"\" % self.user_pass).encode('utf-16-le')) + \"\"\" url: www.example.com \"\"\"", "x, float(res_del) ) print \"Average ADD: %.3fs\" % float( Decimal(avg_add) / Decimal(\"3.0\") )", "Decimal sys.path.insert(0, \"bin/python\") import samba samba.ensure_external_module(\"testtools\", \"testtools\") samba.ensure_external_module(\"subunit\", \"subunit/python\") import samba.getopt as options", "remove_test_users(self): res = ldb.search(base=\"cn=Users,%s\" % self.base_dn, expression=\"(objectClass=user)\", scope=SCOPE_SUBTREE) dn_list = [item.dn for item", "self.sd_utils.dacl_add_ace(\"CN=Users,%s\" % self.base_dn, mod) self.run_search_bundle(1000, self.ldb_user) # Important unit running information if not", "MS Active Directory performance. # Copyright (C) <NAME> <<EMAIL>> 2010 # # This", "self.base_dn)) def remove_bundle(self, count): for i in range(count): delete_force(self.ldb_admin, \"cn=speedtestuser%d,cn=Users,%s\" % (i+1, self.base_dn))", "import ndr_pack, ndr_unpack from samba.dcerpc import security from samba.auth import system_session from samba" ]
[ "return 'I sit on a Classic Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The", "ClassicChair \"\"\" def sit_on(self) -> str: return 'I sit on a Classic Chair'", "sofa ...') return ClassicSofa() def client_code(factory: FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on()) sofa =", "__future__ import annotations from abc import ABC, abstractmethod class Chair(ABC): \"\"\" product interface", "Chair: ModernChair \"\"\" def sit_on(self) -> str: return 'I sit on a Modern", "and SOfa \"\"\" @abstractmethod def produce_chair(self) -> Chair: pass @abstractmethod def produce_sofa(self) ->", "produce Chair and SOfa \"\"\" @abstractmethod def produce_chair(self) -> Chair: pass @abstractmethod def", "__name__ == '__main__': print('\\r\\n--- I want some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory()) print('\\r\\n--- I", "a Classic Chair' class ModernSofa(Sofa): \"\"\" product implement Sofa: ModernSofa \"\"\" def lie_on(self)", "chair = factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on()) if __name__ == '__main__': print('\\r\\n---", "Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface declares a set", "-> Sofa: print('ModernFurnitureFactory produce sofa ...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement", "Chair: ClassicChair \"\"\" def sit_on(self) -> str: return 'I sit on a Classic", "return 'I sit on a Classic Chair' class ModernSofa(Sofa): \"\"\" product implement Sofa:", "print('\\r\\n--- I want some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory()) print('\\r\\n--- I want some Classic", "import ABC, abstractmethod class Chair(ABC): \"\"\" product interface 1: Chair \"\"\" @abstractmethod def", "product implement Sofa: ClassicSofa \"\"\" def lie_on(self) -> str: return 'I sit on", "ModernChair(Chair): \"\"\" product implement Chair: 
ModernChair \"\"\" def sit_on(self) -> str: return 'I", "Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\"", "implement Chair: ClassicChair \"\"\" def sit_on(self) -> str: return 'I sit on a", "FurnitureFactory to produce true product \"\"\" def produce_chair(self) -> Chair: print('ModernFurnitureFactory produce chair", "FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on()) if __name__ == '__main__':", "from abc import ABC, abstractmethod class Chair(ABC): \"\"\" product interface 1: Chair \"\"\"", "factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on()) if __name__ == '__main__': print('\\r\\n--- I want", "to produce true product \"\"\" def produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce chair ...')", "ClassicSofa() def client_code(factory: FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on()) if", "products. 
家具工厂生成沙发和椅子 Furniture Factory produce Chair and SOfa \"\"\" @abstractmethod def produce_chair(self) ->", "-> str: return 'I sit on a Classic Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口", "a Modern Chair' class ClassicChair(Chair): \"\"\" product implement Chair: ClassicChair \"\"\" def sit_on(self)", "Sofa(ABC): \"\"\" product interface 2: Sofa \"\"\" @abstractmethod def lie_on(self) -> str: pass", "product implement Chair: ClassicChair \"\"\" def sit_on(self) -> str: return 'I sit on", "print('ModernFurnitureFactory produce sofa ...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to", "sit_on(self) -> str: pass class Sofa(ABC): \"\"\" product interface 2: Sofa \"\"\" @abstractmethod", "produce sofa ...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce", "sofa = factory.produce_sofa() print(sofa.lie_on()) if __name__ == '__main__': print('\\r\\n--- I want some Modern", "Demo 家具工厂 \"\"\" from __future__ import annotations from abc import ABC, abstractmethod class", "want some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory()) print('\\r\\n--- I want some Classic Furniture ---\\r\\n')", "<reponame>edgardeng/design-patterns-in-python<gh_stars>0 \"\"\" 抽象工厂 代码实例 Abstract Factory Code Demo 家具工厂 \"\"\" from __future__ import", "Sofa: print('ClassicFurnitureFactory produce sofa ...') return ClassicSofa() def client_code(factory: FurnitureFactory): chair = factory.produce_chair()", "-> str: return 'I sit on a Modern Chair' class ClassicChair(Chair): \"\"\" product", "some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory()) print('\\r\\n--- I want some Classic Furniture ---\\r\\n') client_code(ClassicFurnitureFactory())", "print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on()) if __name__ == '__main__': print('\\r\\n--- I want some", "\"\"\" def lie_on(self) -> str: return 'I 
sit on a Modern Sofa' class", "FurnitureFactory to produce true product \"\"\" def produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce chair", "class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\" def produce_chair(self)", "sit on a Classic Chair' class ModernSofa(Sofa): \"\"\" product implement Sofa: ModernSofa \"\"\"", "product \"\"\" def produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce chair ...') return ClassicChair() def", "\"\"\" product implement Sofa: ModernSofa \"\"\" def lie_on(self) -> str: return 'I sit", "The Abstract Factory interface declares a set of methods that return different abstract", "produce chair ...') return ModernChair() def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce sofa ...')", "def produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce chair ...') return ClassicChair() def produce_sofa(self) ->", "import annotations from abc import ABC, abstractmethod class Chair(ABC): \"\"\" product interface 1:", "def produce_chair(self) -> Chair: print('ModernFurnitureFactory produce chair ...') return ModernChair() def produce_sofa(self) ->", "on a Classic Chair' class ModernSofa(Sofa): \"\"\" product implement Sofa: ModernSofa \"\"\" def", "annotations from abc import ABC, abstractmethod class Chair(ABC): \"\"\" product interface 1: Chair", "that return different abstract products. 
家具工厂生成沙发和椅子 Furniture Factory produce Chair and SOfa \"\"\"", "interface 1: Chair \"\"\" @abstractmethod def sit_on(self) -> str: pass class Sofa(ABC): \"\"\"", "product implement Chair: ModernChair \"\"\" def sit_on(self) -> str: return 'I sit on", "Chair' class ClassicChair(Chair): \"\"\" product implement Chair: ClassicChair \"\"\" def sit_on(self) -> str:", "定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface declares a set of methods that return different", "抽象工厂 代码实例 Abstract Factory Code Demo 家具工厂 \"\"\" from __future__ import annotations from", "pass class ModernChair(Chair): \"\"\" product implement Chair: ModernChair \"\"\" def sit_on(self) -> str:", "'__main__': print('\\r\\n--- I want some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory()) print('\\r\\n--- I want some", "\"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\" def produce_chair(self) -> Chair:", "Modern Sofa' class ClassicSofa(Sofa): \"\"\" product implement Sofa: ClassicSofa \"\"\" def lie_on(self) ->", "@abstractmethod def produce_sofa(self) -> Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to", "def lie_on(self) -> str: return 'I sit on a Classic Sofa' class FurnitureFactory(ABC):", "interface declares a set of methods that return different abstract products. 
家具工厂生成沙发和椅子 Furniture", "on a Modern Sofa' class ClassicSofa(Sofa): \"\"\" product implement Sofa: ClassicSofa \"\"\" def", "sit on a Modern Sofa' class ClassicSofa(Sofa): \"\"\" product implement Sofa: ClassicSofa \"\"\"", "-> Chair: pass @abstractmethod def produce_sofa(self) -> Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类", "def sit_on(self) -> str: pass class Sofa(ABC): \"\"\" product interface 2: Sofa \"\"\"", "@abstractmethod def sit_on(self) -> str: pass class Sofa(ABC): \"\"\" product interface 2: Sofa", "produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce sofa ...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类", "\"\"\" product implement Chair: ClassicChair \"\"\" def sit_on(self) -> str: return 'I sit", "if __name__ == '__main__': print('\\r\\n--- I want some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory()) print('\\r\\n---", "\"\"\" from __future__ import annotations from abc import ABC, abstractmethod class Chair(ABC): \"\"\"", "2: Sofa \"\"\" @abstractmethod def lie_on(self) -> str: pass class ModernChair(Chair): \"\"\" product", "-> str: return 'I sit on a Modern Sofa' class ClassicSofa(Sofa): \"\"\" product", "一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\" def produce_chair(self) -> Chair: print('ModernFurnitureFactory", "Sofa: print('ModernFurnitureFactory produce sofa ...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory", "str: return 'I sit on a Modern Chair' class ClassicChair(Chair): \"\"\" product implement", "SOfa \"\"\" @abstractmethod def produce_chair(self) -> Chair: pass @abstractmethod def produce_sofa(self) -> Sofa:", "class Sofa(ABC): \"\"\" product interface 2: Sofa \"\"\" @abstractmethod def lie_on(self) -> str:", "str: pass class Sofa(ABC): \"\"\" product interface 2: Sofa \"\"\" @abstractmethod def lie_on(self)", "chair ...') return 
ModernChair() def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce sofa ...') return", "a set of methods that return different abstract products. 家具工厂生成沙发和椅子 Furniture Factory produce", "Abstract Factory Code Demo 家具工厂 \"\"\" from __future__ import annotations from abc import", "Abstract Factory interface declares a set of methods that return different abstract products.", "ModernChair \"\"\" def sit_on(self) -> str: return 'I sit on a Modern Chair'", "\"\"\" product implement Sofa: ClassicSofa \"\"\" def lie_on(self) -> str: return 'I sit", "sit on a Modern Chair' class ClassicChair(Chair): \"\"\" product implement Chair: ClassicChair \"\"\"", "一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\" def produce_chair(self) -> Chair: print('ClassicFurnitureFactory", "Chair' class ModernSofa(Sofa): \"\"\" product implement Sofa: ModernSofa \"\"\" def lie_on(self) -> str:", "from __future__ import annotations from abc import ABC, abstractmethod class Chair(ABC): \"\"\" product", "-> str: pass class Sofa(ABC): \"\"\" product interface 2: Sofa \"\"\" @abstractmethod def", "product interface 2: Sofa \"\"\" @abstractmethod def lie_on(self) -> str: pass class ModernChair(Chair):", "Furniture Factory produce Chair and SOfa \"\"\" @abstractmethod def produce_chair(self) -> Chair: pass", "ClassicChair() def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce sofa ...') return ClassicSofa() def client_code(factory:", "on a Classic Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface", "product \"\"\" def produce_chair(self) -> Chair: print('ModernFurnitureFactory produce chair ...') return ModernChair() def", "Factory interface declares a set of methods that return different abstract products. 
家具工厂生成沙发和椅子", "代码实例 Abstract Factory Code Demo 家具工厂 \"\"\" from __future__ import annotations from abc", "\"\"\" product implement Chair: ModernChair \"\"\" def sit_on(self) -> str: return 'I sit", "str: return 'I sit on a Classic Chair' class ModernSofa(Sofa): \"\"\" product implement", "true product \"\"\" def produce_chair(self) -> Chair: print('ModernFurnitureFactory produce chair ...') return ModernChair()", "\"\"\" product interface 1: Chair \"\"\" @abstractmethod def sit_on(self) -> str: pass class", "of methods that return different abstract products. 家具工厂生成沙发和椅子 Furniture Factory produce Chair and", "def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce sofa ...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\"", "pass @abstractmethod def produce_sofa(self) -> Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory", "class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface declares a set of", "return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\"", "ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\" def produce_chair(self) ->", "def client_code(factory: FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on()) if __name__", "abstract products. 
家具工厂生成沙发和椅子 Furniture Factory produce Chair and SOfa \"\"\" @abstractmethod def produce_chair(self)", "def sit_on(self) -> str: return 'I sit on a Modern Chair' class ClassicChair(Chair):", "\"\"\" def sit_on(self) -> str: return 'I sit on a Classic Chair' class", "Sofa: ModernSofa \"\"\" def lie_on(self) -> str: return 'I sit on a Modern", "I want some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory()) print('\\r\\n--- I want some Classic Furniture", "ModernSofa \"\"\" def lie_on(self) -> str: return 'I sit on a Modern Sofa'", "a Classic Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface declares", "\"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface declares a set of methods that", "abc import ABC, abstractmethod class Chair(ABC): \"\"\" product interface 1: Chair \"\"\" @abstractmethod", "class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\" def produce_chair(self)", "return ClassicSofa() def client_code(factory: FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on())", "lie_on(self) -> str: pass class ModernChair(Chair): \"\"\" product implement Chair: ModernChair \"\"\" def", "return 'I sit on a Modern Sofa' class ClassicSofa(Sofa): \"\"\" product implement Sofa:", "\"\"\" def produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce chair ...') return ClassicChair() def produce_sofa(self)", "sit_on(self) -> str: return 'I sit on a Modern Chair' class ClassicChair(Chair): \"\"\"", "-> Chair: print('ClassicFurnitureFactory produce chair ...') return ClassicChair() def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory", "\"\"\" @abstractmethod def sit_on(self) -> str: pass class Sofa(ABC): \"\"\" product interface 2:", "\"\"\" @abstractmethod def produce_chair(self) -> Chair: pass @abstractmethod def produce_sofa(self) -> 
Sofa: pass", "Chair: pass @abstractmethod def produce_sofa(self) -> Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement", "str: return 'I sit on a Modern Sofa' class ClassicSofa(Sofa): \"\"\" product implement", "print('ModernFurnitureFactory produce chair ...') return ModernChair() def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce sofa", "to produce true product \"\"\" def produce_chair(self) -> Chair: print('ModernFurnitureFactory produce chair ...')", "FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface declares a set of methods", "return ClassicChair() def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce sofa ...') return ClassicSofa() def", "def lie_on(self) -> str: pass class ModernChair(Chair): \"\"\" product implement Chair: ModernChair \"\"\"", "str: return 'I sit on a Classic Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品", "interface 2: Sofa \"\"\" @abstractmethod def lie_on(self) -> str: pass class ModernChair(Chair): \"\"\"", "\"\"\" @abstractmethod def lie_on(self) -> str: pass class ModernChair(Chair): \"\"\" product implement Chair:", "def sit_on(self) -> str: return 'I sit on a Classic Chair' class ModernSofa(Sofa):", "sofa ...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true", "methods that return different abstract products. 
家具工厂生成沙发和椅子 Furniture Factory produce Chair and SOfa", "Chair: print('ClassicFurnitureFactory produce chair ...') return ClassicChair() def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce", "-> Chair: print('ModernFurnitureFactory produce chair ...') return ModernChair() def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory", "Chair: print('ModernFurnitureFactory produce chair ...') return ModernChair() def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce", "Code Demo 家具工厂 \"\"\" from __future__ import annotations from abc import ABC, abstractmethod", "print('ClassicFurnitureFactory produce sofa ...') return ClassicSofa() def client_code(factory: FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on())", "'I sit on a Modern Chair' class ClassicChair(Chair): \"\"\" product implement Chair: ClassicChair", "\"\"\" def sit_on(self) -> str: return 'I sit on a Modern Chair' class", "Chair \"\"\" @abstractmethod def sit_on(self) -> str: pass class Sofa(ABC): \"\"\" product interface", "\"\"\" def lie_on(self) -> str: return 'I sit on a Classic Sofa' class", "'I sit on a Classic Chair' class ModernSofa(Sofa): \"\"\" product implement Sofa: ModernSofa", "ClassicSofa(Sofa): \"\"\" product implement Sofa: ClassicSofa \"\"\" def lie_on(self) -> str: return 'I", "sit on a Classic Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory", "Sofa: ClassicSofa \"\"\" def lie_on(self) -> str: return 'I sit on a Classic", "家具工厂 \"\"\" from __future__ import annotations from abc import ABC, abstractmethod class Chair(ABC):", "produce chair ...') return ClassicChair() def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce sofa ...')", "== '__main__': print('\\r\\n--- I want some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory()) print('\\r\\n--- I want", "ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory 
to produce true product \"\"\" def", "produce true product \"\"\" def produce_chair(self) -> Chair: print('ModernFurnitureFactory produce chair ...') return", "-> str: pass class ModernChair(Chair): \"\"\" product implement Chair: ModernChair \"\"\" def sit_on(self)", "implement FurnitureFactory to produce true product \"\"\" def produce_chair(self) -> Chair: print('ModernFurnitureFactory produce", "Factory Code Demo 家具工厂 \"\"\" from __future__ import annotations from abc import ABC,", "str: pass class ModernChair(Chair): \"\"\" product implement Chair: ModernChair \"\"\" def sit_on(self) ->", "Factory produce Chair and SOfa \"\"\" @abstractmethod def produce_chair(self) -> Chair: pass @abstractmethod", "print(sofa.lie_on()) if __name__ == '__main__': print('\\r\\n--- I want some Modern Furniture ---\\r\\n') client_code(ModernFurnitureFactory())", "chair ...') return ClassicChair() def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce sofa ...') return", "Chair(ABC): \"\"\" product interface 1: Chair \"\"\" @abstractmethod def sit_on(self) -> str: pass", "implement Sofa: ModernSofa \"\"\" def lie_on(self) -> str: return 'I sit on a", "lie_on(self) -> str: return 'I sit on a Modern Sofa' class ClassicSofa(Sofa): \"\"\"", "= factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on()) if __name__ == '__main__': print('\\r\\n--- I", "ModernSofa(Sofa): \"\"\" product implement Sofa: ModernSofa \"\"\" def lie_on(self) -> str: return 'I", "@abstractmethod def produce_chair(self) -> Chair: pass @abstractmethod def produce_sofa(self) -> Sofa: pass class", "produce_chair(self) -> Chair: pass @abstractmethod def produce_sofa(self) -> Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\"", "...') return ModernChair() def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce sofa ...') return ModernSofa()", "def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce sofa ...') return 
ClassicSofa() def client_code(factory: FurnitureFactory):", "ClassicChair(Chair): \"\"\" product implement Chair: ClassicChair \"\"\" def sit_on(self) -> str: return 'I", "lie_on(self) -> str: return 'I sit on a Classic Sofa' class FurnitureFactory(ABC): \"\"\"", "ModernChair() def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce sofa ...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory):", "produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce chair ...') return ClassicChair() def produce_sofa(self) -> Sofa:", "-> str: return 'I sit on a Classic Chair' class ModernSofa(Sofa): \"\"\" product", "set of methods that return different abstract products. 家具工厂生成沙发和椅子 Furniture Factory produce Chair", "Classic Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface declares a", "print('ClassicFurnitureFactory produce chair ...') return ClassicChair() def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce sofa", "def lie_on(self) -> str: return 'I sit on a Modern Sofa' class ClassicSofa(Sofa):", "\"\"\" def produce_chair(self) -> Chair: print('ModernFurnitureFactory produce chair ...') return ModernChair() def produce_sofa(self)", "product implement Sofa: ModernSofa \"\"\" def lie_on(self) -> str: return 'I sit on", "sit_on(self) -> str: return 'I sit on a Classic Chair' class ModernSofa(Sofa): \"\"\"", "produce sofa ...') return ClassicSofa() def client_code(factory: FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on()) sofa", "return 'I sit on a Modern Chair' class ClassicChair(Chair): \"\"\" product implement Chair:", "product interface 1: Chair \"\"\" @abstractmethod def sit_on(self) -> str: pass class Sofa(ABC):", "class ClassicSofa(Sofa): \"\"\" product implement Sofa: ClassicSofa \"\"\" def lie_on(self) -> str: return", "Sofa \"\"\" @abstractmethod def lie_on(self) -> str: pass class ModernChair(Chair): \"\"\" product implement", 
"class Chair(ABC): \"\"\" product interface 1: Chair \"\"\" @abstractmethod def sit_on(self) -> str:", "ClassicSofa \"\"\" def lie_on(self) -> str: return 'I sit on a Classic Sofa'", "def produce_chair(self) -> Chair: pass @abstractmethod def produce_sofa(self) -> Sofa: pass class ModernFurnitureFactory(FurnitureFactory):", "implement Sofa: ClassicSofa \"\"\" def lie_on(self) -> str: return 'I sit on a", "一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract Factory interface declares a set of methods that return", "return ModernChair() def produce_sofa(self) -> Sofa: print('ModernFurnitureFactory produce sofa ...') return ModernSofa() class", "家具工厂生成沙发和椅子 Furniture Factory produce Chair and SOfa \"\"\" @abstractmethod def produce_chair(self) -> Chair:", "produce true product \"\"\" def produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce chair ...') return", "implement Chair: ModernChair \"\"\" def sit_on(self) -> str: return 'I sit on a", "class ModernChair(Chair): \"\"\" product implement Chair: ModernChair \"\"\" def sit_on(self) -> str: return", "...') return ClassicSofa() def client_code(factory: FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa()", "Chair and SOfa \"\"\" @abstractmethod def produce_chair(self) -> Chair: pass @abstractmethod def produce_sofa(self)", "Modern Chair' class ClassicChair(Chair): \"\"\" product implement Chair: ClassicChair \"\"\" def sit_on(self) ->", "ABC, abstractmethod class Chair(ABC): \"\"\" product interface 1: Chair \"\"\" @abstractmethod def sit_on(self)", "\"\"\" product interface 2: Sofa \"\"\" @abstractmethod def lie_on(self) -> str: pass class", "a Modern Sofa' class ClassicSofa(Sofa): \"\"\" product implement Sofa: ClassicSofa \"\"\" def lie_on(self)", "...') return ClassicChair() def produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce sofa ...') return ClassicSofa()", "@abstractmethod def lie_on(self) -> str: pass class ModernChair(Chair): \"\"\" 
product implement Chair: ModernChair", "ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\" def produce_chair(self) ->", "produce_chair(self) -> Chair: print('ModernFurnitureFactory produce chair ...') return ModernChair() def produce_sofa(self) -> Sofa:", "produce_sofa(self) -> Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true", "Classic Chair' class ModernSofa(Sofa): \"\"\" product implement Sofa: ModernSofa \"\"\" def lie_on(self) ->", "produce_sofa(self) -> Sofa: print('ClassicFurnitureFactory produce sofa ...') return ClassicSofa() def client_code(factory: FurnitureFactory): chair", "factory.produce_sofa() print(sofa.lie_on()) if __name__ == '__main__': print('\\r\\n--- I want some Modern Furniture ---\\r\\n')", "pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product \"\"\" def", "different abstract products. 家具工厂生成沙发和椅子 Furniture Factory produce Chair and SOfa \"\"\" @abstractmethod def", "return different abstract products. 
家具工厂生成沙发和椅子 Furniture Factory produce Chair and SOfa \"\"\" @abstractmethod", "\"\"\" 抽象工厂 代码实例 Abstract Factory Code Demo 家具工厂 \"\"\" from __future__ import annotations", "-> Sofa: print('ClassicFurnitureFactory produce sofa ...') return ClassicSofa() def client_code(factory: FurnitureFactory): chair =", "...') return ModernSofa() class ClassicFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product", "= factory.produce_sofa() print(sofa.lie_on()) if __name__ == '__main__': print('\\r\\n--- I want some Modern Furniture", "implement FurnitureFactory to produce true product \"\"\" def produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce", "class ClassicChair(Chair): \"\"\" product implement Chair: ClassicChair \"\"\" def sit_on(self) -> str: return", "client_code(factory: FurnitureFactory): chair = factory.produce_chair() print(chair.sit_on()) sofa = factory.produce_sofa() print(sofa.lie_on()) if __name__ ==", "def produce_sofa(self) -> Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce", "pass class Sofa(ABC): \"\"\" product interface 2: Sofa \"\"\" @abstractmethod def lie_on(self) ->", "'I sit on a Classic Sofa' class FurnitureFactory(ABC): \"\"\" 一个抽象工厂接口 定义了一系列方法,用来返回不同的抽象产品 The Abstract", "true product \"\"\" def produce_chair(self) -> Chair: print('ClassicFurnitureFactory produce chair ...') return ClassicChair()", "'I sit on a Modern Sofa' class ClassicSofa(Sofa): \"\"\" product implement Sofa: ClassicSofa", "Sofa' class ClassicSofa(Sofa): \"\"\" product implement Sofa: ClassicSofa \"\"\" def lie_on(self) -> str:", "abstractmethod class Chair(ABC): \"\"\" product interface 1: Chair \"\"\" @abstractmethod def sit_on(self) ->", "declares a set of methods that return different abstract products. 
家具工厂生成沙发和椅子 Furniture Factory", "1: Chair \"\"\" @abstractmethod def sit_on(self) -> str: pass class Sofa(ABC): \"\"\" product", "on a Modern Chair' class ClassicChair(Chair): \"\"\" product implement Chair: ClassicChair \"\"\" def", "class ModernSofa(Sofa): \"\"\" product implement Sofa: ModernSofa \"\"\" def lie_on(self) -> str: return", "-> Sofa: pass class ModernFurnitureFactory(FurnitureFactory): \"\"\" 一个抽象工厂的实现类 implement FurnitureFactory to produce true product" ]
[ "django.template import loader from django.shortcuts import render from django.http import Http404 # Create", "from django.http import Http404 # Create your views here. def index(request): return render(request=request,", "from django.shortcuts import render, get_object_or_404 from django.http import HttpResponse from django.template import loader", "get_object_or_404 from django.http import HttpResponse from django.template import loader from django.shortcuts import render", "django.shortcuts import render from django.http import Http404 # Create your views here. def", "import render, get_object_or_404 from django.http import HttpResponse from django.template import loader from django.shortcuts", "import render from django.http import Http404 # Create your views here. def index(request):", "HttpResponse from django.template import loader from django.shortcuts import render from django.http import Http404", "django.http import Http404 # Create your views here. def index(request): return render(request=request, template_name='homepage.html')", "render, get_object_or_404 from django.http import HttpResponse from django.template import loader from django.shortcuts import", "from django.http import HttpResponse from django.template import loader from django.shortcuts import render from", "loader from django.shortcuts import render from django.http import Http404 # Create your views", "from django.template import loader from django.shortcuts import render from django.http import Http404 #", "django.shortcuts import render, get_object_or_404 from django.http import HttpResponse from django.template import loader from", "django.http import HttpResponse from django.template import loader from django.shortcuts import render from django.http", "import loader from django.shortcuts import render from django.http import Http404 # Create your", "from django.shortcuts import render from django.http import Http404 # Create your views here.", "import HttpResponse from 
django.template import loader from django.shortcuts import render from django.http import", "render from django.http import Http404 # Create your views here. def index(request): return" ]
[ "cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#,", "= 'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) setup(", "from distutils.core import setup from Cython.Build import cythonize import numpy from distutils.extension import", "'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) setup( name", "'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) ext_modules =", "distutils.extension import Extension from Cython.Distutils import build_ext from Cython.Compiler import Options print(\"numpy.get_include() =", "#! /usr/bin/env python3 # -*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright", "= [numpy.get_include()] include_dirs = [numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"],", ") ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs", "setup( name = 'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()]", "import setup from Cython.Build import cythonize import numpy from distutils.extension import Extension from", "# Distributed under terms of the MIT license. 
from distutils.core import setup from", "[\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup( #name", "-*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright 2019.12.19 <NAME> # Distributed", "name = 'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] )", "cythonize import numpy from distutils.extension import Extension from Cython.Distutils import build_ext from Cython.Compiler", "of the MIT license. from distutils.core import setup from Cython.Build import cythonize import", "setup( name = 'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()]", "Copyright 2019.12.19 <NAME> # Distributed under terms of the MIT license. from distutils.core", "import build_ext from Cython.Compiler import Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name = 'calcgau',", "include_dirs = [numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext':", "setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup( #name = 'calcRCMC', #ext_modules = cythonize('calcRCMC.pyx'),", "coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright 2019.12.19 <NAME> # Distributed under", "distutils.core import setup from Cython.Build import cythonize import numpy from distutils.extension import Extension", "python3 # -*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright 2019.12.19 <NAME>", "from Cython.Distutils import build_ext from Cython.Compiler import Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name", "vim:fenc=utf-8 # # Copyright 2019.12.19 <NAME> # Distributed under terms 
of the MIT", "= 'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) ext_modules", "numpy from distutils.extension import Extension from Cython.Distutils import build_ext from Cython.Compiler import Options", "%s\"%numpy.get_include()) setup( name = 'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs =", "include_dirs = [numpy.get_include()] ) setup( name = 'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path =", "<NAME> # Distributed under terms of the MIT license. from distutils.core import setup", "= cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"],", "= [numpy.get_include()]) #setup( #name = 'calcRCMC', #ext_modules = cythonize('calcRCMC.pyx'), ##include_path = [numpy.get_include()] #include_dirs", "= [numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext},", "/usr/bin/env python3 # -*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright 2019.12.19", "#setup( #name = 'calcRCMC', #ext_modules = cythonize('calcRCMC.pyx'), ##include_path = [numpy.get_include()] #include_dirs = [numpy.get_include()]", "# # Copyright 2019.12.19 <NAME> # Distributed under terms of the MIT license.", "license. 
from distutils.core import setup from Cython.Build import cythonize import numpy from distutils.extension", "Extension from Cython.Distutils import build_ext from Cython.Compiler import Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup(", "[Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup(", "Cython.Build import cythonize import numpy from distutils.extension import Extension from Cython.Distutils import build_ext", "#Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup( #name = 'calcRCMC',", "the MIT license. from distutils.core import setup from Cython.Build import cythonize import numpy", "#include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) setup( name = 'calcVES', ext_modules =", "= %s\"%numpy.get_include()) setup( name = 'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs", "Cython.Distutils import build_ext from Cython.Compiler import Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name =", "#include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\",", "import cythonize import numpy from distutils.extension import Extension from Cython.Distutils import build_ext from", "Cython.Compiler import Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name = 'calcgau', ext_modules = cythonize('calcgau.pyx'),", "setup from Cython.Build import cythonize import numpy from distutils.extension import Extension from Cython.Distutils", "# Copyright 2019.12.19 <NAME> # Distributed under terms of the MIT license. 
from", "import Extension from Cython.Distutils import build_ext from Cython.Compiler import Options print(\"numpy.get_include() = %s\"%numpy.get_include())", "ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup( #name = 'calcRCMC', #ext_modules = cythonize('calcRCMC.pyx'), ##include_path =", "build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup( #name = 'calcRCMC', #ext_modules = cythonize('calcRCMC.pyx'), ##include_path", "#name = 'calcRCMC', #ext_modules = cythonize('calcRCMC.pyx'), ##include_path = [numpy.get_include()] #include_dirs = [numpy.get_include()] ##)", "# vim:fenc=utf-8 # # Copyright 2019.12.19 <NAME> # Distributed under terms of the", ") setup( name = 'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] include_dirs =", "= cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) setup( name = 'calcVES',", "print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name = 'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path = [numpy.get_include()]", "utf-8 -*- # vim:fenc=utf-8 # # Copyright 2019.12.19 <NAME> # Distributed under terms", "= [numpy.get_include()] include_dirs = [numpy.get_include()] ) setup( name = 'calcVES', ext_modules = cythonize('calcVES.pyx'),", "[numpy.get_include()]) #setup( #name = 'calcRCMC', #ext_modules = cythonize('calcRCMC.pyx'), ##include_path = [numpy.get_include()] #include_dirs =", "include_dirs = [numpy.get_include()]) #setup( #name = 'calcRCMC', #ext_modules = cythonize('calcRCMC.pyx'), ##include_path = [numpy.get_include()]", "= [numpy.get_include()] ) setup( name = 'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path = [numpy.get_include()]", "-*- # vim:fenc=utf-8 # # Copyright 2019.12.19 <NAME> # Distributed under terms of", "[numpy.get_include()] ) setup( name = 'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] 
include_dirs", "ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs =", "ext_modules = cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\",", "ext_modules = cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) setup( name =", "import numpy from distutils.extension import Extension from Cython.Distutils import build_ext from Cython.Compiler import", "[numpy.get_include()] include_dirs = [numpy.get_include()] ) setup( name = 'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path", "Distributed under terms of the MIT license. from distutils.core import setup from Cython.Build", "import Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name = 'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path", "= [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()])", "# -*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright 2019.12.19 <NAME> #", "[\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup( #name = 'calcRCMC', #ext_modules", "[numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules,", "build_ext from Cython.Compiler import Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name = 'calcgau', ext_modules", "language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], 
language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup( #name =", "from Cython.Build import cythonize import numpy from distutils.extension import Extension from Cython.Distutils import", "Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name = 'calcgau', ext_modules = cythonize('calcgau.pyx'), #include_path =", "name = 'calcVES', ext_modules = cythonize('calcVES.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] )", "language=\"c++\")] setup(cmdclass={'build_ext': build_ext}, ext_modules=ext_modules, include_dirs = [numpy.get_include()]) #setup( #name = 'calcRCMC', #ext_modules =", "from Cython.Compiler import Options print(\"numpy.get_include() = %s\"%numpy.get_include()) setup( name = 'calcgau', ext_modules =", "from distutils.extension import Extension from Cython.Distutils import build_ext from Cython.Compiler import Options print(\"numpy.get_include()", "[numpy.get_include()] include_dirs = [numpy.get_include()] ) ext_modules = [Extension(\"calcRCMC\", [\"calcRCMC.pyx\"], language=\"c++\")]#, #Extension(\"module2\", [\"module2.pyx\"], language=\"c++\")]", "<gh_stars>1-10 #! /usr/bin/env python3 # -*- coding: utf-8 -*- # vim:fenc=utf-8 # #", "under terms of the MIT license. from distutils.core import setup from Cython.Build import", "terms of the MIT license. from distutils.core import setup from Cython.Build import cythonize", "2019.12.19 <NAME> # Distributed under terms of the MIT license. from distutils.core import", "cythonize('calcgau.pyx'), #include_path = [numpy.get_include()] include_dirs = [numpy.get_include()] ) setup( name = 'calcVES', ext_modules", "MIT license. from distutils.core import setup from Cython.Build import cythonize import numpy from" ]
[ "`acdh_geonames_utils` package.\"\"\" def setUp(self): \"\"\"Set up test fixtures, if any.\"\"\" def tearDown(self): \"\"\"Tear", "of zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test loading file into", "import os import unittest from click.testing import CliRunner from acdh_geonames_utils import acdh_geonames_utils as", "self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad)", "'YU' bad_country_code = 'BAAAD' good_ft_code = \"en\" bad_ft_code = \"de\" TEST_GN_FILE = os.path.join(", "cli good_country_code = 'YU' bad_country_code = 'BAAAD' good_ft_code = \"en\" bad_ft_code = \"de\"", "self.assertTrue(good != \"\") self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad =", "= 'YU' bad_country_code = 'BAAAD' good_ft_code = \"en\" bad_ft_code = \"de\" TEST_GN_FILE =", "test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner = CliRunner() result = runner.invoke(cli.main) assert result.exit_code ==", "= gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test loading download into pandas.DataFrame\"\"\" good_df =", "self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good !=", "gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code)", "unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) 
self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test", "test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the", "def test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self):", "= runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert '--help Show this message and", "self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad", "setUp(self): \"\"\"Set up test fixtures, if any.\"\"\" def tearDown(self): \"\"\"Tear down test fixtures,", "= gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\" good =", "\"\"\"Test the CLI.\"\"\" runner = CliRunner() result = runner.invoke(cli.main) assert result.exit_code == 0", "= gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner", "bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner = CliRunner()", "gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test loading file into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE)", "test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code) 
self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad,", "\"\") def test_004_file_to_df(self): \"\"\"Test loading file into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356)", "bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\" good", "def tearDown(self): \"\"\"Tear down test fixtures, if any.\"\"\" def test_001_download(self): \"\"\"Test download of", "as gn from acdh_geonames_utils import cli good_country_code = 'YU' bad_country_code = 'BAAAD' good_ft_code", "def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner = CliRunner() result = runner.invoke(cli.main) assert result.exit_code", "down test fixtures, if any.\"\"\" def test_001_download(self): \"\"\"Test download of zip.\"\"\" good =", "in result.output help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert '--help Show", "self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad,", "gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner =", "CliRunner() result = runner.invoke(cli.main) assert result.exit_code == 0 assert 'acdh_geonames_utils.cli.main' in result.output help_result", "runner = CliRunner() result = runner.invoke(cli.main) assert result.exit_code == 0 assert 'acdh_geonames_utils.cli.main' in", "assert 'acdh_geonames_utils.cli.main' in result.output help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert", "== 0 assert 'acdh_geonames_utils.cli.main' in result.output help_result = 
runner.invoke(cli.main, ['--help']) assert help_result.exit_code ==", "for `acdh_geonames_utils` package.\"\"\" def setUp(self): \"\"\"Set up test fixtures, if any.\"\"\" def tearDown(self):", "= os.path.join( \"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def setUp(self):", "os.path.join( \"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def setUp(self): \"\"\"Set", "import acdh_geonames_utils as gn from acdh_geonames_utils import cli good_country_code = 'YU' bad_country_code =", "1) self.assertFalse(bad_df) def test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\")", "good = gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad == \"\") def", "\"\"\"Tests for `acdh_geonames_utils` package.\"\"\" import os import unittest from click.testing import CliRunner from", "gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner = CliRunner() result =", "\"en\" bad_ft_code = \"de\" TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests", "runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert '--help Show this message and exit.'", "= gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test unzipping of", "os import unittest from click.testing import CliRunner from acdh_geonames_utils import acdh_geonames_utils as gn", "9356) def test_005_dl_to_df(self): \"\"\"Test loading download into 
pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df =", "gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good", "self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test loading download into pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df", "= gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self):", "into pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def test_006_dl_ft(self):", "\"\") def test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code)", "bad_country_code = 'BAAAD' good_ft_code = \"en\" bad_ft_code = \"de\" TEST_GN_FILE = os.path.join( \"./fixtures\",", "result = runner.invoke(cli.main) assert result.exit_code == 0 assert 'acdh_geonames_utils.cli.main' in result.output help_result =", "gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test loading download into pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU')", "test fixtures, if any.\"\"\" def test_001_download(self): \"\"\"Test download of zip.\"\"\" good = gn.download_country_zip(good_country_code)", "up test fixtures, if any.\"\"\" def tearDown(self): \"\"\"Tear down test fixtures, if any.\"\"\"", "= gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\" bad =", "= gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def 
test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code)", "download into pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def", "test fixtures, if any.\"\"\" def tearDown(self): \"\"\"Tear down test fixtures, if any.\"\"\" def", "def test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad ==", "['--help']) assert help_result.exit_code == 0 assert '--help Show this message and exit.' in", "= \"en\" bad_ft_code = \"de\" TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase):", "import cli good_country_code = 'YU' bad_country_code = 'BAAAD' good_ft_code = \"en\" bad_ft_code =", "def setUp(self): \"\"\"Set up test fixtures, if any.\"\"\" def tearDown(self): \"\"\"Tear down test", "good = gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\"", "any.\"\"\" def tearDown(self): \"\"\"Tear down test fixtures, if any.\"\"\" def test_001_download(self): \"\"\"Test download", "good_ft_code = \"en\" bad_ft_code = \"de\" TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\" ) class", "good = gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test unzipping", "acdh_geonames_utils import cli good_country_code = 'YU' bad_country_code = 'BAAAD' good_ft_code = \"en\" bad_ft_code", "for `acdh_geonames_utils` package.\"\"\" import os import unittest from click.testing import CliRunner from acdh_geonames_utils", "gn.download_and_unzip_country_zip(good_country_code) bad = 
gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\"", "bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test loading file into pandas.DataFrame\"\"\" df", "def test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test", "\"\"\"Test unzipping of zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test loading", "assert result.exit_code == 0 assert 'acdh_geonames_utils.cli.main' in result.output help_result = runner.invoke(cli.main, ['--help']) assert", "`acdh_geonames_utils` package.\"\"\" import os import unittest from click.testing import CliRunner from acdh_geonames_utils import", ") class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def setUp(self): \"\"\"Set up test fixtures,", "result.output help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert '--help Show this", "unittest from click.testing import CliRunner from acdh_geonames_utils import acdh_geonames_utils as gn from acdh_geonames_utils", "def test_001_download(self): \"\"\"Test download of zip.\"\"\" good = gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\"))", "gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad", "fixtures, if any.\"\"\" def test_001_download(self): \"\"\"Test download of zip.\"\"\" good = gn.download_country_zip(good_country_code) bad", "gn.download_country_zip(good_country_code) bad = 
gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\"", "import unittest from click.testing import CliRunner from acdh_geonames_utils import acdh_geonames_utils as gn from", "gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\" bad = gn.unzip_country_zip(\"\")", "loading download into pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df)", "loading file into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test loading", "bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code) bad =", "#!/usr/bin/env python \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" import os import unittest from click.testing import", "test_005_dl_to_df(self): \"\"\"Test loading download into pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df),", "download and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def", "acdh_geonames_utils import acdh_geonames_utils as gn from acdh_geonames_utils import cli good_country_code = 'YU' bad_country_code", "gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self): good", "\"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def 
setUp(self): \"\"\"Set up test fixtures, if any.\"\"\" def", "CliRunner from acdh_geonames_utils import acdh_geonames_utils as gn from acdh_geonames_utils import cli good_country_code =", "bad = gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\" bad", "gn from acdh_geonames_utils import cli good_country_code = 'YU' bad_country_code = 'BAAAD' good_ft_code =", "'acdh_geonames_utils.cli.main' in result.output help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert '--help", "test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad == \"\")", "= gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner = CliRunner() result", "the CLI.\"\"\" runner = CliRunner() result = runner.invoke(cli.main) assert result.exit_code == 0 assert", "TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def setUp(self): \"\"\"Set up test fixtures, if any.\"\"\"", "= gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test download and", "self.assertEqual(bad, \"\") def test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\")", "= \"de\" TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils`", "zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test loading file into pandas.DataFrame\"\"\"", 
"self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test loading file into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df),", "= gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code)", "from click.testing import CliRunner from acdh_geonames_utils import acdh_geonames_utils as gn from acdh_geonames_utils import", "if any.\"\"\" def test_001_download(self): \"\"\"Test download of zip.\"\"\" good = gn.download_country_zip(good_country_code) bad =", "import CliRunner from acdh_geonames_utils import acdh_geonames_utils as gn from acdh_geonames_utils import cli good_country_code", "help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert '--help Show this message", "test_004_file_to_df(self): \"\"\"Test loading file into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self):", "python \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" import os import unittest from click.testing import CliRunner", "good_df = gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def test_006_dl_ft(self): good =", "gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def test_006_dl_ft(self): good = gn.dl_feature_codes(good_ft_code) bad", "package.\"\"\" import os import unittest from click.testing import CliRunner from acdh_geonames_utils import acdh_geonames_utils", "file into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test loading download", "= gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self): good = 
gn.feature_codes_df(good_ft_code)", "\"\"\"Test download of zip.\"\"\" good = gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\")", "tearDown(self): \"\"\"Tear down test fixtures, if any.\"\"\" def test_001_download(self): \"\"\"Test download of zip.\"\"\"", "package.\"\"\" def setUp(self): \"\"\"Set up test fixtures, if any.\"\"\" def tearDown(self): \"\"\"Tear down", "== \"\") def test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def", "if any.\"\"\" def tearDown(self): \"\"\"Tear down test fixtures, if any.\"\"\" def test_001_download(self): \"\"\"Test", "TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def", "runner.invoke(cli.main) assert result.exit_code == 0 assert 'acdh_geonames_utils.cli.main' in result.output help_result = runner.invoke(cli.main, ['--help'])", "result.exit_code == 0 assert 'acdh_geonames_utils.cli.main' in result.output help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code", "self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner = CliRunner() result = runner.invoke(cli.main)", "pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test loading download into pandas.DataFrame\"\"\"", "\"\") self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good)", "bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self): good =", "download of zip.\"\"\" good = 
gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def", "df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test loading download into pandas.DataFrame\"\"\" good_df", "bad_ft_code = \"de\" TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for", "test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test", "\"\") def test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code) self.assertIsNotNone(good) self.assertIsNone(bad) def test_command_line_interface(self):", "test_001_download(self): \"\"\"Test download of zip.\"\"\" good = gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad,", "acdh_geonames_utils as gn from acdh_geonames_utils import cli good_country_code = 'YU' bad_country_code = 'BAAAD'", "good = gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test download", "\"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def setUp(self): \"\"\"Set up", "\"\") def test_003_unzip(self): \"\"\"Test unzipping of zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def", "of zip.\"\"\" good = gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def 
test_002_download_and_unzip(self):", "into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test loading download into", "and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\") def test_003_unzip(self):", "from acdh_geonames_utils import acdh_geonames_utils as gn from acdh_geonames_utils import cli good_country_code = 'YU'", "0 assert 'acdh_geonames_utils.cli.main' in result.output help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0", "\"\"\"Test loading file into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def test_005_dl_to_df(self): \"\"\"Test", "CLI.\"\"\" runner = CliRunner() result = runner.invoke(cli.main) assert result.exit_code == 0 assert 'acdh_geonames_utils.cli.main'", "click.testing import CliRunner from acdh_geonames_utils import acdh_geonames_utils as gn from acdh_geonames_utils import cli", "'BAAAD' good_ft_code = \"en\" bad_ft_code = \"de\" TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\" )", "def test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\"))", "assert help_result.exit_code == 0 assert '--help Show this message and exit.' 
in help_result.output", "zip.\"\"\" good = gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.zip\")) self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test", "def test_005_dl_to_df(self): \"\"\"Test loading download into pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU')", "= runner.invoke(cli.main) assert result.exit_code == 0 assert 'acdh_geonames_utils.cli.main' in result.output help_result = runner.invoke(cli.main,", "= CliRunner() result = runner.invoke(cli.main) assert result.exit_code == 0 assert 'acdh_geonames_utils.cli.main' in result.output", "\"de\" TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\"", "self.assertIsNone(bad) def test_command_line_interface(self): \"\"\"Test the CLI.\"\"\" runner = CliRunner() result = runner.invoke(cli.main) assert", "= 'BAAAD' good_ft_code = \"en\" bad_ft_code = \"de\" TEST_GN_FILE = os.path.join( \"./fixtures\", \"AL.txt\"", "any.\"\"\" def test_001_download(self): \"\"\"Test download of zip.\"\"\" good = gn.download_country_zip(good_country_code) bad = gn.download_country_zip(bad_country_code)", "pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1) self.assertFalse(bad_df) def test_006_dl_ft(self): good", "class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def setUp(self): \"\"\"Set up test fixtures, if", "from acdh_geonames_utils import cli good_country_code = 'YU' bad_country_code = 'BAAAD' good_ft_code = \"en\"", "unzipping of zip.\"\"\" bad = gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test loading file", "self.assertFalse(bad_df) def test_006_dl_ft(self): good = 
gn.dl_feature_codes(good_ft_code) bad = gn.dl_feature_codes(bad_ft_code) self.assertTrue(good != \"\") self.assertTrue(bad", "!= \"\") self.assertTrue(bad == \"\") def test_007_dl_ft_as_df(self): good = gn.feature_codes_df(good_ft_code) bad = gn.feature_codes_df(bad_ft_code)", "\"\"\"Test download and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad = gn.download_and_unzip_country_zip(bad_country_code) self.assertTrue(good.endswith(f\"{good_country_code}.txt\")) self.assertEqual(bad, \"\")", "fixtures, if any.\"\"\" def tearDown(self): \"\"\"Tear down test fixtures, if any.\"\"\" def test_001_download(self):", "good_country_code = 'YU' bad_country_code = 'BAAAD' good_ft_code = \"en\" bad_ft_code = \"de\" TEST_GN_FILE", "\"AL.txt\" ) class TestAcdh_geonames_utils(unittest.TestCase): \"\"\"Tests for `acdh_geonames_utils` package.\"\"\" def setUp(self): \"\"\"Set up test", "\"\"\"Test loading download into pandas.DataFrame\"\"\" good_df = gn.download_to_df('YU') bad_df = gn.download_to_df('YUUU') self.assertEqual(len(good_df), 1)", "= gn.unzip_country_zip(\"\") self.assertEqual(bad, \"\") def test_004_file_to_df(self): \"\"\"Test loading file into pandas.DataFrame\"\"\" df =", "\"\"\"Tear down test fixtures, if any.\"\"\" def test_001_download(self): \"\"\"Test download of zip.\"\"\" good", "\"\"\"Set up test fixtures, if any.\"\"\" def tearDown(self): \"\"\"Tear down test fixtures, if", "self.assertEqual(bad, \"\") def test_002_download_and_unzip(self): \"\"\"Test download and unzip.\"\"\" good = gn.download_and_unzip_country_zip(good_country_code) bad =", "def test_004_file_to_df(self): \"\"\"Test loading file into pandas.DataFrame\"\"\" df = gn.countries_as_df(TEST_GN_FILE) self.assertEqual(len(df), 9356) def" ]
[ "while len(doUpdate) > 0: lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate = [] lock.release() photoGroups", "continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s -> %s\",", "*doUpdate] lock.release() return True return False def loopThroughAllPhotos(): global doUpdate global firstRun doUpdateTemp", "0: toDo = False # loop through all elements for photo in elements:", "Map: %s -> %s\", src, key) plexData[src] = key # Update the pics", "config import ppTagConfig logger = exif_log.get_logger() doUpdate = [] lock = None firstRun", "based on date if fetchAndProcessByDate(): # failed so loop through all photoa to", "\"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata = p.fetchPlexApi(url) container =", "= uniqify(doUpdate) doUpdate = [] lock.release() photoGroups = {} # first group all", "'%s'\" % filename) return None return data def getXMP(data): XMP = None if", "if __name__ == '__main__': if ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL", "equal in plex and ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if", "p = plexUsers() # run at startup fetchPhotosAndProcess() # now start the observer", "i = i + 1 #logging.debug(\" updateMetaData: tagQuery is '%s'\" % tagQuery) data", "for tag in tags: tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i =", "sys import getopt import logging import urllib import time import os import threading", "lock = None firstRun = ppTagConfig.FORCE_RUN_AT_START # plex p = None # timer", "tagQuery is '%s'\" % tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def getdata(filename):", "doUpdateTemp = uniqify(doUpdate) doUpdate = [] lock.release() photoGroups = {} # 
first group", "True return False def loopThroughAllPhotos(): global doUpdate global firstRun doUpdateTemp = uniqify(doUpdate) doUpdate", "if not event.is_directory: if (event.event_type == 'modified' or event.event_type == 'created' or event.event_type", "## python 3 # pip install watchdog import sys import getopt import logging", "wrong section) break elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size", "ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s - %(message)s') if ppTagConfig.TIMEZONE is not None", "def triggerProcess(): global t global lastTS lastTS = datetime.now() if t is None", "if fetchAndProcessByDate(): # failed so loop through all photoa to find the rest", "can process any creates first while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to", "defer those to a full scan if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] #", "is unreadable\" % filename) return None except: logging.error(\"Exif process_file error: '%s'\" % filename)", "(event.event_type,event.src_path)) def on_modified(self, event): self.process(event) def on_created(self, event): self.process(event) if __name__ == '__main__':", "firstRun and len(doUpdateTemp) == 0: toDo = False break if not firstRun: for", "incoming event time lastTS = datetime.now() def updateMetadata(item, tags, rating): # update rating", "is '%s'\" % tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def getdata(filename): detailed", "found in this section '%s'\" % src) firstRun = False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"]", "ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s - %(message)s') if 
ppTagConfig.TIMEZONE", "= False #exif_log.setup_logger(debug, color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file = open(str(filename),", "photo doUpdateTemp.remove(filepath) for date in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(),", "tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def getdata(filename): detailed = True stop_tag", "int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo = True start =", "tagQuery), \"PUT\") def getdata(filename): detailed = True stop_tag = DEFAULT_STOP_TAG debug = False", "keep processing until there is nothing more to do so we don't have", "datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d", "return parsedXMP = getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\" % filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2)", "start = 0 size = 1000 # Make a key list of all", "photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure path seperator is equal in", "xml = data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse XMP\") return", "data = process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict, debug=debug) img_file.close() if not data: logging.info(\"No EXIF", "there is nothing more to do so we don't have to worry about", "# missing or not a photo doUpdateTemp.remove(filepath) for date in photoGroups.keys(): fromTimecode =", "= [] try: date = 
datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try:", "idle time so that plex can process any creates first while datetime.now()-lastTS <", "metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if 'Metadata' not in container: # no", "in doUpdateTemp[:] : photoElement = parseExifAndTags(filepath) if photoElement: # this has exif data", "event): self.process(event) def on_created(self, event): self.process(event) if __name__ == '__main__': if ppTagConfig.LOG_LEVEL is", "= [] lock.release() photoGroups = {} # first group all photos by date", "now start the observer observer.start() try: while True: time.sleep(5) except KeyboardInterrupt: observer.stop() observer.join()", "parsedXMP = {} parsedXMP['rating'] = 0 parsedXMP['tags'] = [] try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF", "date '%s'\" % path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if we failed to", "% (start, size) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements = container[\"Metadata\"] totalSize", "ppTag if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if src in doUpdateTemp or", "import threading from datetime import datetime, date, timedelta from watchdog.observers import Observer from", "while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find all photos based on", "date in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement] else: # missing or not", "if not data: return None parsedXMP = getXMP(data) if not parsedXMP: parsedXMP =", "in doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess() return logging.debug(\"Ignored file in", "folder in 
p.photoLocations: if event.src_path.startswith(folder): # put file into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\",", "= metadata[\"MediaContainer\"] if 'Metadata' not in container: # no photos in this time", "then defer those to a full scan if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp]", "updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data for '%s'\" % filename) def parseExifAndTags(filename):", "make sure path seperator is equal in plex and ppTag if \"\\\\\" in", "global lastTS lastTS = datetime.now() if t is None or not t.is_alive() :", "exif data date = photoElement.date() if date in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] =", "= 1000 # Make a key list of all pics in the date", "firstRun: # update tags and rating # print(key) # print(src) updateTagsAndRating(key, src) try:", "None or not t.is_alive() : logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq):", "lock dateSearchFailed = [] while len(doUpdate) > 0: lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate", "container: # no photos in this time range (probably wrong section) break elements", "path = photo.path() # make sure path seperator is equal in plex and", "True start = 0 size = 1000 # Make a key list of", "lock.release() triggerProcess() return logging.debug(\"Ignored file in wrong location: '%s'\" % event.src_path) else: logging.debug(\"Ignored", "#logging.debug(\" updateMetaData: tagQuery is '%s'\" % tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\")", "filepath in doUpdateTemp[:] : photoElement = parseExifAndTags(filepath) if photoElement: # this has exif", "XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse XMP\") return XMP def updateTagsAndRating(key, filename):", "self.process(event) def on_created(self, 
event): self.process(event) if __name__ == '__main__': if ppTagConfig.LOG_LEVEL is None", "else: # must be in the timer thread so process backlog # keep", "logging.info(\"No XMP data for '%s'\" % filename) def parseExifAndTags(filename): data = getdata(filename) if", "probably firstRun if not firstRun and len(doUpdateTemp) == 0: toDo = False break", "== 'moved'): # check if file belongs to monitored section for folder in", "for src in doUpdateTemp: logging.info(\"Skipped file not found in this section '%s'\" %", "src = src.replace(\"/\",\"\\\\\") if src in doUpdateTemp or firstRun: # update tags and", "import PhotoElement from config import ppTagConfig logger = exif_log.get_logger() doUpdate = [] lock", "fetchPhotosAndProcess(): global firstRun global lastTS if firstRun: # complete update on startup requested", "= {} if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\"", "False # loop through all elements for photo in elements: mediaType = photo[\"type\"]", "= 1000 #print('loop through all, started %i' % int(time.time())) if p.photoSection: while toDo:", "details=detailed, strict=strict, debug=debug) img_file.close() if not data: logging.info(\"No EXIF information for '%s'\" %", "-> %s\", src, key) plexData[src] = key # Update the pics that changed", "EXIF information for '%s'\" % filename) return None if 'JPEGThumbnail' in data: del", "failed to process something then trigger a full scan if len(dateSearchFailed) > 0:", "ApplicationNotes' in data: try: xml = data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable", "Observer from watchdog.events import PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import", "if ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), 
format='%(asctime)s", "= [photoElement] else: # missing or not a photo doUpdateTemp.remove(filepath) for date in", "logging.error(\"Unable to parse XMP\") return XMP def updateTagsAndRating(key, filename): data = getdata(filename) if", "more to do so we don't have to worry about missed triggers while", "exif_log.get_logger() doUpdate = [] lock = None firstRun = ppTagConfig.FORCE_RUN_AT_START # plex p", "= ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file = open(str(filename), 'rb') data = process_file(img_file, stop_tag=stop_tag, details=detailed,", "= False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\" event.event_type 'modified' |", "\"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL: %s\", url) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"]", "import sys import getopt import logging import urllib import time import os import", "if file belongs to monitored section for folder in p.photoLocations: if event.src_path.startswith(folder): #", "len(doUpdateTemp) == 0: toDo = False break if not firstRun: for src in", "ppTagConfig logger = exif_log.get_logger() doUpdate = [] lock = None firstRun = ppTagConfig.FORCE_RUN_AT_START", "color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file = open(str(filename), 'rb') data =", "# first group all photos by date for filepath in doUpdateTemp[:] : photoElement", "metadata[\"MediaContainer\"] elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size = container[\"size\"]", "of idle time so that plex can process any creates first while datetime.now()-lastTS", "time import os import threading from datetime import datetime, date, timedelta from watchdog.observers", "None firstRun = ppTagConfig.FORCE_RUN_AT_START # plex p = None # timer t =", "DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import process_file, 
exif_log, __version__ from plexUsers import plexUsers from", "for filepath in doUpdateTemp[:] : photoElement = parseExifAndTags(filepath) if photoElement: # this has", "plexData[src] = key # Update the pics that changed in the date range", "#!/usr/bin/env python ## python 3 # pip install watchdog import sys import getopt", "mediaType != \"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make", "strict=strict, debug=debug) img_file.close() if not data: logging.info(\"No EXIF information for '%s'\" % filename)", "wait for 120 seconds of idle time so that plex can process any", "range plexData = {} if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection)", ": logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return list(dict.fromkeys(seq)) # order", "all photos by date for filepath in doUpdateTemp[:] : photoElement = parseExifAndTags(filepath) if", "import getopt import logging import urllib import time import os import threading from", "data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token) # write the metadata # prepare", "must be in the timer thread so process backlog # keep processing until", "return False def loopThroughAllPhotos(): global doUpdate global firstRun doUpdateTemp = uniqify(doUpdate) doUpdate =", "all pics in the date range plexData = {} if p.photoSection: while toDo:", "not firstRun and len(doUpdateTemp) == 0: toDo = False break if not firstRun:", "not firstRun: for src in doUpdateTemp: logging.info(\"Skipped file not found in this section", "plexUsers from lightroomTags import parse_xmp_for_lightroom_tags from photoElement import PhotoElement from config import ppTagConfig", "p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if 'Metadata' not in container: # no photos in", "src = 
photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s -> %s\", src, key) plexData[src] =", "seconds of idle time so that plex can process any creates first while", "event '%s' for file '%s'\" % (event.event_type,event.src_path)) def on_modified(self, event): self.process(event) def on_created(self,", "#exif_log.setup_logger(debug, color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file = open(str(filename), 'rb') data", "datetime import datetime, date, timedelta from watchdog.observers import Observer from watchdog.events import PatternMatchingEventHandler", "data: return None parsedXMP = getXMP(data) if not parsedXMP: parsedXMP = {} parsedXMP['rating']", "event.is_directory True | False event.src_path path/to/observed/file \"\"\" if not event.is_directory: if (event.event_type ==", "XMP\") return XMP def updateTagsAndRating(key, filename): data = getdata(filename) if not data: return", "for photo in photoGroups[date]: path = photo.path() # make sure path seperator is", "rating),\"PUT\", False, user.token) # write the metadata # prepare the tags tagQuery =", "str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements", "in plex and ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if path", "os.environ['TZ'] = ppTagConfig.TIMEZONE lock = threading.Lock() # setup observer observer = Observer() observer.schedule(PhotoHandler(),", "= exif_log.get_logger() doUpdate = [] lock = None firstRun = ppTagConfig.FORCE_RUN_AT_START # plex", "wrong location: '%s'\" % event.src_path) else: logging.debug(\"Ignored event '%s' for file '%s'\" %", "if (event.event_type == 'modified' or event.event_type == 'created' or event.event_type == 'moved'): #", "requested loopThroughAllPhotos() else: # must be in the timer thread 
so process backlog", "in data: try: xml = data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to", "\"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s ->", "startup requested loopThroughAllPhotos() else: # must be in the timer thread so process", "'%s'\" % (event.event_type,event.src_path)) def on_modified(self, event): self.process(event) def on_created(self, event): self.process(event) if __name__", "container = metadata[\"MediaContainer\"] if 'Metadata' not in container: # no photos in this", "i = 0 for tag in tags: tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\" %(i,", "tag in tags: tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i = i", "logging.info(\"Processing by date '%s'\" % path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if we", "urllib.parse.quote(tag.encode('utf-8'))) i = i + 1 #logging.debug(\" updateMetaData: tagQuery is '%s'\" % tagQuery)", "else: # missing or not a photo doUpdateTemp.remove(filepath) for date in photoGroups.keys(): fromTimecode", "scan if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] # if we failed to process", "observer observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers() # run at", "the date range plexData = {} if p.photoSection: while toDo: url = \"/library/sections/\"", "'%s'\" % tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def getdata(filename): detailed =", "logging import urllib import time import os import threading from datetime import datetime,", "in the date range plexData = {} if p.photoSection: while toDo: url =", "(start, size) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements = 
container[\"Metadata\"] totalSize =", "file belongs to monitored section for folder in p.photoLocations: if event.src_path.startswith(folder): # put", "ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s", "= datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global", "in data: del data['JPEGThumbnail'] if 'TIFFThumbnail' in data: del data['TIFFThumbnail'] except IOError: logging.debug(\"'%s'", "to worry about missed triggers while len(doUpdate) > 0: # wait for 120", "% (event.event_type,event.src_path)) def on_modified(self, event): self.process(event) def on_created(self, event): self.process(event) if __name__ ==", "event): self.process(event) if __name__ == '__main__': if ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL ==", "threading from datetime import datetime, date, timedelta from watchdog.observers import Observer from watchdog.events", "data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def getdata(filename): detailed = True stop_tag =", "parsedXMP['rating'] = 0 parsedXMP['tags'] = [] try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable,", "= int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo = True start", "| 'deleted' event.is_directory True | False event.src_path path/to/observed/file \"\"\" if not event.is_directory: if", "parsedXMP = getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\" % filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else:", "size = 1000 # Make a key list of all pics in the", "scan if 
len(dateSearchFailed) > 0: logging.warning(\"Some updated files were not found by date", "if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start,", "[*dateSearchFailed, *doUpdateTemp] # if we failed to process something then trigger a full", "order preserving def fetchPhotosAndProcess(): global firstRun global lastTS if firstRun: # complete update", "getXMP(data) if not parsedXMP: parsedXMP = {} parsedXMP['rating'] = 0 parsedXMP['tags'] = []", "in this section '%s'\" % src) firstRun = False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"]", "'deleted' event.is_directory True | False event.src_path path/to/observed/file \"\"\" if not event.is_directory: if (event.event_type", "{} # first group all photos by date for filepath in doUpdateTemp[:] :", "None or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s - %(message)s')", "missed triggers while len(doUpdate) > 0: # wait for 120 seconds of idle", "photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s -> %s\", src, key) plexData[src] = key #", "if t is None or not t.is_alive() : logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess)", "in elements: mediaType = photo[\"type\"] if mediaType != \"photo\": continue key = photo[\"ratingKey\"]", "file '%s'\" % (event.event_type,event.src_path)) def on_modified(self, event): self.process(event) def on_created(self, event): self.process(event) if", "if mediaType != \"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\"", "complete update on startup requested loopThroughAllPhotos() else: # must be in 
the timer", "+ str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"]", "process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict, debug=debug) img_file.close() if not data: logging.info(\"No EXIF information for", "run at startup fetchPhotosAndProcess() # now start the observer observer.start() try: while True:", "exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import process_file, exif_log, __version__ from plexUsers import", "in the date range for photo in photoGroups[date]: path = photo.path() # make", "logging.debug(\"Ignored file in wrong location: '%s'\" % event.src_path) else: logging.debug(\"Ignored event '%s' for", "not found by date range.\") lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate] lock.release() return True", "= parseExifAndTags(filepath) if photoElement: # this has exif data date = photoElement.date() if", "src, key) plexData[src] = key # Update the pics that changed in the", "1) #logging.info(\" Map: %s -> %s\", src, key) plexData[src] = key # Update", "data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse XMP\") return XMP def", "if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode)", "img_file.close() if not data: logging.info(\"No EXIF information for '%s'\" % filename) return None", "totalSize-offset-size == 0: toDo = False # loop through all elements for photo", "= start + size if totalSize-offset-size == 0: toDo = False # loop", "metadata # prepare the tags tagQuery = \"?\" i = 0 for tag", "all, started %i' % int(time.time())) if p.photoSection: while toDo: url = \"/library/sections/\" +", "doUpdate global firstRun doUpdateTemp = uniqify(doUpdate) doUpdate = [] toDo = True start", "= 
ppTagConfig.FORCE_RUN_AT_START # plex p = None # timer t = None #", "section) break elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size =", "in p.photoLocations: if event.src_path.startswith(folder): # put file into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1)", "# loop through all elements for photo in elements: mediaType = photo[\"type\"] if", "photos based on date if fetchAndProcessByDate(): # failed so loop through all photoa", "# no photos in this time range (probably wrong section) break elements =", "'%s'\" % filename) def parseExifAndTags(filename): data = getdata(filename) if not data: return None", "datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: # fallback to the modify date on the", "is None or not t.is_alive() : logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start() def", "range.\") lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate] lock.release() return True return False def loopThroughAllPhotos():", "if event.src_path.startswith(folder): # put file into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path", "for '%s'\" % filename) def parseExifAndTags(filename): data = getdata(filename) if not data: return", "lightroomTags import parse_xmp_for_lightroom_tags from photoElement import PhotoElement from config import ppTagConfig logger =", "datetime.now() if t is None or not t.is_alive() : logging.info(\"Starting timer\") t =", "= [] while len(doUpdate) > 0: lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate = []", "lastTS = datetime.now() if t is None or not t.is_alive() : logging.info(\"Starting timer\")", "size) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements = container[\"Metadata\"] totalSize = container[\"totalSize\"]", "break if not firstRun: for src in 
doUpdateTemp: logging.info(\"Skipped file not found in", "%(message)s') if ppTagConfig.TIMEZONE is not None : os.environ['TZ'] = ppTagConfig.TIMEZONE lock = threading.Lock()", "photoElement import PhotoElement from config import ppTagConfig logger = exif_log.get_logger() doUpdate = []", "time so that plex can process any creates first while datetime.now()-lastTS < timedelta(seconds=120):", "filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file = open(str(filename), 'rb') data = process_file(img_file, stop_tag=stop_tag,", "\"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure path", "'%s' for file '%s'\" % (event.event_type,event.src_path)) def on_modified(self, event): self.process(event) def on_created(self, event):", "rating): # update rating for user in p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\",", "path in plexData.keys(): logging.info(\"Processing by date '%s'\" % path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path)", "timedelta from watchdog.observers import Observer from watchdog.events import PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG,", "len(dateSearchFailed) > 0: logging.warning(\"Some updated files were not found by date range.\") lock.acquire()", "if 'Metadata' not in container: # no photos in this time range (probably", "# ok if missing, probably firstRun if not firstRun and len(doUpdateTemp) == 0:", "= threading.Lock() # setup observer observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p =", "photoElement: # this has exif data date = photoElement.date() if date in photoGroups.keys():", "% (start, size) #logging.info(\"URL: %s\", url) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if", "event.src_path) 
lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess() return logging.debug(\"Ignored file in wrong location: '%s'\" %", "recursive=True) p = plexUsers() # run at startup fetchPhotosAndProcess() # now start the", "exif_log, __version__ from plexUsers import plexUsers from lightroomTags import parse_xmp_for_lightroom_tags from photoElement import", "None # last incoming event time lastTS = datetime.now() def updateMetadata(item, tags, rating):", "# timer t = None # last incoming event time lastTS = datetime.now()", "the modify date on the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return", "filename) def parseExifAndTags(filename): data = getdata(filename) if not data: return None parsedXMP =", "= datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: # fallback to the modify date on", "process backlog # keep processing until there is nothing more to do so", "all photos based on date if fetchAndProcessByDate(): # failed so loop through all", "in wrong location: '%s'\" % event.src_path) else: logging.debug(\"Ignored event '%s' for file '%s'\"", "path = path.replace(\"\\\\\",\"/\") if path in plexData.keys(): logging.info(\"Processing by date '%s'\" % path)", "by date '%s'\" % path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if we failed", "1000 # Make a key list of all pics in the date range", "# update tags and rating # print(key) # print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src)", "updated files were not found by date range.\") lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate]", "date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: # fallback to the modify date", "of all pics in the date range plexData = {} if p.photoSection: while", "import plexUsers from lightroomTags import parse_xmp_for_lightroom_tags from 
photoElement import PhotoElement from config import", "[] toDo = True start = 0 size = 1000 #print('loop through all,", "file not found in this section '%s'\" % src) firstRun = False class", "int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo = True start = 0 size = 1000", "logging.warning(\"Some updated files were not found by date range.\") lock.acquire() doUpdate = [*dateSearchFailed,", "filename) return None except: logging.error(\"Exif process_file error: '%s'\" % filename) return None return", "if missing, probably firstRun if not firstRun and len(doUpdateTemp) == 0: toDo =", "pip install watchdog import sys import getopt import logging import urllib import time", "= parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse XMP\") return XMP def updateTagsAndRating(key, filename): data", "to monitored section for folder in p.photoLocations: if event.src_path.startswith(folder): # put file into", "# Make a key list of all pics in the date range plexData", "# if we failed to process something then defer those to a full", "plex and ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if path in", "until there is nothing more to do so we don't have to worry", "user.token) # write the metadata # prepare the tags tagQuery = \"?\" i", "seperator is equal in plex and ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path =", "False def loopThroughAllPhotos(): global doUpdate global firstRun doUpdateTemp = uniqify(doUpdate) doUpdate = []", "toDo = False # loop through all elements for photo in elements: mediaType", "= getXMP(data) if not parsedXMP: parsedXMP = {} parsedXMP['rating'] = 0 parsedXMP['tags'] =", "lock.release() return True return False def loopThroughAllPhotos(): global doUpdate global firstRun doUpdateTemp =", "ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if path in plexData.keys(): logging.info(\"Processing by date '%s'\" 
%", "\"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i = i + 1 #logging.debug(\" updateMetaData: tagQuery is '%s'\"", "= {} # first group all photos by date for filepath in doUpdateTemp[:]", "def updateMetadata(item, tags, rating): # update rating for user in p.users: data =", "process_file, exif_log, __version__ from plexUsers import plexUsers from lightroomTags import parse_xmp_for_lightroom_tags from photoElement", "\"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL: %s\", url) metadata =", "so process backlog # keep processing until there is nothing more to do", "'moved' | 'deleted' event.is_directory True | False event.src_path path/to/observed/file \"\"\" if not event.is_directory:", "datetime, date, timedelta from watchdog.observers import Observer from watchdog.events import PatternMatchingEventHandler from exif.exifread.tags", "None if 'Image ApplicationNotes' in data: try: xml = data['Image ApplicationNotes'].printable XMP =", "on date if fetchAndProcessByDate(): # failed so loop through all photoa to find", "date for filepath in doUpdateTemp[:] : photoElement = parseExifAndTags(filepath) if photoElement: # this", "else: photoGroups[date] = [photoElement] else: # missing or not a photo doUpdateTemp.remove(filepath) for", "doUpdateTemp.remove(filepath) for date in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d')", "parseExifAndTags(filepath) if photoElement: # this has exif data date = photoElement.date() if date", "'created' | 'moved' | 'deleted' event.is_directory True | False event.src_path path/to/observed/file \"\"\" if", "datetime.now() def updateMetadata(item, tags, rating): # update rating for user in p.users: data", "getXMP(data): XMP = None if 'Image ApplicationNotes' in data: try: xml = data['Image", "data date = 
photoElement.date() if date in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement]", "something then defer those to a full scan if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed,", "XMP data for '%s'\" % filename) def parseExifAndTags(filename): data = getdata(filename) if not", "toDo = False break if not firstRun: for src in doUpdateTemp: logging.info(\"Skipped file", "if ppTagConfig.TIMEZONE is not None : os.environ['TZ'] = ppTagConfig.TIMEZONE lock = threading.Lock() #", "logger = exif_log.get_logger() doUpdate = [] lock = None firstRun = ppTagConfig.FORCE_RUN_AT_START #", "p.photoLocations: if event.src_path.startswith(folder): # put file into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if", "= photoElement.date() if date in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement] else: #", "event.event_type 'modified' | 'created' | 'moved' | 'deleted' event.is_directory True | False event.src_path", "getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\" % filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No XMP", "= uniqify(doUpdate) doUpdate = [] toDo = True start = 0 size =", "# Try to find all photos based on date if fetchAndProcessByDate(): # failed", "event.src_path) else: logging.debug(\"Ignored event '%s' for file '%s'\" % (event.event_type,event.src_path)) def on_modified(self, event):", "section '%s'\" % src) firstRun = False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self,", "i + 1 #logging.debug(\" updateMetaData: tagQuery is '%s'\" % tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\"", "event.src_path path/to/observed/file \"\"\" if not event.is_directory: if (event.event_type == 'modified' or event.event_type ==", "DateTimeOriginal'].printable+data['EXIF Tag 
0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date()", "event.src_path.startswith(folder): # put file into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not", "src) try: doUpdateTemp.remove(src) except: pass # ok if missing, probably firstRun if not", "in p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token) # write the metadata", "if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if path in plexData.keys(): logging.info(\"Processing by", "updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if we failed to process something then defer", "dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] # if we failed to process something then trigger", "doUpdate = [] lock = None firstRun = ppTagConfig.FORCE_RUN_AT_START # plex p =", "# prepare the tags tagQuery = \"?\" i = 0 for tag in", "XMP = None if 'Image ApplicationNotes' in data: try: xml = data['Image ApplicationNotes'].printable", "if 'Image ApplicationNotes' in data: try: xml = data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml)", "= False break if not firstRun: for src in doUpdateTemp: logging.info(\"Skipped file not", "a full scan if len(dateSearchFailed) > 0: logging.warning(\"Some updated files were not found", "tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i = i + 1 #logging.debug(\" updateMetaData: tagQuery", "we failed to process something then trigger a full scan if len(dateSearchFailed) >", "into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not in doUpdate: logging.info(\"Queued '%s'\",", "from config import ppTagConfig logger = 
exif_log.get_logger() doUpdate = [] lock = None", "doUpdateTemp or firstRun: # update tags and rating # print(key) # print(src) updateTagsAndRating(key,", "and len(doUpdateTemp) == 0: toDo = False break if not firstRun: for src", "process something then defer those to a full scan if len(doUpdateTemp): dateSearchFailed =", "container[\"size\"] start = start + size if totalSize-offset-size == 0: toDo = False", "changed in the date range for photo in photoGroups[date]: path = photo.path() #", "sure path seperator is equal in plex and ppTag if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH:", "'modified' or event.event_type == 'created' or event.event_type == 'moved'): # check if file", "data = getdata(filename) if not data: return None parsedXMP = getXMP(data) if not", "# order preserving def fetchPhotosAndProcess(): global firstRun global lastTS if firstRun: # complete", "from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import process_file, exif_log, __version__ from plexUsers", "photos in this time range (probably wrong section) break elements = container[\"Metadata\"] totalSize", "filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data for '%s'\" % filename) def", "not found in this section '%s'\" % src) firstRun = False class PhotoHandler(PatternMatchingEventHandler):", "def uniqify(seq): return list(dict.fromkeys(seq)) # order preserving def fetchPhotosAndProcess(): global firstRun global lastTS", "src.replace(\"/\",\"\\\\\") if src in doUpdateTemp or firstRun: # update tags and rating #", "from watchdog.observers import Observer from watchdog.events import PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES", "== 0: toDo = False break if not firstRun: for src in doUpdateTemp:", "plexData = {} if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) +", "# failed so loop through all photoa to find the rest 
loopThroughAllPhotos() def", "import process_file, exif_log, __version__ from plexUsers import plexUsers from lightroomTags import parse_xmp_for_lightroom_tags from", "date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date = datetime.strptime(data['EXIF", "not in container: # no photos in this time range (probably wrong section)", "0 for tag in tags: tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i", "failed so loop through all photoa to find the rest loopThroughAllPhotos() def fetchAndProcessByDate():", "updateTagsAndRating(key, filename): data = getdata(filename) if not data: return parsedXMP = getXMP(data) if", "parse_xmp_for_lightroom_tags from photoElement import PhotoElement from config import ppTagConfig logger = exif_log.get_logger() doUpdate", "#logging.info(\" Map: %s -> %s\", src, key) plexData[src] = key # Update the", "'%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo = True start = 0", "getopt import logging import urllib import time import os import threading from datetime", "== 'modified' or event.event_type == 'created' or event.event_type == 'moved'): # check if", "logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return list(dict.fromkeys(seq)) # order preserving", "size = container[\"size\"] start = start + size if totalSize-offset-size == 0: toDo", "for user in p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token) # write", "% filename) return None if 'JPEGThumbnail' in data: del data['JPEGThumbnail'] if 'TIFFThumbnail' in", "# fallback to the modify date on the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date", 
"stop_tag=stop_tag, details=detailed, strict=strict, debug=debug) img_file.close() if not data: logging.info(\"No EXIF information for '%s'\"", "timedelta(days=1)).timestamp())-1 toDo = True start = 0 size = 1000 # Make a", "mediaType = photo[\"type\"] if mediaType != \"photo\": continue key = photo[\"ratingKey\"] src =", "0 parsedXMP['tags'] = [] try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date()", "in plexData.keys(): logging.info(\"Processing by date '%s'\" % path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) #", "!= \"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure", "import Observer from watchdog.events import PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread", "def on_modified(self, event): self.process(event) def on_created(self, event): self.process(event) if __name__ == '__main__': if", "None except: logging.error(\"Exif process_file error: '%s'\" % filename) return None return data def", "\"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\"", "= False # loop through all elements for photo in elements: mediaType =", "logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s - %(message)s') if ppTagConfig.TIMEZONE is not None : os.environ['TZ'] =", "'%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: # fallback", "data = getdata(filename) if not data: return parsedXMP = getXMP(data) if parsedXMP: logging.info(\"Processing", 
"tagQuery = \"?\" i = 0 for tag in tags: tagQuery = tagQuery", "event): \"\"\" event.event_type 'modified' | 'created' | 'moved' | 'deleted' event.is_directory True |", "triggerProcess() return logging.debug(\"Ignored file in wrong location: '%s'\" % event.src_path) else: logging.debug(\"Ignored event", "return None parsedXMP = getXMP(data) if not parsedXMP: parsedXMP = {} parsedXMP['rating'] =", "len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] # if we failed to process something then", "= None firstRun = ppTagConfig.FORCE_RUN_AT_START # plex p = None # timer t", "that changed in the date range for photo in photoGroups[date]: path = photo.path()", "p = None # timer t = None # last incoming event time", "import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import process_file, exif_log, __version__ from plexUsers import plexUsers", "file into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not in doUpdate: logging.info(\"Queued", "on the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'],", "toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo = True start = 0 size", "list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not in doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path)", "__name__ == '__main__': if ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL =", "% filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data for '%s'\" % filename)", "os import threading from datetime import datetime, date, timedelta from watchdog.observers import Observer", "not None : os.environ['TZ'] = ppTagConfig.TIMEZONE lock = threading.Lock() # setup observer 
observer", "to parse XMP\") return XMP def updateTagsAndRating(key, filename): data = getdata(filename) if not", "else: logging.info(\"No XMP data for '%s'\" % filename) def parseExifAndTags(filename): data = getdata(filename)", "install watchdog import sys import getopt import logging import urllib import time import", "<filename>pptag.py #!/usr/bin/env python ## python 3 # pip install watchdog import sys import", "# last incoming event time lastTS = datetime.now() def updateMetadata(item, tags, rating): #", "timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return list(dict.fromkeys(seq)) # order preserving def", "ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse XMP\") return XMP def updateTagsAndRating(key,", "the rest loopThroughAllPhotos() def fetchAndProcessByDate(): global doUpdate global lock dateSearchFailed = [] while", "firstRun: for src in doUpdateTemp: logging.info(\"Skipped file not found in this section '%s'\"", "has exif data date = photoElement.date() if date in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date]", "= p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def getdata(filename): detailed = True stop_tag = DEFAULT_STOP_TAG", "if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] # if we failed to process something", "doUpdateTemp.remove(src) except: pass # ok if missing, probably firstRun if not firstRun and", "try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file = open(str(filename), 'rb') data = process_file(img_file,", "fallback to the modify date on the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date =", "key # Update the pics that changed in the date range for photo", "= p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token) # write the metadata # 
prepare the", "this time range (probably wrong section) break elements = container[\"Metadata\"] totalSize = container[\"totalSize\"]", "datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess():", "# complete update on startup requested loopThroughAllPhotos() else: # must be in the", "or not t.is_alive() : logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return", "date = photoElement.date() if date in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement] else:", "range for photo in photoGroups[date]: path = photo.path() # make sure path seperator", "if src in doUpdateTemp or firstRun: # update tags and rating # print(key)", "list of all pics in the date range plexData = {} if p.photoSection:", "None : os.environ['TZ'] = ppTagConfig.TIMEZONE lock = threading.Lock() # setup observer observer =", "= photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s -> %s\", src, key)", "path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if we failed to process something then", "for date in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') +", "forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not in doUpdate: logging.info(\"Queued '%s'\", event.src_path)", "logging.error(\"Exif process_file error: '%s'\" % filename) return None return data def getXMP(data): XMP", "backlog # keep processing until there is nothing more to do so we", "= True start = 0 size = 1000 # Make a key list", "in plex and ppTag if \"\\\\\" in 
ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if src", "is nothing more to do so we don't have to worry about missed", "debug = False strict = False color = False #exif_log.setup_logger(debug, color) try: filename", "is equal in plex and ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\")", "first while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find all photos based", "if firstRun: # complete update on startup requested loopThroughAllPhotos() else: # must be", "% event.src_path) else: logging.debug(\"Ignored event '%s' for file '%s'\" % (event.event_type,event.src_path)) def on_modified(self,", "try: doUpdateTemp.remove(src) except: pass # ok if missing, probably firstRun if not firstRun", "None return data def getXMP(data): XMP = None if 'Image ApplicationNotes' in data:", "PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import process_file, exif_log, __version__ from", "lastTS = datetime.now() def updateMetadata(item, tags, rating): # update rating for user in", "from watchdog.events import PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import process_file,", "= getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\" % filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No", "data: logging.info(\"No EXIF information for '%s'\" % filename) return None if 'JPEGThumbnail' in", "p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) +", "date range plexData = {} if p.photoSection: while toDo: url = \"/library/sections/\" +", "%(levelname)s - %(message)s') if ppTagConfig.TIMEZONE is not None : os.environ['TZ'] = ppTagConfig.TIMEZONE lock", "prepare the tags tagQuery = \"?\" i = 0 for tag 
in tags:", "so loop through all photoa to find the rest loopThroughAllPhotos() def fetchAndProcessByDate(): global", "XMP def updateTagsAndRating(key, filename): data = getdata(filename) if not data: return parsedXMP =", "path seperator is equal in plex and ppTag if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src", "%s\", src, key) plexData[src] = key # Update the pics that changed in", "(start, size) #logging.info(\"URL: %s\", url) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if 'Metadata'", "in container: # no photos in this time range (probably wrong section) break", "0: logging.warning(\"Some updated files were not found by date range.\") lock.acquire() doUpdate =", "= Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers() # run at startup fetchPhotosAndProcess()", "= process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict, debug=debug) img_file.close() if not data: logging.info(\"No EXIF information", "= photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure path seperator is equal", "continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure path seperator", "datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global t global lastTS lastTS", "logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess() return logging.debug(\"Ignored file in wrong location:", "url = \"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode)", "if totalSize-offset-size == 0: toDo = False # loop through all elements for", "startup fetchPhotosAndProcess() # now start the observer 
observer.start() try: while True: time.sleep(5) except", "global doUpdate global firstRun doUpdateTemp = uniqify(doUpdate) doUpdate = [] toDo = True", "# run at startup fetchPhotosAndProcess() # now start the observer observer.start() try: while", "\"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if src in doUpdateTemp or firstRun: #", "we don't have to worry about missed triggers while len(doUpdate) > 0: #", "photo[\"type\"] if mediaType != \"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1)", "t global lastTS lastTS = datetime.now() if t is None or not t.is_alive()", "= tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i = i + 1 #logging.debug(\" updateMetaData:", "= p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset =", "try: xml = data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse XMP\")", "filename) return None return data def getXMP(data): XMP = None if 'Image ApplicationNotes'", "group all photos by date for filepath in doUpdateTemp[:] : photoElement = parseExifAndTags(filepath)", "= datetime.now() def updateMetadata(item, tags, rating): # update rating for user in p.users:", "1) if pptag_path not in doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess()", "= 0 for tag in tags: tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8')))", "start + size if totalSize-offset-size == 0: toDo = False # loop through", "import ppTagConfig logger = exif_log.get_logger() doUpdate = [] lock = None firstRun =", "# write the metadata # prepare the tags tagQuery = \"?\" i =", "in ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if src 
in doUpdateTemp or firstRun: # update", "False strict = False color = False #exif_log.setup_logger(debug, color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH", "< timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find all photos based on date if", "in doUpdateTemp or firstRun: # update tags and rating # print(key) # print(src)", "class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\" event.event_type 'modified' | 'created' |", "ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if src in doUpdateTemp or firstRun: # update tags", "file in wrong location: '%s'\" % event.src_path) else: logging.debug(\"Ignored event '%s' for file", "process_file error: '%s'\" % filename) return None return data def getXMP(data): XMP =", "len(doUpdate) > 0: lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate = [] lock.release() photoGroups =", "python 3 # pip install watchdog import sys import getopt import logging import", "fetchAndProcessByDate(): # failed so loop through all photoa to find the rest loopThroughAllPhotos()", "t = threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return list(dict.fromkeys(seq)) # order preserving def fetchPhotosAndProcess():", "date on the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return PhotoElement(filename, date,", "plex and ppTag if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if src in", "firstRun global lastTS if firstRun: # complete update on startup requested loopThroughAllPhotos() else:", "url = \"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata = p.fetchPlexApi(url)", "photo.rating()*2) doUpdateTemp.remove(path) # if we failed to process something then defer those to", "in data: del 
data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is unreadable\" % filename) return None", "%H:%M:%S%z').timestamp()).date() except: try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: # fallback to", "uniqify(doUpdate) doUpdate = [] lock.release() photoGroups = {} # first group all photos", "PhotoElement from config import ppTagConfig logger = exif_log.get_logger() doUpdate = [] lock =", "first group all photos by date for filepath in doUpdateTemp[:] : photoElement =", "full scan if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] # if we failed to", "so we don't have to worry about missed triggers while len(doUpdate) > 0:", "if mediaType != \"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #", "PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global t global lastTS lastTS = datetime.now()", "> 0: logging.warning(\"Some updated files were not found by date range.\") lock.acquire() doUpdate", "something then trigger a full scan if len(dateSearchFailed) > 0: logging.warning(\"Some updated files", "if not firstRun: for src in doUpdateTemp: logging.info(\"Skipped file not found in this", "doUpdate.append(pptag_path) lock.release() triggerProcess() return logging.debug(\"Ignored file in wrong location: '%s'\" % event.src_path) else:", "= datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global t global lastTS", "photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo =", "if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if src in doUpdateTemp or firstRun:", "fetchAndProcessByDate(): global 
doUpdate global lock dateSearchFailed = [] while len(doUpdate) > 0: lock.acquire()", "data: del data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is unreadable\" % filename) return None except:", "return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global t global lastTS lastTS =", "update on startup requested loopThroughAllPhotos() else: # must be in the timer thread", "patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\" event.event_type 'modified' | 'created' | 'moved' |", "ppTagConfig.FORCE_RUN_AT_START # plex p = None # timer t = None # last", "lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate] lock.release() return True return False def loopThroughAllPhotos(): global", "getdata(filename) if not data: return None parsedXMP = getXMP(data) if not parsedXMP: parsedXMP", "filename) return None if 'JPEGThumbnail' in data: del data['JPEGThumbnail'] if 'TIFFThumbnail' in data:", "loop through all elements for photo in elements: mediaType = photo[\"type\"] if mediaType", "# make sure path seperator is equal in plex and ppTag if \"/\"", "def loopThroughAllPhotos(): global doUpdate global firstRun doUpdateTemp = uniqify(doUpdate) doUpdate = [] toDo", "the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating'])", "if not parsedXMP: parsedXMP = {} parsedXMP['rating'] = 0 parsedXMP['tags'] = [] try:", "for 120 seconds of idle time so that plex can process any creates", "threading.Lock() # setup observer observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers()", "update rating for user in p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token)", "key list of all pics in the date range plexData = {} if", "# make 
sure path seperator is equal in plex and ppTag if \"\\\\\"", "None # timer t = None # last incoming event time lastTS =", "% filename) def parseExifAndTags(filename): data = getdata(filename) if not data: return None parsedXMP", "print(key) # print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except: pass # ok if missing,", "Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers() # run at startup fetchPhotosAndProcess() #", "%i' % int(time.time())) if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) +", "\"\"\" if not event.is_directory: if (event.event_type == 'modified' or event.event_type == 'created' or", "any creates first while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find all", "toDo = True start = 0 size = 1000 # Make a key", "False event.src_path path/to/observed/file \"\"\" if not event.is_directory: if (event.event_type == 'modified' or event.event_type", "= 0 size = 1000 #print('loop through all, started %i' % int(time.time())) if", "updateMetadata(item, tags, rating): # update rating for user in p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\"", "not data: logging.info(\"No EXIF information for '%s'\" % filename) return None if 'JPEGThumbnail'", "+ str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL: %s\",", "except: logging.error(\"Exif process_file error: '%s'\" % filename) return None return data def getXMP(data):", "in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo", "by date range.\") lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate] 
lock.release() return True return False", "ppTagConfig.TIMEZONE is not None : os.environ['TZ'] = ppTagConfig.TIMEZONE lock = threading.Lock() # setup", "PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\" event.event_type 'modified' | 'created' | 'moved'", "filename): data = getdata(filename) if not data: return parsedXMP = getXMP(data) if parsedXMP:", "event.is_directory: if (event.event_type == 'modified' or event.event_type == 'created' or event.event_type == 'moved'):", "# pip install watchdog import sys import getopt import logging import urllib import", "setup observer observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers() # run", "date, timedelta from watchdog.observers import Observer from watchdog.events import PatternMatchingEventHandler from exif.exifread.tags import", "worry about missed triggers while len(doUpdate) > 0: # wait for 120 seconds", "is equal in plex and ppTag if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\")", "= src.replace(\"/\",\"\\\\\") if src in doUpdateTemp or firstRun: # update tags and rating", "don't have to worry about missed triggers while len(doUpdate) > 0: # wait", "tags: tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i = i + 1", "doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess() return logging.debug(\"Ignored file in wrong", "= key # Update the pics that changed in the date range for", "to process something then defer those to a full scan if len(doUpdateTemp): dateSearchFailed", "tags tagQuery = \"?\" i = 0 for tag in tags: tagQuery =", "event.event_type == 'created' or event.event_type == 'moved'): # check if file belongs to", "import datetime, date, timedelta from watchdog.observers import Observer from watchdog.events import 
PatternMatchingEventHandler from", "try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: # fallback to the modify", "doUpdateTemp.remove(path) # if we failed to process something then defer those to a", "t = None # last incoming event time lastTS = datetime.now() def updateMetadata(item,", "def fetchAndProcessByDate(): global doUpdate global lock dateSearchFailed = [] while len(doUpdate) > 0:", "size if totalSize-offset-size == 0: toDo = False # loop through all elements", "= getdata(filename) if not data: return None parsedXMP = getXMP(data) if not parsedXMP:", "updateMetaData: tagQuery is '%s'\" % tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def", "getdata(filename) if not data: return parsedXMP = getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\" %", "%s\", url) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if 'Metadata' not in container:", "'moved'): # check if file belongs to monitored section for folder in p.photoLocations:", "= False color = False #exif_log.setup_logger(debug, color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename", "str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL: %s\", url) metadata = p.fetchPlexApi(url) container", "tags, rating): # update rating for user in p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item,", "[*dateSearchFailed, *doUpdate] lock.release() return True return False def loopThroughAllPhotos(): global doUpdate global firstRun", "def updateTagsAndRating(key, filename): data = getdata(filename) if not data: return parsedXMP = getXMP(data)", "= photo[\"type\"] if mediaType != \"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\",", "False break if not firstRun: for src in 
doUpdateTemp: logging.info(\"Skipped file not found", "or firstRun: # update tags and rating # print(key) # print(src) updateTagsAndRating(key, src)", "if 'JPEGThumbnail' in data: del data['JPEGThumbnail'] if 'TIFFThumbnail' in data: del data['TIFFThumbnail'] except", "exif.exifread import process_file, exif_log, __version__ from plexUsers import plexUsers from lightroomTags import parse_xmp_for_lightroom_tags", "date in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1", "unreadable\" % filename) return None except: logging.error(\"Exif process_file error: '%s'\" % filename) return", "from datetime import datetime, date, timedelta from watchdog.observers import Observer from watchdog.events import", "mediaType != \"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map:", "p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token) # write the metadata #", "[] lock.release() photoGroups = {} # first group all photos by date for", "= container[\"totalSize\"] offset = container[\"offset\"] size = container[\"size\"] start = start + size", "if we failed to process something then defer those to a full scan", "return None except: logging.error(\"Exif process_file error: '%s'\" % filename) return None return data", "watchdog.observers import Observer from watchdog.events import PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from", "in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement] else: # missing or not a", "%(item, tagQuery), \"PUT\") def getdata(filename): detailed = True stop_tag = DEFAULT_STOP_TAG debug =", "date if 
fetchAndProcessByDate(): # failed so loop through all photoa to find the", "logging.info(\"No EXIF information for '%s'\" % filename) return None if 'JPEGThumbnail' in data:", "doUpdateTemp[:] : photoElement = parseExifAndTags(filepath) if photoElement: # this has exif data date", "in doUpdateTemp: logging.info(\"Skipped file not found in this section '%s'\" % src) firstRun", "+ \"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL: %s\", url) metadata", "'rb') data = process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict, debug=debug) img_file.close() if not data: logging.info(\"No", "# keep processing until there is nothing more to do so we don't", "toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata =", "- %(message)s') if ppTagConfig.TIMEZONE is not None : os.environ['TZ'] = ppTagConfig.TIMEZONE lock =", "data for '%s'\" % filename) def parseExifAndTags(filename): data = getdata(filename) if not data:", "# put file into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not in", "+ size if totalSize-offset-size == 0: toDo = False # loop through all", "pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not in doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) lock.release()", "dateSearchFailed = [] while len(doUpdate) > 0: lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate =", "0: lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate = [] lock.release() photoGroups = {} #", "'%s'\" % event.src_path) else: logging.debug(\"Ignored event '%s' for file '%s'\" % (event.event_type,event.src_path)) def", "% filename) return None except: logging.error(\"Exif process_file error: '%s'\" % filename) return 
None", "creates first while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find all photos", "totalSize = container[\"totalSize\"] offset = container[\"offset\"] size = container[\"size\"] start = start +", "Make a key list of all pics in the date range plexData =", "print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except: pass # ok if missing, probably firstRun", "global t global lastTS lastTS = datetime.now() if t is None or not", "0 size = 1000 # Make a key list of all pics in", "+ timedelta(days=1)).timestamp())-1 toDo = True start = 0 size = 1000 # Make", "= data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse XMP\") return XMP", "datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find all photos based on date", "0: toDo = False break if not firstRun: for src in doUpdateTemp: logging.info(\"Skipped", "not t.is_alive() : logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return list(dict.fromkeys(seq))", "src in doUpdateTemp: logging.info(\"Skipped file not found in this section '%s'\" % src)", "== '__main__': if ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL = 'CRITICAL'", "not data: return None parsedXMP = getXMP(data) if not parsedXMP: parsedXMP = {}", "True start = 0 size = 1000 #print('loop through all, started %i' %", "on startup requested loopThroughAllPhotos() else: # must be in the timer thread so", "python ## python 3 # pip install watchdog import sys import getopt import", "% int(time.time())) if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\"", "(probably wrong section) break elements = container[\"Metadata\"] totalSize = 
container[\"totalSize\"] offset = container[\"offset\"]", "src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure path seperator is equal in plex", ": os.environ['TZ'] = ppTagConfig.TIMEZONE lock = threading.Lock() # setup observer observer = Observer()", "%(item, rating),\"PUT\", False, user.token) # write the metadata # prepare the tags tagQuery", "not event.is_directory: if (event.event_type == 'modified' or event.event_type == 'created' or event.event_type ==", "+ filename img_file = open(str(filename), 'rb') data = process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict, debug=debug)", "= [*dateSearchFailed, *doUpdateTemp] # if we failed to process something then trigger a", "break elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size = container[\"size\"]", "event.event_type == 'moved'): # check if file belongs to monitored section for folder", "[] try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date", "toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\" +", "int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data for '%s'\" % filename) def parseExifAndTags(filename): data =", "time lastTS = datetime.now() def updateMetadata(item, tags, rating): # update rating for user", "import PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import process_file, exif_log, __version__", "parsedXMP = getXMP(data) if not parsedXMP: parsedXMP = {} parsedXMP['rating'] = 0 parsedXMP['tags']", "have to worry about missed triggers while len(doUpdate) > 0: # wait for", "logging.info(\"Processing '%s'\" % filename) updateMetadata(key, parsedXMP['tags'], 
int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data for '%s'\"", "Try to find all photos based on date if fetchAndProcessByDate(): # failed so", "or not a photo doUpdateTemp.remove(filepath) for date in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp())", "through all elements for photo in elements: mediaType = photo[\"type\"] if mediaType !=", "for photo in elements: mediaType = photo[\"type\"] if mediaType != \"photo\": continue key", "t.start() def uniqify(seq): return list(dict.fromkeys(seq)) # order preserving def fetchPhotosAndProcess(): global firstRun global", "a full scan if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] # if we failed", "else: logging.debug(\"Ignored event '%s' for file '%s'\" % (event.event_type,event.src_path)) def on_modified(self, event): self.process(event)", "[photoElement] else: # missing or not a photo doUpdateTemp.remove(filepath) for date in photoGroups.keys():", "p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token) # write the metadata # prepare the tags", "doUpdateTemp = uniqify(doUpdate) doUpdate = [] toDo = True start = 0 size", "belongs to monitored section for folder in p.photoLocations: if event.src_path.startswith(folder): # put file", "firstRun doUpdateTemp = uniqify(doUpdate) doUpdate = [] toDo = True start = 0", "+ str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" %", "doUpdate = [*dateSearchFailed, *doUpdate] lock.release() return True return False def loopThroughAllPhotos(): global doUpdate", "date = datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global t global", "= open(str(filename), 'rb') data = process_file(img_file, stop_tag=stop_tag, 
details=detailed, strict=strict, debug=debug) img_file.close() if not", "= None if 'Image ApplicationNotes' in data: try: xml = data['Image ApplicationNotes'].printable XMP", "metadata[\"MediaContainer\"] if 'Metadata' not in container: # no photos in this time range", "= [*dateSearchFailed, *doUpdate] lock.release() return True return False def loopThroughAllPhotos(): global doUpdate global", "while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata", "key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure path seperator is", "tags and rating # print(key) # print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except: pass", "*doUpdateTemp] # if we failed to process something then trigger a full scan", "Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except:", "0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: #", "detailed = True stop_tag = DEFAULT_STOP_TAG debug = False strict = False color", "path seperator is equal in plex and ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path", "# now start the observer observer.start() try: while True: time.sleep(5) except KeyboardInterrupt: observer.stop()", "# plex p = None # timer t = None # last incoming", "if we failed to process something then trigger a full scan if len(dateSearchFailed)", "\"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements = container[\"Metadata\"]", "update list 
pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not in doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire()", "a key list of all pics in the date range plexData = {}", "pass # ok if missing, probably firstRun if not firstRun and len(doUpdateTemp) ==", "try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date =", "datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global t", "update tags and rating # print(key) # print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except:", "rating # print(key) # print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except: pass # ok", "# Update the pics that changed in the date range for photo in", "plexUsers import plexUsers from lightroomTags import parse_xmp_for_lightroom_tags from photoElement import PhotoElement from config", "at startup fetchPhotosAndProcess() # now start the observer observer.start() try: while True: time.sleep(5)", "time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find all photos based on date if fetchAndProcessByDate(): #", "we failed to process something then defer those to a full scan if", "'%Y:%m:%d %H:%M:%S').date() except: # fallback to the modify date on the file datetimeModified", "False, user.token) # write the metadata # prepare the tags tagQuery = \"?\"", "trigger a full scan if len(dateSearchFailed) > 0: logging.warning(\"Some updated files were not", "started %i' % int(time.time())) if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection)", "= \"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode) +", "% 
path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if we failed to process something", "on_modified(self, event): self.process(event) def on_created(self, event): self.process(event) if __name__ == '__main__': if ppTagConfig.LOG_LEVEL", "stop_tag = DEFAULT_STOP_TAG debug = False strict = False color = False #exif_log.setup_logger(debug,", "failed to process something then defer those to a full scan if len(doUpdateTemp):", "for folder in p.photoLocations: if event.src_path.startswith(folder): # put file into forced update list", "[] lock = None firstRun = ppTagConfig.FORCE_RUN_AT_START # plex p = None #", "1) # make sure path seperator is equal in plex and ppTag if", "| 'created' | 'moved' | 'deleted' event.is_directory True | False event.src_path path/to/observed/file \"\"\"", "import logging import urllib import time import os import threading from datetime import", "from plexUsers import plexUsers from lightroomTags import parse_xmp_for_lightroom_tags from photoElement import PhotoElement from", "watchdog import sys import getopt import logging import urllib import time import os", "[] while len(doUpdate) > 0: lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate = [] lock.release()", "'%s'\" % path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if we failed to process", "fetchPhotosAndProcess() # now start the observer observer.start() try: while True: time.sleep(5) except KeyboardInterrupt:", "to process something then trigger a full scan if len(dateSearchFailed) > 0: logging.warning(\"Some", "in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if path in plexData.keys(): logging.info(\"Processing by date '%s'\"", "'%s'\" % src) firstRun = False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event):", "find the rest loopThroughAllPhotos() def fetchAndProcessByDate(): global 
doUpdate global lock dateSearchFailed = []", "photoGroups[date]: path = photo.path() # make sure path seperator is equal in plex", "offset = container[\"offset\"] size = container[\"size\"] start = start + size if totalSize-offset-size", "about missed triggers while len(doUpdate) > 0: # wait for 120 seconds of", "updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except: pass # ok if missing, probably firstRun if", "this section '%s'\" % src) firstRun = False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def", "by date for filepath in doUpdateTemp[:] : photoElement = parseExifAndTags(filepath) if photoElement: #", "img_file = open(str(filename), 'rb') data = process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict, debug=debug) img_file.close() if", "to find the rest loopThroughAllPhotos() def fetchAndProcessByDate(): global doUpdate global lock dateSearchFailed =", "path/to/observed/file \"\"\" if not event.is_directory: if (event.event_type == 'modified' or event.event_type == 'created'", "= None # last incoming event time lastTS = datetime.now() def updateMetadata(item, tags,", "or event.event_type == 'moved'): # check if file belongs to monitored section for", "if len(dateSearchFailed) > 0: logging.warning(\"Some updated files were not found by date range.\")", "str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL: %s\", url)", ": photoElement = parseExifAndTags(filepath) if photoElement: # this has exif data date =", "is not None : os.environ['TZ'] = ppTagConfig.TIMEZONE lock = threading.Lock() # setup observer", "False #exif_log.setup_logger(debug, color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file = open(str(filename), 'rb')", "elements: mediaType = photo[\"type\"] if mediaType != \"photo\": continue key = photo[\"ratingKey\"] src", "in this time range 
(probably wrong section) break elements = container[\"Metadata\"] totalSize =", "def parseExifAndTags(filename): data = getdata(filename) if not data: return None parsedXMP = getXMP(data)", "\"PUT\") def getdata(filename): detailed = True stop_tag = DEFAULT_STOP_TAG debug = False strict", "# this has exif data date = photoElement.date() if date in photoGroups.keys(): photoGroups[date].append(photoElement)", "loopThroughAllPhotos(): global doUpdate global firstRun doUpdateTemp = uniqify(doUpdate) doUpdate = [] toDo =", "through all, started %i' % int(time.time())) if p.photoSection: while toDo: url = \"/library/sections/\"", "size) #logging.info(\"URL: %s\", url) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if 'Metadata' not", "data['JPEGThumbnail'] if 'TIFFThumbnail' in data: del data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is unreadable\" %", "in photoGroups[date]: path = photo.path() # make sure path seperator is equal in", "check if file belongs to monitored section for folder in p.photoLocations: if event.src_path.startswith(folder):", "+ \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL: %s\", url) metadata = p.fetchPlexApi(url) container =", "filename img_file = open(str(filename), 'rb') data = process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict, debug=debug) img_file.close()", "preserving def fetchPhotosAndProcess(): global firstRun global lastTS if firstRun: # complete update on", "if date in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement] else: # missing or", "that plex can process any creates first while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) #", "% filename) return None return data def getXMP(data): XMP = None if 'Image", "uniqify(doUpdate) doUpdate = [] toDo = True start = 0 size = 1000", "lock = threading.Lock() # setup observer 
observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p", "if not data: return parsedXMP = getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\" % filename)", "logging.debug(\"'%s' is unreadable\" % filename) return None except: logging.error(\"Exif process_file error: '%s'\" %", "size = 1000 #print('loop through all, started %i' % int(time.time())) if p.photoSection: while", "= threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return list(dict.fromkeys(seq)) # order preserving def fetchPhotosAndProcess(): global", "photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement] else: # missing or not a photo", "key) plexData[src] = key # Update the pics that changed in the date", "False color = False #exif_log.setup_logger(debug, color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file", "parsedXMP: parsedXMP = {} parsedXMP['rating'] = 0 parsedXMP['tags'] = [] try: date =", "write the metadata # prepare the tags tagQuery = \"?\" i = 0", "0 size = 1000 #print('loop through all, started %i' % int(time.time())) if p.photoSection:", "photoGroups = {} # first group all photos by date for filepath in", "the metadata # prepare the tags tagQuery = \"?\" i = 0 for", "strict = False color = False #exif_log.setup_logger(debug, color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH +", "date range for photo in photoGroups[date]: path = photo.path() # make sure path", "%(i, urllib.parse.quote(tag.encode('utf-8'))) i = i + 1 #logging.debug(\" updateMetaData: tagQuery is '%s'\" %", "data: try: xml = data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse", "{} if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" +", "def process(self, event): \"\"\" event.event_type 'modified' | 'created' | 'moved' | 'deleted' 
event.is_directory", "path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers() # run at startup fetchPhotosAndProcess() # now start", "= getdata(filename) if not data: return parsedXMP = getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\"", "plexUsers() # run at startup fetchPhotosAndProcess() # now start the observer observer.start() try:", "timer t = None # last incoming event time lastTS = datetime.now() def", "not a photo doUpdateTemp.remove(filepath) for date in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode", "threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return list(dict.fromkeys(seq)) # order preserving def fetchPhotosAndProcess(): global firstRun", "= metadata[\"MediaContainer\"] elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size =", "missing, probably firstRun if not firstRun and len(doUpdateTemp) == 0: toDo = False", "%s -> %s\", src, key) plexData[src] = key # Update the pics that", "= path.replace(\"\\\\\",\"/\") if path in plexData.keys(): logging.info(\"Processing by date '%s'\" % path) updateMetadata(plexData[path],", "not data: return parsedXMP = getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\" % filename) updateMetadata(key,", "open(str(filename), 'rb') data = process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict, debug=debug) img_file.close() if not data:", "modify date on the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return PhotoElement(filename,", "ok if missing, probably firstRun if not firstRun and len(doUpdateTemp) == 0: toDo", "timer thread so process backlog # keep processing until there is nothing more", "= True start = 0 size = 1000 #print('loop through all, started %i'", "= container[\"offset\"] size = container[\"size\"] start = start + size if totalSize-offset-size ==", "to 
find all photos based on date if fetchAndProcessByDate(): # failed so loop", "| 'moved' | 'deleted' event.is_directory True | False event.src_path path/to/observed/file \"\"\" if not", "triggers while len(doUpdate) > 0: # wait for 120 seconds of idle time", "observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers() # run at startup", "#logging.info(\"URL: %s\", url) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if 'Metadata' not in", "the tags tagQuery = \"?\" i = 0 for tag in tags: tagQuery", "photoa to find the rest loopThroughAllPhotos() def fetchAndProcessByDate(): global doUpdate global lock dateSearchFailed", "found by date range.\") lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate] lock.release() return True return", "the pics that changed in the date range for photo in photoGroups[date]: path", "= datetime.now() if t is None or not t.is_alive() : logging.info(\"Starting timer\") t", "container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size = container[\"size\"] start = start", "ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file = open(str(filename), 'rb') data = process_file(img_file, stop_tag=stop_tag, details=detailed, strict=strict,", "not parsedXMP: parsedXMP = {} parsedXMP['rating'] = 0 parsedXMP['tags'] = [] try: date", "= photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s -> %s\", src, key) plexData[src] = key", "= \"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata = p.fetchPlexApi(url) container", "+ \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i = i + 1 #logging.debug(\" updateMetaData: tagQuery is", "pptag_path not in doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) 
lock.release() triggerProcess() return logging.debug(\"Ignored", "== '': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s - %(message)s') if ppTagConfig.TIMEZONE is", "ppTagConfig.TIMEZONE lock = threading.Lock() # setup observer observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True)", "parse XMP\") return XMP def updateTagsAndRating(key, filename): data = getdata(filename) if not data:", "except: logging.error(\"Unable to parse XMP\") return XMP def updateTagsAndRating(key, filename): data = getdata(filename)", "date, parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global t global lastTS lastTS = datetime.now() if", "list(dict.fromkeys(seq)) # order preserving def fetchPhotosAndProcess(): global firstRun global lastTS if firstRun: #", "'TIFFThumbnail' in data: del data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is unreadable\" % filename) return", "be in the timer thread so process backlog # keep processing until there", "p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"]", "photo.path() # make sure path seperator is equal in plex and ppTag if", "FIELD_TYPES from exif.exifread import process_file, exif_log, __version__ from plexUsers import plexUsers from lightroomTags", "to a full scan if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] # if we", "'created' or event.event_type == 'moved'): # check if file belongs to monitored section", "# must be in the timer thread so process backlog # keep processing", "if pptag_path not in doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess() return", "data: return parsedXMP = getXMP(data) if parsedXMP: logging.info(\"Processing '%s'\" % filename) 
updateMetadata(key, parsedXMP['tags'],", "sure path seperator is equal in plex and ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX:", "to do so we don't have to worry about missed triggers while len(doUpdate)", "doUpdate = [] lock.release() photoGroups = {} # first group all photos by", "= [] toDo = True start = 0 size = 1000 #print('loop through", "False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\" event.event_type 'modified' | 'created'", "processing until there is nothing more to do so we don't have to", "plex can process any creates first while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try", "loopThroughAllPhotos() else: # must be in the timer thread so process backlog #", "through all photoa to find the rest loopThroughAllPhotos() def fetchAndProcessByDate(): global doUpdate global", "del data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is unreadable\" % filename) return None except: logging.error(\"Exif", "(event.event_type == 'modified' or event.event_type == 'created' or event.event_type == 'moved'): # check", "'%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo = True start = 0 size = 1000 #", "while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\"", "+ str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL: %s\", url) metadata = p.fetchPlexApi(url)", "process something then trigger a full scan if len(dateSearchFailed) > 0: logging.warning(\"Some updated", "src) firstRun = False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\" event.event_type", "from photoElement import PhotoElement from config import ppTagConfig logger = exif_log.get_logger() doUpdate =", "= 
int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo = True start = 0 size =", "not in doUpdate: logging.info(\"Queued '%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess() return logging.debug(\"Ignored file", "def on_created(self, event): self.process(event) if __name__ == '__main__': if ppTagConfig.LOG_LEVEL is None or", "time range (probably wrong section) break elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset", "fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode = int((datetime.strptime(date.isoformat(), '%Y-%m-%d') + timedelta(days=1)).timestamp())-1 toDo = True", "1000 #print('loop through all, started %i' % int(time.time())) if p.photoSection: while toDo: url", "import os import threading from datetime import datetime, date, timedelta from watchdog.observers import", "doUpdate global lock dateSearchFailed = [] while len(doUpdate) > 0: lock.acquire() doUpdateTemp =", "# print(key) # print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except: pass # ok if", "'__main__': if ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL),", "1 #logging.debug(\" updateMetaData: tagQuery is '%s'\" % tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery),", "doUpdateTemp: logging.info(\"Skipped file not found in this section '%s'\" % src) firstRun =", "equal in plex and ppTag if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if", "'modified' | 'created' | 'moved' | 'deleted' event.is_directory True | False event.src_path path/to/observed/file", "= None # timer t = None # last incoming event time lastTS", "if 'TIFFThumbnail' in data: del data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is unreadable\" % filename)", "rest 
loopThroughAllPhotos() def fetchAndProcessByDate(): global doUpdate global lock dateSearchFailed = [] while len(doUpdate)", "except: # fallback to the modify date on the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename))", "# print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except: pass # ok if missing, probably", "global lastTS if firstRun: # complete update on startup requested loopThroughAllPhotos() else: #", "# setup observer observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers() #", "section for folder in p.photoLocations: if event.src_path.startswith(folder): # put file into forced update", "photo in elements: mediaType = photo[\"type\"] if mediaType != \"photo\": continue key =", "'%s'\" % filename) return None if 'JPEGThumbnail' in data: del data['JPEGThumbnail'] if 'TIFFThumbnail'", "firstRun if not firstRun and len(doUpdateTemp) == 0: toDo = False break if", "3 # pip install watchdog import sys import getopt import logging import urllib", "'%s'\", event.src_path) lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess() return logging.debug(\"Ignored file in wrong location: '%s'\"", "= photo.path() # make sure path seperator is equal in plex and ppTag", "0: # wait for 120 seconds of idle time so that plex can", "loopThroughAllPhotos() def fetchAndProcessByDate(): global doUpdate global lock dateSearchFailed = [] while len(doUpdate) >", "monitored section for folder in p.photoLocations: if event.src_path.startswith(folder): # put file into forced", "missing or not a photo doUpdateTemp.remove(filepath) for date in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(),", "None parsedXMP = getXMP(data) if not parsedXMP: parsedXMP = {} parsedXMP['rating'] = 0", "start = start + size if totalSize-offset-size == 0: toDo = False #", "user in p.users: data = 
p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token) # write the", "lastTS if firstRun: # complete update on startup requested loopThroughAllPhotos() else: # must", "nothing more to do so we don't have to worry about missed triggers", "> 0: lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate = [] lock.release() photoGroups = {}", "= container[\"size\"] start = start + size if totalSize-offset-size == 0: toDo =", "'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s - %(message)s') if ppTagConfig.TIMEZONE is not None : os.environ['TZ']", "def fetchPhotosAndProcess(): global firstRun global lastTS if firstRun: # complete update on startup", "path.replace(\"\\\\\",\"/\") if path in plexData.keys(): logging.info(\"Processing by date '%s'\" % path) updateMetadata(plexData[path], photo.tags(),", "url) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if 'Metadata' not in container: #", "= {} parsedXMP['rating'] = 0 parsedXMP['tags'] = [] try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF", "import time import os import threading from datetime import datetime, date, timedelta from", "last incoming event time lastTS = datetime.now() def updateMetadata(item, tags, rating): # update", "+ \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size)", "urllib import time import os import threading from datetime import datetime, date, timedelta", "return data def getXMP(data): XMP = None if 'Image ApplicationNotes' in data: try:", "\"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) #logging.info(\"URL:", "== 0: toDo = False # 
loop through all elements for photo in", "data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is unreadable\" % filename) return None except: logging.error(\"Exif process_file", "metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset", "data: del data['JPEGThumbnail'] if 'TIFFThumbnail' in data: del data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is", "lock.acquire() doUpdateTemp = uniqify(doUpdate) doUpdate = [] lock.release() photoGroups = {} # first", "information for '%s'\" % filename) return None if 'JPEGThumbnail' in data: del data['JPEGThumbnail']", "parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data for '%s'\" % filename) def parseExifAndTags(filename): data", "{} parsedXMP['rating'] = 0 parsedXMP['tags'] = [] try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag", "parseExifAndTags(filename): data = getdata(filename) if not data: return None parsedXMP = getXMP(data) if", "the date range for photo in photoGroups[date]: path = photo.path() # make sure", "lock.acquire() doUpdate.append(pptag_path) lock.release() triggerProcess() return logging.debug(\"Ignored file in wrong location: '%s'\" % event.src_path)", "make sure path seperator is equal in plex and ppTag if \"/\" in", "except IOError: logging.debug(\"'%s' is unreadable\" % filename) return None except: logging.error(\"Exif process_file error:", "+ \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size) metadata = p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] elements =", "those to a full scan if len(doUpdateTemp): dateSearchFailed = [*dateSearchFailed, *doUpdateTemp] # if", "parse_xmp_for_lightroom_tags(xml) except: logging.error(\"Unable to parse XMP\") return XMP def updateTagsAndRating(key, filename): data =", "this has exif data date = 
photoElement.date() if date in photoGroups.keys(): photoGroups[date].append(photoElement) else:", "del data['JPEGThumbnail'] if 'TIFFThumbnail' in data: del data['TIFFThumbnail'] except IOError: logging.debug(\"'%s' is unreadable\"", "= datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except: try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable,", "no photos in this time range (probably wrong section) break elements = container[\"Metadata\"]", "ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\" event.event_type 'modified' | 'created' | 'moved' | 'deleted'", "photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if we failed to process something then defer those", "while len(doUpdate) > 0: # wait for 120 seconds of idle time so", "global lock dateSearchFailed = [] while len(doUpdate) > 0: lock.acquire() doUpdateTemp = uniqify(doUpdate)", "rating for user in p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False, user.token) #", "logging.debug(\"Ignored event '%s' for file '%s'\" % (event.event_type,event.src_path)) def on_modified(self, event): self.process(event) def", "photo in photoGroups[date]: path = photo.path() # make sure path seperator is equal", "None if 'JPEGThumbnail' in data: del data['JPEGThumbnail'] if 'TIFFThumbnail' in data: del data['TIFFThumbnail']", "return None if 'JPEGThumbnail' in data: del data['JPEGThumbnail'] if 'TIFFThumbnail' in data: del", "format='%(asctime)s %(levelname)s - %(message)s') if ppTagConfig.TIMEZONE is not None : os.environ['TZ'] = ppTagConfig.TIMEZONE", "= plexUsers() # run at startup fetchPhotosAndProcess() # now start the observer observer.start()", "return XMP def updateTagsAndRating(key, filename): data = getdata(filename) if not data: return parsedXMP", "the timer thread so process backlog # keep 
processing until there is nothing", "120 seconds of idle time so that plex can process any creates first", "file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date() return PhotoElement(filename, date, parsedXMP['tags'], parsedXMP['rating']) def", "= container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size = container[\"size\"] start =", "put file into forced update list pptag_path=event.src_path.replace(ppTagConfig.PHOTOS_LIBRARY_PATH,\"\", 1) if pptag_path not in doUpdate:", "loop through all photoa to find the rest loopThroughAllPhotos() def fetchAndProcessByDate(): global doUpdate", "return list(dict.fromkeys(seq)) # order preserving def fetchPhotosAndProcess(): global firstRun global lastTS if firstRun:", "% src) firstRun = False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\"", "find all photos based on date if fetchAndProcessByDate(): # failed so loop through", "p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def getdata(filename): detailed = True stop_tag = DEFAULT_STOP_TAG debug", "in the timer thread so process backlog # keep processing until there is", "from exif.exifread import process_file, exif_log, __version__ from plexUsers import plexUsers from lightroomTags import", "return True return False def loopThroughAllPhotos(): global doUpdate global firstRun doUpdateTemp = uniqify(doUpdate)", "parsedXMP['tags'], parsedXMP['rating']) def triggerProcess(): global t global lastTS lastTS = datetime.now() if t", "observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH, recursive=True) p = plexUsers() # run at startup fetchPhotosAndProcess() # now", "doUpdate = [] toDo = True start = 0 size = 1000 #print('loop", "IOError: logging.debug(\"'%s' is unreadable\" % filename) return None except: logging.error(\"Exif process_file error: '%s'\"", "%H:%M:%S').date() 
except: # fallback to the modify date on the file datetimeModified =", "DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: # fallback to the modify date on the file", "% tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item, tagQuery), \"PUT\") def getdata(filename): detailed = True", "# update rating for user in p.users: data = p.fetchPlexApi(\"/:/rate?key=%s&identifier=com.plexapp.plugins.library&rating=%i\" %(item, rating),\"PUT\", False,", "int(time.time())) if p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" %", "str(p.photoSection) + \"/all?originallyAvailableAt%3E=\" + str(fromTimecode) + \"&originallyAvailableAt%3C=\" + str(toTimecode) + \"&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start,", "getdata(filename): detailed = True stop_tag = DEFAULT_STOP_TAG debug = False strict = False", "if path in plexData.keys(): logging.info(\"Processing by date '%s'\" % path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2)", "= 0 parsedXMP['tags'] = [] try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d", "True stop_tag = DEFAULT_STOP_TAG debug = False strict = False color = False", "and rating # print(key) # print(src) updateTagsAndRating(key, src) try: doUpdateTemp.remove(src) except: pass #", "or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s - %(message)s') if", "photoElement.date() if date in photoGroups.keys(): photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement] else: # missing", "error: '%s'\" % filename) return None return data def getXMP(data): XMP = None", "triggerProcess(): global t global lastTS lastTS = datetime.now() if t is None or", "watchdog.events import 
PatternMatchingEventHandler from exif.exifread.tags import DEFAULT_STOP_TAG, FIELD_TYPES from exif.exifread import process_file, exif_log,", "data def getXMP(data): XMP = None if 'Image ApplicationNotes' in data: try: xml", "process(self, event): \"\"\" event.event_type 'modified' | 'created' | 'moved' | 'deleted' event.is_directory True", "uniqify(seq): return list(dict.fromkeys(seq)) # order preserving def fetchPhotosAndProcess(): global firstRun global lastTS if", "photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s -> %s\", src, key) plexData[src]", "= [] lock = None firstRun = ppTagConfig.FORCE_RUN_AT_START # plex p = None", "== 'created' or event.event_type == 'moved'): # check if file belongs to monitored", "def getdata(filename): detailed = True stop_tag = DEFAULT_STOP_TAG debug = False strict =", "return logging.debug(\"Ignored file in wrong location: '%s'\" % event.src_path) else: logging.debug(\"Ignored event '%s'", "= ppTagConfig.TIMEZONE lock = threading.Lock() # setup observer observer = Observer() observer.schedule(PhotoHandler(), path=ppTagConfig.PHOTOS_LIBRARY_PATH,", "src in doUpdateTemp or firstRun: # update tags and rating # print(key) #", "'%s'\" % filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data for '%s'\" %", "range (probably wrong section) break elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset =", "!= \"photo\": continue key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s", "if photoElement: # this has exif data date = photoElement.date() if date in", "container[\"offset\"] size = container[\"size\"] start = start + size if totalSize-offset-size == 0:", "photoGroups[date] = [photoElement] else: # missing or not a photo doUpdateTemp.remove(filepath) for 
date", "process any creates first while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find", "and ppTag if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src = src.replace(\"/\",\"\\\\\") if src in doUpdateTemp", "for file '%s'\" % (event.event_type,event.src_path)) def on_modified(self, event): self.process(event) def on_created(self, event): self.process(event)", "color = False #exif_log.setup_logger(debug, color) try: filename = ppTagConfig.PHOTOS_LIBRARY_PATH + filename img_file =", "in tags: tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i = i +", "parsedXMP['tags'] = [] try: date = datetime.fromtimestamp(datetime.strptime(data['EXIF DateTimeOriginal'].printable+data['EXIF Tag 0x9011'].printable, '%Y:%m:%d %H:%M:%S%z').timestamp()).date() except:", "# wait for 120 seconds of idle time so that plex can process", "#print('loop through all, started %i' % int(time.time())) if p.photoSection: while toDo: url =", "ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if path in plexData.keys(): logging.info(\"Processing", "debug=debug) img_file.close() if not data: logging.info(\"No EXIF information for '%s'\" % filename) return", "len(doUpdate) > 0: # wait for 120 seconds of idle time so that", "+ 1 #logging.debug(\" updateMetaData: tagQuery is '%s'\" % tagQuery) data = p.fetchPlexApi(\"/library/metadata/%s%s\" %(item,", "= False strict = False color = False #exif_log.setup_logger(debug, color) try: filename =", "firstRun: # complete update on startup requested loopThroughAllPhotos() else: # must be in", "> 0: # wait for 120 seconds of idle time so that plex", "| False event.src_path path/to/observed/file \"\"\" if not event.is_directory: if (event.event_type == 'modified' or", "were not found by date range.\") lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate] lock.release() return", "firstRun = 
ppTagConfig.FORCE_RUN_AT_START # plex p = None # timer t = None", "pics that changed in the date range for photo in photoGroups[date]: path =", "except: try: date = datetime.strptime(data['EXIF DateTimeOriginal'].printable, '%Y:%m:%d %H:%M:%S').date() except: # fallback to the", "photoGroups[date].append(photoElement) else: photoGroups[date] = [photoElement] else: # missing or not a photo doUpdateTemp.remove(filepath)", "photos by date for filepath in doUpdateTemp[:] : photoElement = parseExifAndTags(filepath) if photoElement:", "date range.\") lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate] lock.release() return True return False def", "# check if file belongs to monitored section for folder in p.photoLocations: if", "\"?\" i = 0 for tag in tags: tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\"", "'': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s - %(message)s') if ppTagConfig.TIMEZONE is not", "location: '%s'\" % event.src_path) else: logging.debug(\"Ignored event '%s' for file '%s'\" % (event.event_type,event.src_path))", "event time lastTS = datetime.now() def updateMetadata(item, tags, rating): # update rating for", "import urllib import time import os import threading from datetime import datetime, date,", "firstRun = False class PhotoHandler(PatternMatchingEventHandler): patterns=[\"*\"] ignore_patterns=[\"*thumb*\"] def process(self, event): \"\"\" event.event_type 'modified'", "return None return data def getXMP(data): XMP = None if 'Image ApplicationNotes' in", "and ppTag if \"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if path in plexData.keys():", "key = photo[\"ratingKey\"] src = photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) #logging.info(\" Map: %s -> %s\", src,", "= 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s 
%(levelname)s - %(message)s') if ppTagConfig.TIMEZONE is not None :", "container[\"totalSize\"] offset = container[\"offset\"] size = container[\"size\"] start = start + size if", "is None or ppTagConfig.LOG_LEVEL == '': ppTagConfig.LOG_LEVEL = 'CRITICAL' logging.basicConfig(level=getattr(logging,ppTagConfig.LOG_LEVEL), format='%(asctime)s %(levelname)s -", "files were not found by date range.\") lock.acquire() doUpdate = [*dateSearchFailed, *doUpdate] lock.release()", "= p.fetchPlexApi(url) container = metadata[\"MediaContainer\"] if 'Metadata' not in container: # no photos", "parsedXMP['rating']) def triggerProcess(): global t global lastTS lastTS = datetime.now() if t is", "if parsedXMP: logging.info(\"Processing '%s'\" % filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data", "'Metadata' not in container: # no photos in this time range (probably wrong", "= i + 1 #logging.debug(\" updateMetaData: tagQuery is '%s'\" % tagQuery) data =", "thread so process backlog # keep processing until there is nothing more to", "photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure path seperator is equal in plex and ppTag", "global doUpdate global lock dateSearchFailed = [] while len(doUpdate) > 0: lock.acquire() doUpdateTemp", "def getXMP(data): XMP = None if 'Image ApplicationNotes' in data: try: xml =", "= 0 size = 1000 # Make a key list of all pics", "self.process(event) if __name__ == '__main__': if ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL == '':", "to the modify date on the file datetimeModified = datetime.fromtimestamp(os.path.getmtime(filename)) date = datetimeModified.date()", "global firstRun doUpdateTemp = uniqify(doUpdate) doUpdate = [] toDo = True start =", "DEFAULT_STOP_TAG debug = False strict = False color = False #exif_log.setup_logger(debug, color) try:", "'JPEGThumbnail' in data: del data['JPEGThumbnail'] if 'TIFFThumbnail' in data: 
del data['TIFFThumbnail'] except IOError:", "lastTS lastTS = datetime.now() if t is None or not t.is_alive() : logging.info(\"Starting", "t.is_alive() : logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start() def uniqify(seq): return list(dict.fromkeys(seq)) #", "lock.release() photoGroups = {} # first group all photos by date for filepath", "start = 0 size = 1000 #print('loop through all, started %i' % int(time.time()))", "t is None or not t.is_alive() : logging.info(\"Starting timer\") t = threading.Timer(120,fetchPhotosAndProcess) t.start()", "= photo[\"Media\"][0][\"Part\"][0][\"file\"].replace(ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX,\"\", 1) # make sure path seperator is equal in plex and", "if not firstRun and len(doUpdateTemp) == 0: toDo = False break if not", "logging.info(\"Skipped file not found in this section '%s'\" % src) firstRun = False", "True | False event.src_path path/to/observed/file \"\"\" if not event.is_directory: if (event.event_type == 'modified'", "container = metadata[\"MediaContainer\"] elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size", "all photoa to find the rest loopThroughAllPhotos() def fetchAndProcessByDate(): global doUpdate global lock", "import parse_xmp_for_lightroom_tags from photoElement import PhotoElement from config import ppTagConfig logger = exif_log.get_logger()", "so that plex can process any creates first while datetime.now()-lastTS < timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1)", "toDo = True start = 0 size = 1000 #print('loop through all, started", "\"\"\" event.event_type 'modified' | 'created' | 'moved' | 'deleted' event.is_directory True | False", "__version__ from plexUsers import plexUsers from lightroomTags import parse_xmp_for_lightroom_tags from photoElement import PhotoElement", "or event.event_type == 'created' or event.event_type == 'moved'): # check if file belongs", "seperator 
is equal in plex and ppTag if \"\\\\\" in ppTagConfig.PHOTOS_LIBRARY_PATH: src =", "tagQuery = tagQuery + \"tag[%s].tag.tag=%s&\" %(i, urllib.parse.quote(tag.encode('utf-8'))) i = i + 1 #logging.debug(\"", "full scan if len(dateSearchFailed) > 0: logging.warning(\"Some updated files were not found by", "plexData.keys(): logging.info(\"Processing by date '%s'\" % path) updateMetadata(plexData[path], photo.tags(), photo.rating()*2) doUpdateTemp.remove(path) # if", "pics in the date range plexData = {} if p.photoSection: while toDo: url", "= DEFAULT_STOP_TAG debug = False strict = False color = False #exif_log.setup_logger(debug, color)", "a photo doUpdateTemp.remove(filepath) for date in photoGroups.keys(): fromTimecode = int(datetime.strptime(date.isoformat(), '%Y-%m-%d').timestamp()) toTimecode =", "\"/\" in ppTagConfig.PHOTOS_LIBRARY_PATH_PLEX: path = path.replace(\"\\\\\",\"/\") if path in plexData.keys(): logging.info(\"Processing by date", "p.photoSection: while toDo: url = \"/library/sections/\" + str(p.photoSection) + \"/all?clusterZoomLevel=1&X-Plex-Container-Start=%i&X-Plex-Container-Size=%i\" % (start, size)", "plex p = None # timer t = None # last incoming event", "from lightroomTags import parse_xmp_for_lightroom_tags from photoElement import PhotoElement from config import ppTagConfig logger", "on_created(self, event): self.process(event) if __name__ == '__main__': if ppTagConfig.LOG_LEVEL is None or ppTagConfig.LOG_LEVEL", "elements = container[\"Metadata\"] totalSize = container[\"totalSize\"] offset = container[\"offset\"] size = container[\"size\"] start", "elements for photo in elements: mediaType = photo[\"type\"] if mediaType != \"photo\": continue", "do so we don't have to worry about missed triggers while len(doUpdate) >", "'Image ApplicationNotes' in data: try: xml = data['Image ApplicationNotes'].printable XMP = parse_xmp_for_lightroom_tags(xml) except:", "= True stop_tag = DEFAULT_STOP_TAG debug = False strict = False color =", "= \"?\" i = 
0 for tag in tags: tagQuery = tagQuery +", "for '%s'\" % filename) return None if 'JPEGThumbnail' in data: del data['JPEGThumbnail'] if", "if not data: logging.info(\"No EXIF information for '%s'\" % filename) return None if", "then trigger a full scan if len(dateSearchFailed) > 0: logging.warning(\"Some updated files were", "except: pass # ok if missing, probably firstRun if not firstRun and len(doUpdateTemp)", "photoElement = parseExifAndTags(filepath) if photoElement: # this has exif data date = photoElement.date()", "# if we failed to process something then trigger a full scan if", "global firstRun global lastTS if firstRun: # complete update on startup requested loopThroughAllPhotos()", "parsedXMP: logging.info(\"Processing '%s'\" % filename) updateMetadata(key, parsedXMP['tags'], int(parsedXMP['rating'])*2) else: logging.info(\"No XMP data for", "all elements for photo in elements: mediaType = photo[\"type\"] if mediaType != \"photo\":", "Update the pics that changed in the date range for photo in photoGroups[date]:", "timedelta(seconds=120): time.sleep(120-(datetime.now()-lastTS).total_seconds()+1) # Try to find all photos based on date if fetchAndProcessByDate():" ]
[ "= \"Weekly_Sales\" default_missing_value = 0 # check the datatype of user-defined input variables", "data set\") # convert to pandas df = X.to_pandas() # order by group(s)", "= 0 unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals = pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\",", "[ts_column]) # cross join of dates and groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key']", "value - additional columns will be null for the default values\"\"\" # Column", "depends on the column # check if user-defined inputs exist in the dataset", "isinstance(target_column, str): raise ValueError(\"Column: 'target_column' should be <str>\") # don't check datatype of", "fill all nulls with default value - this is appropriate for TS experiments,", "values\"\"\" # Column names in our dataset ts_column = \"Date\" group_by_columns = [\"Store\",", "present in the dataset\") if target_column not in features: raise ValueError(\"Column: '\" +", "by Date records and fill with a default value - additional columns will", "check datatype of default_missing_value because it depends on the column # check if", "axis=1) # join back to the original dataset df_filled = pd.merge(df, all_vals, how=\"outer\")", "be <str>\") # don't check datatype of default_missing_value because it depends on the", "missing Group by Date records and fill with a default value - additional", "datatype of user-defined input variables if not isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column' should", "<str>\") if not isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns' should be <list>\") if not", "\"Dept\"] target_column = \"Weekly_Sales\" default_missing_value = 0 # check the datatype of user-defined", "ValueError(\"Variable: 'ts_column' should be <str>\") if not isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns' should", 
"default_missing_value because it depends on the column # check if user-defined inputs exist", "check if user-defined inputs exist in the dataset features = list(X.names) if ts_column", "isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns' should be <list>\") if not isinstance(target_column, str): raise", "the data set\") # convert to pandas df = X.to_pandas() # order by", "'\" + ts_column + \"' is not present in the data set\") for", "any missing Group by Date records and fill with a default value -", "the data set\") for _ in group_by_columns: if _ not in features: raise", "'ts_column' should be <str>\") if not isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns' should be", "how=\"outer\").drop(\"key\", axis=1) # join back to the original dataset df_filled = pd.merge(df, all_vals,", "not isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column' should be <str>\") if not isinstance(group_by_columns, list):", "is not present in the dataset\") if target_column not in features: raise ValueError(\"Column:", "# check if user-defined inputs exist in the dataset features = list(X.names) if", "target_column + \"' is not present in the data set\") # convert to", "ts_column = \"Date\" group_by_columns = [\"Store\", \"Dept\"] target_column = \"Weekly_Sales\" default_missing_value = 0", "ValueError(\"Group by Column: '\" + str(_) + \"' is not present in the", "+ \"' is not present in the data set\") # convert to pandas", "0 all_vals = pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) # join back to the original", "ValueError(\"Column: '\" + ts_column + \"' is not present in the data set\")", "in group_by_columns: if _ not in features: raise ValueError(\"Group by Column: '\" +", "in features: raise ValueError(\"Column: '\" + target_column + \"' is not present in", "user-defined inputs exist in the dataset features = list(X.names) if ts_column not in", "+ \"' is not present in the 
dataset\") if target_column not in features:", "not in features: raise ValueError(\"Column: '\" + target_column + \"' is not present", "all_vals = pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) # join back to the original dataset", "# order by group(s) and time df = df.sort_values(group_by_columns + [ts_column]) # cross", "TS experiments, even if there were existing nulls df_filled[target_column] = df_filled[target_column].fillna(0) return df_filled", "pd.merge(df, all_vals, how=\"outer\") # fill all nulls with default value - this is", "is appropriate for TS experiments, even if there were existing nulls df_filled[target_column] =", "not in features: raise ValueError(\"Column: '\" + ts_column + \"' is not present", "additional columns will be null for the default values\"\"\" # Column names in", "+ \"' is not present in the data set\") for _ in group_by_columns:", "unique_groups, how=\"outer\").drop(\"key\", axis=1) # join back to the original dataset df_filled = pd.merge(df,", "= X.to_pandas() # order by group(s) and time df = df.sort_values(group_by_columns + [ts_column])", "<str>\") # don't check datatype of default_missing_value because it depends on the column", "cross join of dates and groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0", "the datatype of user-defined input variables if not isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column'", "_ in group_by_columns: if _ not in features: raise ValueError(\"Group by Column: '\"", "[\"Store\", \"Dept\"] target_column = \"Weekly_Sales\" default_missing_value = 0 # check the datatype of", "raise ValueError(\"Column: '\" + ts_column + \"' is not present in the data", "columns=[ts_column]) unique_dates['key'] = 0 unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals = pd.merge(unique_dates,", "\"Date\" group_by_columns = [\"Store\", \"Dept\"] target_column = \"Weekly_Sales\" 
default_missing_value = 0 # check", "on the column # check if user-defined inputs exist in the dataset features", "unique_groups['key'] = 0 all_vals = pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) # join back to", "0 unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals = pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1)", "time df = df.sort_values(group_by_columns + [ts_column]) # cross join of dates and groups", "records and fill with a default value - additional columns will be null", "column # check if user-defined inputs exist in the dataset features = list(X.names)", "should be <str>\") if not isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns' should be <list>\")", "+ target_column + \"' is not present in the data set\") # convert", "+ ts_column + \"' is not present in the data set\") for _", "isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column' should be <str>\") if not isinstance(group_by_columns, list): raise", "columns will be null for the default values\"\"\" # Column names in our", "and time df = df.sort_values(group_by_columns + [ts_column]) # cross join of dates and", "and groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0 unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key']", "- this is appropriate for TS experiments, even if there were existing nulls", "target_column not in features: raise ValueError(\"Column: '\" + target_column + \"' is not", "# don't check datatype of default_missing_value because it depends on the column #", "not isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns' should be <list>\") if not isinstance(target_column, str):", "exist in the dataset features = list(X.names) if ts_column not in features: raise", "present in the data set\") for _ in group_by_columns: if _ not in", "join back to 
the original dataset df_filled = pd.merge(df, all_vals, how=\"outer\") # fill", "appropriate for TS experiments, even if there were existing nulls df_filled[target_column] = df_filled[target_column].fillna(0)", "# fill all nulls with default value - this is appropriate for TS", "user-defined input variables if not isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column' should be <str>\")", "is not present in the data set\") for _ in group_by_columns: if _", "Group by Date records and fill with a default value - additional columns", "if target_column not in features: raise ValueError(\"Column: '\" + target_column + \"' is", "to pandas df = X.to_pandas() # order by group(s) and time df =", "target_column = \"Weekly_Sales\" default_missing_value = 0 # check the datatype of user-defined input", "the dataset\") if target_column not in features: raise ValueError(\"Column: '\" + target_column +", "Column names in our dataset ts_column = \"Date\" group_by_columns = [\"Store\", \"Dept\"] target_column", "with default value - this is appropriate for TS experiments, even if there", "in the dataset features = list(X.names) if ts_column not in features: raise ValueError(\"Column:", "default value - additional columns will be null for the default values\"\"\" #", "for the default values\"\"\" # Column names in our dataset ts_column = \"Date\"", "for TS experiments, even if there were existing nulls df_filled[target_column] = df_filled[target_column].fillna(0) return", "group(s) and time df = df.sort_values(group_by_columns + [ts_column]) # cross join of dates", "pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) # join back to the original dataset df_filled =", "= [\"Store\", \"Dept\"] target_column = \"Weekly_Sales\" default_missing_value = 0 # check the datatype", "features: raise ValueError(\"Column: '\" + target_column + \"' is not present in the", "unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals = 
pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) #", "= list(X.names) if ts_column not in features: raise ValueError(\"Column: '\" + ts_column +", "not present in the data set\") for _ in group_by_columns: if _ not", "input variables if not isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column' should be <str>\") if", "the column # check if user-defined inputs exist in the dataset features =", "\"\"\"Add any missing Group by Date records and fill with a default value", "default value - this is appropriate for TS experiments, even if there were", "back to the original dataset df_filled = pd.merge(df, all_vals, how=\"outer\") # fill all", "ValueError(\"Column: '\" + target_column + \"' is not present in the data set\")", "in our dataset ts_column = \"Date\" group_by_columns = [\"Store\", \"Dept\"] target_column = \"Weekly_Sales\"", "in the dataset\") if target_column not in features: raise ValueError(\"Column: '\" + target_column", "= df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals = pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) # join", "join of dates and groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0 unique_groups", "\"' is not present in the data set\") for _ in group_by_columns: if", "= df.sort_values(group_by_columns + [ts_column]) # cross join of dates and groups unique_dates =", "not present in the dataset\") if target_column not in features: raise ValueError(\"Column: '\"", "+ str(_) + \"' is not present in the dataset\") if target_column not", "because it depends on the column # check if user-defined inputs exist in", "the default values\"\"\" # Column names in our dataset ts_column = \"Date\" group_by_columns", "check the datatype of user-defined input variables if not isinstance(ts_column, str): raise ValueError(\"Variable:", "'target_column' should be <str>\") # don't check datatype of default_missing_value 
because it depends", "variables if not isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column' should be <str>\") if not", "dataset features = list(X.names) if ts_column not in features: raise ValueError(\"Column: '\" +", "str(_) + \"' is not present in the dataset\") if target_column not in", "= 0 # check the datatype of user-defined input variables if not isinstance(ts_column,", "not present in the data set\") # convert to pandas df = X.to_pandas()", "should be <str>\") # don't check datatype of default_missing_value because it depends on", "inputs exist in the dataset features = list(X.names) if ts_column not in features:", "+ [ts_column]) # cross join of dates and groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column])", "set\") # convert to pandas df = X.to_pandas() # order by group(s) and", "_ not in features: raise ValueError(\"Group by Column: '\" + str(_) + \"'", "not isinstance(target_column, str): raise ValueError(\"Column: 'target_column' should be <str>\") # don't check datatype", "be null for the default values\"\"\" # Column names in our dataset ts_column", "Column: '\" + str(_) + \"' is not present in the dataset\") if", "present in the data set\") # convert to pandas df = X.to_pandas() #", "= 0 all_vals = pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) # join back to the", "# Column names in our dataset ts_column = \"Date\" group_by_columns = [\"Store\", \"Dept\"]", "unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0 unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key'] = 0", "not in features: raise ValueError(\"Group by Column: '\" + str(_) + \"' is", "how=\"outer\") # fill all nulls with default value - this is appropriate for", "with a default value - additional columns will be null for the default", "if user-defined inputs exist in the dataset features = list(X.names) if ts_column not", "'\" + target_column + \"' 
is not present in the data set\") #", "pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0 unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals =", "ValueError(\"Column: 'target_column' should be <str>\") # don't check datatype of default_missing_value because it", "and fill with a default value - additional columns will be null for", "in the data set\") # convert to pandas df = X.to_pandas() # order", "= pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0 unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals", "unique_dates['key'] = 0 unique_groups = df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals = pd.merge(unique_dates, unique_groups,", "= \"Date\" group_by_columns = [\"Store\", \"Dept\"] target_column = \"Weekly_Sales\" default_missing_value = 0 #", "the original dataset df_filled = pd.merge(df, all_vals, how=\"outer\") # fill all nulls with", "raise ValueError(\"Variable: 'ts_column' should be <str>\") if not isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns'", "convert to pandas df = X.to_pandas() # order by group(s) and time df", "is not present in the data set\") # convert to pandas df =", "null for the default values\"\"\" # Column names in our dataset ts_column =", "order by group(s) and time df = df.sort_values(group_by_columns + [ts_column]) # cross join", "don't check datatype of default_missing_value because it depends on the column # check", "fill with a default value - additional columns will be null for the", "be <list>\") if not isinstance(target_column, str): raise ValueError(\"Column: 'target_column' should be <str>\") #", "# join back to the original dataset df_filled = pd.merge(df, all_vals, how=\"outer\") #", "pandas df = X.to_pandas() # order by group(s) and time df = df.sort_values(group_by_columns", "of user-defined input variables if not 
isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column' should be", "dataset df_filled = pd.merge(df, all_vals, how=\"outer\") # fill all nulls with default value", "for _ in group_by_columns: if _ not in features: raise ValueError(\"Group by Column:", "list(X.names) if ts_column not in features: raise ValueError(\"Column: '\" + ts_column + \"'", "raise ValueError(\"Group by Column: '\" + str(_) + \"' is not present in", "= pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) # join back to the original dataset df_filled", "default values\"\"\" # Column names in our dataset ts_column = \"Date\" group_by_columns =", "of default_missing_value because it depends on the column # check if user-defined inputs", "df = X.to_pandas() # order by group(s) and time df = df.sort_values(group_by_columns +", "df_filled = pd.merge(df, all_vals, how=\"outer\") # fill all nulls with default value -", "\"' is not present in the dataset\") if target_column not in features: raise", "if not isinstance(target_column, str): raise ValueError(\"Column: 'target_column' should be <str>\") # don't check", "dates and groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0 unique_groups = df[group_by_columns].drop_duplicates()", "datatype of default_missing_value because it depends on the column # check if user-defined", "raise ValueError(\"Column: 'target_column' should be <str>\") # don't check datatype of default_missing_value because", "should be <list>\") if not isinstance(target_column, str): raise ValueError(\"Column: 'target_column' should be <str>\")", "all nulls with default value - this is appropriate for TS experiments, even", "all_vals, how=\"outer\") # fill all nulls with default value - this is appropriate", "# check the datatype of user-defined input variables if not isinstance(ts_column, str): raise", "features: raise ValueError(\"Column: '\" + ts_column + \"' is not present in the", "features = 
list(X.names) if ts_column not in features: raise ValueError(\"Column: '\" + ts_column", "ts_column not in features: raise ValueError(\"Column: '\" + ts_column + \"' is not", "features: raise ValueError(\"Group by Column: '\" + str(_) + \"' is not present", "# cross join of dates and groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] =", "by Column: '\" + str(_) + \"' is not present in the dataset\")", "this is appropriate for TS experiments, even if there were existing nulls df_filled[target_column]", "if _ not in features: raise ValueError(\"Group by Column: '\" + str(_) +", "df = df.sort_values(group_by_columns + [ts_column]) # cross join of dates and groups unique_dates", "of dates and groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0 unique_groups =", "<list>\") if not isinstance(target_column, str): raise ValueError(\"Column: 'target_column' should be <str>\") # don't", "0 # check the datatype of user-defined input variables if not isinstance(ts_column, str):", "in features: raise ValueError(\"Column: '\" + ts_column + \"' is not present in", "our dataset ts_column = \"Date\" group_by_columns = [\"Store\", \"Dept\"] target_column = \"Weekly_Sales\" default_missing_value", "a default value - additional columns will be null for the default values\"\"\"", "'group_by_columns' should be <list>\") if not isinstance(target_column, str): raise ValueError(\"Column: 'target_column' should be", "be <str>\") if not isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns' should be <list>\") if", "names in our dataset ts_column = \"Date\" group_by_columns = [\"Store\", \"Dept\"] target_column =", "group_by_columns = [\"Store\", \"Dept\"] target_column = \"Weekly_Sales\" default_missing_value = 0 # check the", "= pd.merge(df, all_vals, how=\"outer\") # fill all nulls with default value - this", "dataset ts_column = \"Date\" group_by_columns = 
[\"Store\", \"Dept\"] target_column = \"Weekly_Sales\" default_missing_value =", "df.sort_values(group_by_columns + [ts_column]) # cross join of dates and groups unique_dates = pd.DataFrame(df[ts_column].unique(),", "nulls with default value - this is appropriate for TS experiments, even if", "\"Weekly_Sales\" default_missing_value = 0 # check the datatype of user-defined input variables if", "if not isinstance(ts_column, str): raise ValueError(\"Variable: 'ts_column' should be <str>\") if not isinstance(group_by_columns,", "ValueError(\"Column: 'group_by_columns' should be <list>\") if not isinstance(target_column, str): raise ValueError(\"Column: 'target_column' should", "str): raise ValueError(\"Variable: 'ts_column' should be <str>\") if not isinstance(group_by_columns, list): raise ValueError(\"Column:", "the dataset features = list(X.names) if ts_column not in features: raise ValueError(\"Column: '\"", "Date records and fill with a default value - additional columns will be", "default_missing_value = 0 # check the datatype of user-defined input variables if not", "list): raise ValueError(\"Column: 'group_by_columns' should be <list>\") if not isinstance(target_column, str): raise ValueError(\"Column:", "raise ValueError(\"Column: '\" + target_column + \"' is not present in the data", "- additional columns will be null for the default values\"\"\" # Column names", "in features: raise ValueError(\"Group by Column: '\" + str(_) + \"' is not", "# convert to pandas df = X.to_pandas() # order by group(s) and time", "ts_column + \"' is not present in the data set\") for _ in", "raise ValueError(\"Column: 'group_by_columns' should be <list>\") if not isinstance(target_column, str): raise ValueError(\"Column: 'target_column'", "group_by_columns: if _ not in features: raise ValueError(\"Group by Column: '\" + str(_)", "groups unique_dates = pd.DataFrame(df[ts_column].unique(), columns=[ts_column]) unique_dates['key'] = 0 unique_groups = 
df[group_by_columns].drop_duplicates() unique_groups['key'] =", "original dataset df_filled = pd.merge(df, all_vals, how=\"outer\") # fill all nulls with default", "str): raise ValueError(\"Column: 'target_column' should be <str>\") # don't check datatype of default_missing_value", "to the original dataset df_filled = pd.merge(df, all_vals, how=\"outer\") # fill all nulls", "by group(s) and time df = df.sort_values(group_by_columns + [ts_column]) # cross join of", "'\" + str(_) + \"' is not present in the dataset\") if target_column", "set\") for _ in group_by_columns: if _ not in features: raise ValueError(\"Group by", "will be null for the default values\"\"\" # Column names in our dataset", "X.to_pandas() # order by group(s) and time df = df.sort_values(group_by_columns + [ts_column]) #", "value - this is appropriate for TS experiments, even if there were existing", "df[group_by_columns].drop_duplicates() unique_groups['key'] = 0 all_vals = pd.merge(unique_dates, unique_groups, how=\"outer\").drop(\"key\", axis=1) # join back", "in the data set\") for _ in group_by_columns: if _ not in features:", "if not isinstance(group_by_columns, list): raise ValueError(\"Column: 'group_by_columns' should be <list>\") if not isinstance(target_column,", "data set\") for _ in group_by_columns: if _ not in features: raise ValueError(\"Group", "\"' is not present in the data set\") # convert to pandas df", "it depends on the column # check if user-defined inputs exist in the", "if ts_column not in features: raise ValueError(\"Column: '\" + ts_column + \"' is", "dataset\") if target_column not in features: raise ValueError(\"Column: '\" + target_column + \"'" ]
[ "07:09 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('admin_panel', '0016_wish_list'),", "Generated by Django 3.2.1 on 2021-08-08 07:09 from django.db import migrations, models class", "3.2.1 on 2021-08-08 07:09 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "dependencies = [ ('admin_panel', '0016_wish_list'), ] operations = [ migrations.AddField( model_name='wish_list', name='is_wished', field=models.BooleanField(default=False),", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('admin_panel', '0016_wish_list'), ]", "class Migration(migrations.Migration): dependencies = [ ('admin_panel', '0016_wish_list'), ] operations = [ migrations.AddField( model_name='wish_list',", "on 2021-08-08 07:09 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "Django 3.2.1 on 2021-08-08 07:09 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "Migration(migrations.Migration): dependencies = [ ('admin_panel', '0016_wish_list'), ] operations = [ migrations.AddField( model_name='wish_list', name='is_wished',", "migrations, models class Migration(migrations.Migration): dependencies = [ ('admin_panel', '0016_wish_list'), ] operations = [", "[ ('admin_panel', '0016_wish_list'), ] operations = [ migrations.AddField( model_name='wish_list', name='is_wished', field=models.BooleanField(default=False), ), ]", "# Generated by Django 3.2.1 on 2021-08-08 07:09 from django.db import migrations, models", "= [ ('admin_panel', '0016_wish_list'), ] operations = [ migrations.AddField( model_name='wish_list', name='is_wished', field=models.BooleanField(default=False), ),", "by Django 3.2.1 on 2021-08-08 07:09 from django.db import migrations, models class Migration(migrations.Migration):", "2021-08-08 07:09 from django.db import migrations, models class 
Migration(migrations.Migration): dependencies = [ ('admin_panel',", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('admin_panel', '0016_wish_list'), ] operations =", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('admin_panel', '0016_wish_list'), ] operations", "models class Migration(migrations.Migration): dependencies = [ ('admin_panel', '0016_wish_list'), ] operations = [ migrations.AddField(" ]
[ "aff_path): # Warp the softmax cmd = 'reg_resample -ref %s -flo %s -trans", "\"\"\" import os from argparse import ArgumentParser import numpy as np import nibabel", "of the softmax prediction to the subject space. @author: <NAME> (<EMAIL>) \"\"\" import", "post-process the predicted softmax segmentation. This script performs rigid register of the softmax", "output_dir, '%s_inv.txt' % aff_name, ) cmd = 'reg_transform -invAff %s %s' % (aff_path,", "'%s_inv.txt' % aff_name, ) cmd = 'reg_transform -invAff %s %s' % (aff_path, save_inv_aff_path)", "that was used' 'to go from subject space to template space.') parser.add_argument('--input_img', required=True,", "effects due to padding with 0 AND change order of channels softmax_nii =", "import numpy as np import nibabel as nib parser = ArgumentParser() parser.add_argument('--softmax', required=True,", "= 'reg_transform -invAff %s %s' % (aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path,", "= np.sum(softmax, axis=-1) softmax[:, :, :, 0] += 1. - sum_proba post_softmax_nii =", "os.system(cmd) # Fix border effects due to padding with 0 AND change order", "softmax cmd = 'reg_resample -ref %s -flo %s -trans %s -res %s -inter", "template space.') parser.add_argument('--aff', required=True, help='path to the Affine transformation that was used' 'to", "rigid register of the softmax prediction to the subject space. @author: <NAME> (<EMAIL>)", "required=True, help='path to the softmax prediction in the template space.') parser.add_argument('--aff', required=True, help='path", "softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the inverse", "np.sum(softmax, axis=-1) softmax[:, :, :, 0] += 1. 
- sum_proba post_softmax_nii = nib.Nifti1Image(softmax,", "1 -pad 0 -voff' % \\ (ref_img_path, softmax_path, aff_path, save_path) os.system(cmd) # Fix", "preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt',", "transform print('Invert %s' % args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp the", "performs rigid register of the softmax prediction to the subject space. @author: <NAME>", "+= 1. - sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args): if", "template space.') parser.add_argument('--input_img', required=True, help='Path to the SRR to preprocess') parser.add_argument('--output_folder', required=True) def", "not os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt' %", "parser.add_argument('--aff', required=True, help='path to the Affine transformation that was used' 'to go from", "-trans %s -res %s -inter 1 -pad 0 -voff' % \\ (ref_img_path, softmax_path,", "order of channels softmax_nii = nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1)", "%s -res %s -inter 1 -pad 0 -voff' % \\ (ref_img_path, softmax_path, aff_path,", "aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt' % aff_name, ) cmd", "of channels softmax_nii = nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1) softmax[:,", "os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt' % aff_name,", 
"save_path) def main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the inverse affine transform", "(<EMAIL>) \"\"\" import os from argparse import ArgumentParser import numpy as np import", "This script performs rigid register of the softmax prediction to the subject space.", "required=True) def invert_affine(aff_path, output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path", "= softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1) softmax[:, :, :, 0] += 1. -", "Warp the softmax cmd = 'reg_resample -ref %s -flo %s -trans %s -res", "print(inv_aff_path) # Warp the softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' % args.softmax)", "not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the inverse affine transform print('Invert %s' % args.aff)", "def main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the inverse affine transform print('Invert", "argparse import ArgumentParser import numpy as np import nibabel as nib parser =", "= invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp the softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp", "nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1) softmax[:, :, :, 0] +=", "to post-process the predicted softmax segmentation. This script performs rigid register of the", "if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the inverse affine transform print('Invert %s' %", "output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join( output_dir,", "the softmax prediction to the subject space. 
@author: <NAME> (<EMAIL>) \"\"\" import os", "inverse affine transform print('Invert %s' % args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) #", "script performs rigid register of the softmax prediction to the subject space. @author:", "0 AND change order of channels softmax_nii = nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba", "os.mkdir(args.output_folder) # Compute the inverse affine transform print('Invert %s' % args.aff) inv_aff_path =", "-flo %s -trans %s -res %s -inter 1 -pad 0 -voff' % \\", "-voff' % \\ (ref_img_path, softmax_path, aff_path, save_path) os.system(cmd) # Fix border effects due", "subject space. @author: <NAME> (<EMAIL>) \"\"\" import os from argparse import ArgumentParser import", "save_path, aff_path): # Warp the softmax cmd = 'reg_resample -ref %s -flo %s", "Affine transformation that was used' 'to go from subject space to template space.')", "SRR to preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name", "args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp the softmax save_path = os.path.join(args.output_folder,", "parser.add_argument('--softmax', required=True, help='path to the softmax prediction in the template space.') parser.add_argument('--aff', required=True,", "sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder)", ":, :, 0] += 1. 
- sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path)", "due to padding with 0 AND change order of channels softmax_nii = nib.load(save_path)", "np import nibabel as nib parser = ArgumentParser() parser.add_argument('--softmax', required=True, help='path to the", "def warp_softmax(softmax_path, ref_img_path, save_path, aff_path): # Warp the softmax cmd = 'reg_resample -ref", "as np import nibabel as nib parser = ArgumentParser() parser.add_argument('--softmax', required=True, help='path to", "softmax prediction to the subject space. @author: <NAME> (<EMAIL>) \"\"\" import os from", "sum_proba = np.sum(softmax, axis=-1) softmax[:, :, :, 0] += 1. - sum_proba post_softmax_nii", "nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the", "(aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path, aff_path): # Warp the", "affine transform print('Invert %s' % args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp", "os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' % args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, ) if", "the softmax cmd = 'reg_resample -ref %s -flo %s -trans %s -res %s", "save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' % args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path,", "%s -inter 1 -pad 0 -voff' % \\ (ref_img_path, softmax_path, aff_path, save_path) os.system(cmd)", "\"\"\" Use this script to post-process the predicted softmax segmentation. 
This script performs", "to template space.') parser.add_argument('--input_img', required=True, help='Path to the SRR to preprocess') parser.add_argument('--output_folder', required=True)", "save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt' % aff_name, ) cmd = 'reg_transform -invAff %s", "os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path, aff_path): # Warp the softmax cmd", "border effects due to padding with 0 AND change order of channels softmax_nii", "return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path, aff_path): # Warp the softmax cmd =", "nib parser = ArgumentParser() parser.add_argument('--softmax', required=True, help='path to the softmax prediction in the", "space.') parser.add_argument('--input_img', required=True, help='Path to the SRR to preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path,", "'to go from subject space to template space.') parser.add_argument('--input_img', required=True, help='Path to the", "help='Path to the SRR to preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir): if not", "invert_affine(aff_path, output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join(", "parser.add_argument('--input_img', required=True, help='Path to the SRR to preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir):", "save_inv_aff_path) os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path, aff_path): # Warp the softmax", "os.path.join( output_dir, '%s_inv.txt' % aff_name, ) cmd = 'reg_transform -invAff %s %s' %", "ArgumentParser import numpy as np import nibabel as nib parser = ArgumentParser() parser.add_argument('--softmax',", "# Compute the inverse affine transform print('Invert %s' % 
args.aff) inv_aff_path = invert_affine(aff_path=args.aff,", "warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, ) if __name__ == '__main__': args = parser.parse_args()", "prediction to the subject space. @author: <NAME> (<EMAIL>) \"\"\" import os from argparse", "-pad 0 -voff' % \\ (ref_img_path, softmax_path, aff_path, save_path) os.system(cmd) # Fix border", "channels softmax_nii = nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1) softmax[:, :,", "the template space.') parser.add_argument('--aff', required=True, help='path to the Affine transformation that was used'", "to the Affine transformation that was used' 'to go from subject space to", "% (aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path, aff_path): # Warp", "- sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args): if not os.path.exists(args.output_folder):", "register of the softmax prediction to the subject space. @author: <NAME> (<EMAIL>) \"\"\"", "%s -flo %s -trans %s -res %s -inter 1 -pad 0 -voff' %", "-invAff %s %s' % (aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path,", "this script to post-process the predicted softmax segmentation. 
This script performs rigid register", "(ref_img_path, softmax_path, aff_path, save_path) os.system(cmd) # Fix border effects due to padding with", "with 0 AND change order of channels softmax_nii = nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32)", "'') save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt' % aff_name, ) cmd = 'reg_transform -invAff", "= 'reg_resample -ref %s -flo %s -trans %s -res %s -inter 1 -pad", "the Affine transformation that was used' 'to go from subject space to template", "the predicted softmax segmentation. This script performs rigid register of the softmax prediction", "-inter 1 -pad 0 -voff' % \\ (ref_img_path, softmax_path, aff_path, save_path) os.system(cmd) #", "aff_name, ) cmd = 'reg_transform -invAff %s %s' % (aff_path, save_inv_aff_path) os.system(cmd) return", "space to template space.') parser.add_argument('--input_img', required=True, help='Path to the SRR to preprocess') parser.add_argument('--output_folder',", "if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt'", "1. - sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args): if not", "\\ (ref_img_path, softmax_path, aff_path, save_path) os.system(cmd) # Fix border effects due to padding", "softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1) softmax[:, :, :, 0] += 1.", "space. 
@author: <NAME> (<EMAIL>) \"\"\" import os from argparse import ArgumentParser import numpy", "softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, ) if __name__ == '__main__': args = parser.parse_args() main(args)", "args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, ) if __name__ == '__main__': args =", "numpy as np import nibabel as nib parser = ArgumentParser() parser.add_argument('--softmax', required=True, help='path", "softmax prediction in the template space.') parser.add_argument('--aff', required=True, help='path to the Affine transformation", "os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt' % aff_name, ) cmd = 'reg_transform", "'softmax.nii.gz') print('warp %s' % args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, ) if __name__", "the SRR to preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir)", "nib.save(post_softmax_nii, save_path) def main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the inverse affine", "invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp the softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s'", "os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt' % aff_name, )", "the subject space. @author: <NAME> (<EMAIL>) \"\"\" import os from argparse import ArgumentParser", "script to post-process the predicted softmax segmentation. 
This script performs rigid register of", "to padding with 0 AND change order of channels softmax_nii = nib.load(save_path) softmax", "'reg_transform -invAff %s %s' % (aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path,", "import ArgumentParser import numpy as np import nibabel as nib parser = ArgumentParser()", "the softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' % args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img,", "post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) #", "required=True, help='path to the Affine transformation that was used' 'to go from subject", "the softmax prediction in the template space.') parser.add_argument('--aff', required=True, help='path to the Affine", "inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp the softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz')", "% \\ (ref_img_path, softmax_path, aff_path, save_path) os.system(cmd) # Fix border effects due to", "softmax segmentation. 
This script performs rigid register of the softmax prediction to the", "Warp the softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' % args.softmax) warp_softmax( softmax_path=args.softmax,", "# Fix border effects due to padding with 0 AND change order of", "%s %s' % (aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path, aff_path):", "in the template space.') parser.add_argument('--aff', required=True, help='path to the Affine transformation that was", "ref_img_path, save_path, aff_path): # Warp the softmax cmd = 'reg_resample -ref %s -flo", "= os.path.join( output_dir, '%s_inv.txt' % aff_name, ) cmd = 'reg_transform -invAff %s %s'", "%s' % (aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path, aff_path): #", "as nib parser = ArgumentParser() parser.add_argument('--softmax', required=True, help='path to the softmax prediction in", "output_dir=args.output_folder) print(inv_aff_path) # Warp the softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' %", "@author: <NAME> (<EMAIL>) \"\"\" import os from argparse import ArgumentParser import numpy as", "cmd = 'reg_transform -invAff %s %s' % (aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path def", "nibabel as nib parser = ArgumentParser() parser.add_argument('--softmax', required=True, help='path to the softmax prediction", "subject space to template space.') parser.add_argument('--input_img', required=True, help='Path to the SRR to preprocess')", "-res %s -inter 1 -pad 0 -voff' % \\ (ref_img_path, softmax_path, aff_path, save_path)", "change order of channels softmax_nii = nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax,", "softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1) softmax[:, :, :, 0] += 1. 
- sum_proba", "main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the inverse affine transform print('Invert %s'", "segmentation. This script performs rigid register of the softmax prediction to the subject", "-ref %s -flo %s -trans %s -res %s -inter 1 -pad 0 -voff'", "from argparse import ArgumentParser import numpy as np import nibabel as nib parser", "print('Invert %s' % args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp the softmax", "to preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name =", "# Warp the softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' % args.softmax) warp_softmax(", "print('warp %s' % args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, ) if __name__ ==", ":, 0] += 1. 
- sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def", "import os from argparse import ArgumentParser import numpy as np import nibabel as", "warp_softmax(softmax_path, ref_img_path, save_path, aff_path): # Warp the softmax cmd = 'reg_resample -ref %s", "def invert_affine(aff_path, output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path =", "softmax_nii = nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1) softmax[:, :, :,", "# Warp the softmax cmd = 'reg_resample -ref %s -flo %s -trans %s", "required=True, help='Path to the SRR to preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir): if", "softmax save_path = os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' % args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path,", "prediction in the template space.') parser.add_argument('--aff', required=True, help='path to the Affine transformation that", "= os.path.split(aff_path)[1].replace('.txt', '') save_inv_aff_path = os.path.join( output_dir, '%s_inv.txt' % aff_name, ) cmd =", "0 -voff' % \\ (ref_img_path, softmax_path, aff_path, save_path) os.system(cmd) # Fix border effects", "os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute the inverse affine transform print('Invert %s' % args.aff) inv_aff_path", "aff_path, save_path) os.system(cmd) # Fix border effects due to padding with 0 AND", "help='path to the softmax prediction in the template space.') parser.add_argument('--aff', required=True, help='path to", "os from argparse import ArgumentParser import numpy as np import nibabel as nib", "save_path) os.system(cmd) # Fix border effects due to padding with 0 AND change", "Fix border effects due to padding with 0 AND change 
order of channels", "to the subject space. @author: <NAME> (<EMAIL>) \"\"\" import os from argparse import", "= nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args): if not os.path.exists(args.output_folder): os.mkdir(args.output_folder) # Compute", "was used' 'to go from subject space to template space.') parser.add_argument('--input_img', required=True, help='Path", "cmd = 'reg_resample -ref %s -flo %s -trans %s -res %s -inter 1", ") cmd = 'reg_transform -invAff %s %s' % (aff_path, save_inv_aff_path) os.system(cmd) return save_inv_aff_path", "softmax_path, aff_path, save_path) os.system(cmd) # Fix border effects due to padding with 0", "space.') parser.add_argument('--aff', required=True, help='path to the Affine transformation that was used' 'to go", "AND change order of channels softmax_nii = nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba =", "= nib.load(save_path) softmax = softmax_nii.get_fdata().astype(np.float32) sum_proba = np.sum(softmax, axis=-1) softmax[:, :, :, 0]", "softmax[:, :, :, 0] += 1. 
- sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii,", "= os.path.join(args.output_folder, 'softmax.nii.gz') print('warp %s' % args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, )", "% args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, ) if __name__ == '__main__': args", "parser = ArgumentParser() parser.add_argument('--softmax', required=True, help='path to the softmax prediction in the template", "from subject space to template space.') parser.add_argument('--input_img', required=True, help='Path to the SRR to", "'reg_resample -ref %s -flo %s -trans %s -res %s -inter 1 -pad 0", "= ArgumentParser() parser.add_argument('--softmax', required=True, help='path to the softmax prediction in the template space.')", "to the SRR to preprocess') parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir): if not os.path.exists(output_dir):", "transformation that was used' 'to go from subject space to template space.') parser.add_argument('--input_img',", "to the softmax prediction in the template space.') parser.add_argument('--aff', required=True, help='path to the", "%s -trans %s -res %s -inter 1 -pad 0 -voff' % \\ (ref_img_path,", "ArgumentParser() parser.add_argument('--softmax', required=True, help='path to the softmax prediction in the template space.') parser.add_argument('--aff',", "%s' % args.softmax) warp_softmax( softmax_path=args.softmax, ref_img_path=args.input_img, save_path=save_path, aff_path=inv_aff_path, ) if __name__ == '__main__':", "% aff_name, ) cmd = 'reg_transform -invAff %s %s' % (aff_path, save_inv_aff_path) os.system(cmd)", "save_inv_aff_path def warp_softmax(softmax_path, ref_img_path, save_path, aff_path): # Warp the softmax cmd = 'reg_resample", "Use this script to post-process the predicted softmax 
segmentation. This script performs rigid", "go from subject space to template space.') parser.add_argument('--input_img', required=True, help='Path to the SRR", "axis=-1) softmax[:, :, :, 0] += 1. - sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine)", "predicted softmax segmentation. This script performs rigid register of the softmax prediction to", "%s' % args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp the softmax save_path", "0] += 1. - sum_proba post_softmax_nii = nib.Nifti1Image(softmax, softmax_nii.affine) nib.save(post_softmax_nii, save_path) def main(args):", "used' 'to go from subject space to template space.') parser.add_argument('--input_img', required=True, help='Path to", "the inverse affine transform print('Invert %s' % args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path)", "help='path to the Affine transformation that was used' 'to go from subject space", "padding with 0 AND change order of channels softmax_nii = nib.load(save_path) softmax =", "Compute the inverse affine transform print('Invert %s' % args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder)", "% args.aff) inv_aff_path = invert_affine(aff_path=args.aff, output_dir=args.output_folder) print(inv_aff_path) # Warp the softmax save_path =", "import nibabel as nib parser = ArgumentParser() parser.add_argument('--softmax', required=True, help='path to the softmax", "parser.add_argument('--output_folder', required=True) def invert_affine(aff_path, output_dir): if not os.path.exists(output_dir): os.mkdir(output_dir) aff_name = os.path.split(aff_path)[1].replace('.txt', '')", "<NAME> (<EMAIL>) \"\"\" import os from argparse import ArgumentParser import numpy as np" ]
[ "self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE are raw oled functions # BELOW are some", "preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE,", "i = y + self.width-1 while i >= y: byte = 0 for", "SETMULTIPLEX = 0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT", "= 0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP =", "def cls(self): self.blank() self.display() def blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1), outline=0, fill=0) def", "# push out the whole lot def cls(self): self.blank() self.display() def blank(self): self.canvas.rectangle((0,", "lines oled.blank() self.canvas.text((2, 10), str1, font=font2, fill=1) self.canvas.text((2,40), str2, font=font2, fill=1) oled.display() def", "= 0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF = 0xAE DISPLAYON =", "= ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED():", "raw oled functions # BELOW are some pre-formatted layouts def msgBox(self,hdr=\"\", str1=\"\", str2=\"\",", "fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"): # 11 char max x 2 lines oled.blank()", "self.canvas.rectangle((0, 0, self.width-1, self.height-1), outline=0, fill=0) def onoff(self, onoff): if onoff == 0:", "31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self): \"\"\" The image on the \"canvas\" is", "128x64 long, ie whole canvas. 
for i in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode,", "= 0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN =", "font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4", "ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf',", "def display(self): \"\"\" The image on the \"canvas\" is flushed through to the", "i >= y: byte = 0 for n in range(0, step, self.width): byte", "0xAF EXTERNALVCC = 0x1 INVERTDISPLAY = 0xA7 MEMORYMODE = 0x20 NORMALDISPLAY = 0xA6", "= SMBus(0) except: exit(7) self.cmd_mode = 0x00 self.data_mode = 0x40 self.addr = address", "the whole lot def cls(self): self.blank() self.display() def blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1),", "# or # import oled96 # oled = oled96.OLED(0x3c) or 3d from PIL", "11 char max x 2 lines oled.blank() self.canvas.text((2, 10), str1, font=font2, fill=1) self.canvas.text((2,40),", "def _command(self, *cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self, data): #", "except: exit(7) self.cmd_mode = 0x00 self.data_mode = 0x40 self.addr = address self.width =", "const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self,", "= 0xAF EXTERNALVCC = 0x1 INVERTDISPLAY = 0xA7 MEMORYMODE = 0x20 NORMALDISPLAY =", "& 0x01) << 8 byte >>= 1 buf.append(byte) i -= 1 self._data(buf) #", 
"self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36), str2, font=font1, fill=1) self.canvas.text((4,49), str3, font=font1, fill=1) oled.display()", "0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd): assert(len(cmd) <=", "0), hdr, font=font5, fill=1) self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36), str2, font=font1, fill=1) self.canvas.text((4,49),", "import oled # or # import oled96 # oled = oled96.OLED(0x3c) or 3d", "ImageFont from smbus import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19)", "= str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1,", "for i in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self): \"\"\" The", "display. 
\"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1, # Column start/end address const.PAGEADDR, 0x00, self.pages-1)", "(pix[i + n] & 0x01) << 8 byte >>= 1 buf.append(byte) i -=", "= ImageDraw.Draw(self.image) # this is a \"draw\" object for preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"]", "19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def __init__(self, address=0x3C): try: self.bus =", "SETLOWCOLUMN = 0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE", "fill=1) self.canvas.text((4,49), str3, font=font1, fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"): # 11 char max", "display(self): \"\"\" The image on the \"canvas\" is flushed through to the hardware", "self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display() class const: CHARGEPUMP =", "0x40 self.addr = address self.width = 128 self.height = 64 self.pages = int(self.height", "oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5, fill=1) self.canvas.text((4,23), str1, font=font1, fill=1)", "self.bus = SMBus(0) except: exit(7) self.cmd_mode = 0x00 self.data_mode = 0x40 self.addr =", "font=font1, fill=1) self.canvas.text((4,36), str2, font=font1, fill=1) self.canvas.text((4,49), str3, font=font1, fill=1) oled.display() def yell2(self,str1=\"\",", "address pix = list(self.image.getdata()) step = self.width * 8 buf = [] for", "self.width-1, self.height-1), outline=0, fill=0) def onoff(self, onoff): if onoff == 0: self._command(const.DISPLAYOFF) else:", "= 0xA4 DISPLAYOFF = 0xAE DISPLAYON = 0xAF EXTERNALVCC = 0x1 INVERTDISPLAY =", "exit(7) self.cmd_mode = 0x00 self.data_mode = 0x40 self.addr = address self.width = 128", "flushed through to the hardware display. 
Takes the 1-bit image and dumps it", "== 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE are raw oled functions # BELOW", "fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1)", "jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0],", "font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1,", "ImageDraw, ImageFont from smbus import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf',", "whole lot def cls(self): self.blank() self.display() def blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1), outline=0,", "yell2(self,str1=\"\", str2=\"\"): # 11 char max x 2 lines oled.blank() self.canvas.text((2, 10), str1,", "oled96.OLED(0x3c) or 3d from PIL import Image, ImageDraw, ImageFont from smbus import SMBus", "2 lines oled.blank() self.canvas.text((2, 10), str1, font=font2, fill=1) self.canvas.text((2,40), str2, font=font2, fill=1) oled.display()", "y + self.width-1 while i >= y: byte = 0 for n in", "const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON)", "list(cmd)) def _data(self, data): # In our library, only data operation used is", "1 line oled.blank() self.canvas.text((2, 20), str1, font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None): # val", "0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC = 0x2", "fill=0) 
self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5, fill=1) self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36), str2,", "/ 8) self.image = Image.new('1', (self.width, self.height)) self.canvas = ImageDraw.Draw(self.image) # this is", "#font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def __init__(self, address=0x3C): try: self.bus = SMBus(1)", "0x00 self.data_mode = 0x40 self.addr = address self.width = 128 self.height = 64", "const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self,", ">>= 1 buf.append(byte) i -= 1 self._data(buf) # push out the whole lot", "NORMALDISPLAY = 0xA6 PAGEADDR = 0x22 SEGREMAP = 0xA0 SETCOMPINS = 0xDA SETCONTRAST", "= 0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF = 0xAE DISPLAYON = 0xAF EXTERNALVCC =", "\"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1, # Column start/end address const.PAGEADDR, 0x00, self.pages-1) #", "0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF = 0xAE DISPLAYON = 0xAF", "# 5 char max, 1 line oled.blank() self.canvas.text((2, 20), str1, font=font3, fill=1) oled.display()", "fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1, fill=0)", "= ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 =", "= 0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC =", "self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1)", "is flushed through to the hardware display. 
Takes the 1-bit image and dumps", "= ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 =", "library, only data operation used is 128x64 long, ie whole canvas. for i", "outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display()", "2, 0), hdr, font=font5, fill=1) self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36), str2, font=font1, fill=1)", "outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0,", "to the hardware display. Takes the 1-bit image and dumps it to the", "0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY,", "self._data(buf) # push out the whole lot def cls(self): self.blank() self.display() def blank(self):", "self.height-1), outline=0, fill=0) def onoff(self, onoff): if onoff == 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON)", "0x22 SEGREMAP = 0xA0 SETCOMPINS = 0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV = 0xD5", "= 0xC8 COMSCANINC = 0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF =", "self.bus = SMBus(1) except: try: self.bus = SMBus(0) except: exit(7) self.cmd_mode = 0x00", "oled = oled96.OLED(0x3c) or 3d from PIL import Image, ImageDraw, ImageFont from smbus", "in range(0, step, self.width): byte |= (pix[i + n] & 0x01) << 8", "oled.display() def bar(self,str1,val,dispval=None): # val = 0 to 100 for graph, dispval if", "+ n] & 0x01) << 8 byte >>= 1 
buf.append(byte) i -= 1", "operation used is 128x64 long, ie whole canvas. for i in range(0, len(data),", "# this is a \"draw\" object for preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF,", "0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC = 0x2 oled = OLED()", "oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((4,", "= ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 =", "fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display() class const: CHARGEPUMP", "or 3d from PIL import Image, ImageDraw, ImageFont from smbus import SMBus font1", "is 128x64 long, ie whole canvas. 
for i in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr,", "0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE = 0xD9", "str3=\"\"): # header autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2,", "self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE,", "dispval == None: dispval = val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1,", "font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def __init__(self, address=0x3C):", "const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode,", "oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5, fill=1)", "font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display() class const:", "2), str1, font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2,", "display. Takes the 1-bit image and dumps it to the SSD1306 OLED display.", "while i >= y: byte = 0 for n in range(0, step, self.width):", "n in range(0, step, self.width): byte |= (pix[i + n] & 0x01) <<", "if different from val. Autocentre. 
oled.blank() if dispval == None: dispval = val", "+ self.width-1 while i >= y: byte = 0 for n in range(0,", "COLUMNADDR = 0x21 COMSCANDEC = 0xC8 COMSCANINC = 0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME", "oled96 import oled # or # import oled96 # oled = oled96.OLED(0x3c) or", "canvas. for i in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self): \"\"\"", "from smbus import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3", "= ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def __init__(self, address=0x3C): try: self.bus = SMBus(1) except:", "the 1-bit image and dumps it to the SSD1306 OLED display. \"\"\" self._command(", "const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd): assert(len(cmd)", "self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1],", "self.data_mode, list(data[i:i+31])) def display(self): \"\"\" The image on the \"canvas\" is flushed through", "self.width = 128 self.height = 64 self.pages = int(self.height / 8) self.image =", "PIL import Image, ImageDraw, ImageFont from smbus import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12)", "font=font1, fill=1) self.canvas.text((4,49), str3, font=font1, fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"): # 11 char", "self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43),", "class OLED(): def __init__(self, address=0x3C): try: self.bus = SMBus(1) 
except: try: self.bus =", "CHARGEPUMP = 0x8D COLUMNADDR = 0x21 COMSCANDEC = 0xC8 COMSCANINC = 0xC0 DISPLAYALLON", "dispval if different from val. Autocentre. oled.blank() if dispval == None: dispval =", "const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def", "36) class OLED(): def __init__(self, address=0x3C): try: self.bus = SMBus(1) except: try: self.bus", "0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd))", "step, self.width): byte |= (pix[i + n] & 0x01) << 8 byte >>=", "object for preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET,", "display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP,", "dispval = val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1) self.canvas.rectangle((0,", "self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self, data): # In our library, only data operation", "= 0x20 NORMALDISPLAY = 0xA6 PAGEADDR = 0x22 SEGREMAP = 0xA0 SETCOMPINS =", "const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12,", "self.canvas.text((2,40), str2, font=font2, fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"): # 5 char max, 1", "from oled96 import oled # or # import oled96 # oled = oled96.OLED(0x3c)", "\"draw\" object for preparing display contents 
self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F,", "lot def cls(self): self.blank() self.display() def blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1), outline=0, fill=0)", "self.jnl4[3], font=font1, fill=1) oled.display() class const: CHARGEPUMP = 0x8D COLUMNADDR = 0x21 COMSCANDEC", "-= 1 self._data(buf) # push out the whole lot def cls(self): self.blank() self.display()", "len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self): \"\"\" The image on the \"canvas\"", "Image, ImageDraw, ImageFont from smbus import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 =", "oled.display() def yell(self,str1=\"\", str2=\"\"): # 5 char max, 1 line oled.blank() self.canvas.text((2, 20),", "= ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def __init__(self, address=0x3C): try:", ">= y: byte = 0 for n in range(0, step, self.width): byte |=", "1 self._data(buf) # push out the whole lot def cls(self): self.blank() self.display() def", "oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1)", "= 0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX =", "a \"draw\" object for preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX,", "fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display() class const: CHARGEPUMP = 0x8D COLUMNADDR =", "self.image = Image.new('1', (self.width, self.height)) self.canvas = ImageDraw.Draw(self.image) # this is a \"draw\"", "i -= 1 self._data(buf) # push out the whole lot 
def cls(self): self.blank()", "DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF = 0xAE DISPLAYON = 0xAF EXTERNALVCC = 0x1 INVERTDISPLAY", "= self.width * 8 buf = [] for y in range(0, self.pages *", "* 8 buf = [] for y in range(0, self.pages * step, step):", "SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36)", "oled.display() class const: CHARGEPUMP = 0x8D COLUMNADDR = 0x21 COMSCANDEC = 0xC8 COMSCANINC", "our library, only data operation used is 128x64 long, ie whole canvas. for", "0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS,", "0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE,", "ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf',", "str2, font=font1, fill=1) self.canvas.text((4,49), str3, font=font1, fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"): # 11", "# 11 char max x 2 lines oled.blank() self.canvas.text((2, 10), str1, font=font2, fill=1)", "range(0, self.pages * step, step): i = y + self.width-1 while i >=", "fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"): # 5 char max, 1 line oled.blank() self.canvas.text((2,", "== None: dispval = val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5,", "self.canvas.text((4,36), str2, font=font1, fill=1) 
self.canvas.text((4,49), str3, font=font1, fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"): #", "= 0x40 self.addr = address self.width = 128 self.height = 64 self.pages =", "0x2 oled = OLED() import sys if __name__ == '__main__': print (sys.argv[0], 'is", "= 0x22 SEGREMAP = 0xA0 SETCOMPINS = 0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV =", "None: dispval = val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1)", "ABOVE are raw oled functions # BELOW are some pre-formatted layouts def msgBox(self,hdr=\"\",", "n] & 0x01) << 8 byte >>= 1 buf.append(byte) i -= 1 self._data(buf)", "hardware display. Takes the 1-bit image and dumps it to the SSD1306 OLED", "0x01) << 8 byte >>= 1 buf.append(byte) i -= 1 self._data(buf) # push", "3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48),", "3d from PIL import Image, ImageDraw, ImageFont from smbus import SMBus font1 =", "0xA4 DISPLAYOFF = 0xAE DISPLAYON = 0xAF EXTERNALVCC = 0x1 INVERTDISPLAY = 0xA7", "import sys if __name__ == '__main__': print (sys.argv[0], 'is an importable module:') exit()", "y: byte = 0 for n in range(0, step, self.width): byte |= (pix[i", "oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2,", "0xA0 SETCOMPINS = 0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET = 0xD3", "0xC8 COMSCANINC = 0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF = 0xAE", "self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display()", "start/end address const.PAGEADDR, 0x00, self.pages-1) # Page start/end address 
pix = list(self.image.getdata()) step", "(self.width, self.height)) self.canvas = ImageDraw.Draw(self.image) # this is a \"draw\" object for preparing", "= val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1) self.canvas.rectangle((0, 31,", "0xA6 PAGEADDR = 0x22 SEGREMAP = 0xA0 SETCOMPINS = 0xDA SETCONTRAST = 0x81", "COMSCANINC = 0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF = 0xAE DISPLAYON", "100 for graph, dispval if different from val. Autocentre. oled.blank() if dispval ==", "0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST,", "0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC = 0x2 oled = OLED() import sys if", "self.width-1, # Column start/end address const.PAGEADDR, 0x00, self.pages-1) # Page start/end address pix", "for y in range(0, self.pages * step, step): i = y + self.width-1", "0, self.width-1, self.height-1), outline=0, fill=0) def onoff(self, onoff): if onoff == 0: self._command(const.DISPLAYOFF)", "the \"canvas\" is flushed through to the hardware display. 
Takes the 1-bit image", "byte >>= 1 buf.append(byte) i -= 1 self._data(buf) # push out the whole", "SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN", "self.cmd_mode, list(cmd)) def _data(self, data): # In our library, only data operation used", "push out the whole lot def cls(self): self.blank() self.display() def blank(self): self.canvas.rectangle((0, 0,", "= 0xA7 MEMORYMODE = 0x20 NORMALDISPLAY = 0xA6 PAGEADDR = 0x22 SEGREMAP =", "= [] for y in range(0, self.pages * step, step): i = y", "onoff): if onoff == 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE are raw oled", "# Page start/end address pix = list(self.image.getdata()) step = self.width * 8 buf", "byte = 0 for n in range(0, step, self.width): byte |= (pix[i +", "self.height = 64 self.pages = int(self.height / 8) self.image = Image.new('1', (self.width, self.height))", "pre-formatted layouts def msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): # header autocentred oled.blank() self.canvas.rectangle((0, 19,", "python # USAGE: # from oled96 import oled # or # import oled96", "SEGREMAP = 0xA0 SETCOMPINS = 0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET", "INVERTDISPLAY = 0xA7 MEMORYMODE = 0x20 NORMALDISPLAY = 0xA6 PAGEADDR = 0x22 SEGREMAP", "0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT = 0xDB", "self.display() def blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1), outline=0, fill=0) def onoff(self, onoff): if", "12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12)", "max x 2 lines oled.blank() self.canvas.text((2, 10), str1, font=font2, fill=1) self.canvas.text((2,40), str2, font=font2,", "BELOW are some pre-formatted layouts def 
msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): # header autocentred", "def yell2(self,str1=\"\", str2=\"\"): # 11 char max x 2 lines oled.blank() self.canvas.text((2, 10),", "0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN = 0x10", "def onoff(self, onoff): if onoff == 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE are", "oled.height-1), outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33),", "are some pre-formatted layouts def msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): # header autocentred oled.blank()", "font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None): # val = 0 to 100 for graph,", "self._command(const.DISPLAYON) # ABOVE are raw oled functions # BELOW are some pre-formatted layouts", "font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5", "x 2 lines oled.blank() self.canvas.text((2, 10), str1, font=font2, fill=1) self.canvas.text((2,40), str2, font=font2, fill=1)", "= 128 self.height = 64 self.pages = int(self.height / 8) self.image = Image.new('1',", "data): # In our library, only data operation used is 128x64 long, ie", "step = self.width * 8 buf = [] for y in range(0, self.pages", "ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def __init__(self, address=0x3C): try: self.bus", "# header autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0),", "contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( 
const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14,", "0x00, self.pages-1) # Page start/end address pix = list(self.image.getdata()) step = self.width *", "Column start/end address const.PAGEADDR, 0x00, self.pages-1) # Page start/end address pix = list(self.image.getdata())", "31, oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval,", "OLED display. \"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1, # Column start/end address const.PAGEADDR, 0x00,", "to 100 for graph, dispval if different from val. Autocentre. oled.blank() if dispval", "str1, font=font2, fill=1) self.canvas.text((2,40), str2, font=font2, fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"): # 5", "for graph, dispval if different from val. Autocentre. oled.blank() if dispval == None:", "# ABOVE are raw oled functions # BELOW are some pre-formatted layouts def", "def _data(self, data): # In our library, only data operation used is 128x64", "in range(0, self.pages * step, step): i = y + self.width-1 while i", "= OLED() import sys if __name__ == '__main__': print (sys.argv[0], 'is an importable", "self.data_mode = 0x40 self.addr = address self.width = 128 self.height = 64 self.pages", "\"\"\" The image on the \"canvas\" is flushed through to the hardware display.", "self.pages-1) # Page start/end address pix = list(self.image.getdata()) step = self.width * 8", "different from val. Autocentre. 
oled.blank() if dispval == None: dispval = val dispval", "class const: CHARGEPUMP = 0x8D COLUMNADDR = 0x21 COMSCANDEC = 0xC8 COMSCANINC =", "<< 8 byte >>= 1 buf.append(byte) i -= 1 self._data(buf) # push out", "address self.width = 128 self.height = 64 self.pages = int(self.height / 8) self.image", "SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE", "self.canvas.text((4,49), str3, font=font1, fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"): # 11 char max x", "SMBus(0) except: exit(7) self.cmd_mode = 0x00 self.data_mode = 0x40 self.addr = address self.width", "font=font2, fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"): # 5 char max, 1 line oled.blank()", "range(0, step, self.width): byte |= (pix[i + n] & 0x01) << 8 byte", "0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF = 0xAE DISPLAYON = 0xAF EXTERNALVCC = 0x1", "step, step): i = y + self.width-1 while i >= y: byte =", "or # import oled96 # oled = oled96.OLED(0x3c) or 3d from PIL import", "import oled96 # oled = oled96.OLED(0x3c) or 3d from PIL import Image, ImageDraw,", "= 0xDB SWITCHCAPVCC = 0x2 oled = OLED() import sys if __name__ ==", "SETSEGMENTREMAP = 0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC = 0x2 oled", "Takes the 1-bit image and dumps it to the SSD1306 OLED display. 
\"\"\"", "outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5, fill=1) self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36),", "pix = list(self.image.getdata()) step = self.width * 8 buf = [] for y", "in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self): \"\"\" The image on", "<reponame>BLavery/PyBlynk<filename>PiBlynk-py/oled96/__init__.py #!/usr/bin/env python # USAGE: # from oled96 import oled # or #", "0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE are raw oled functions # BELOW are", "= 0xA0 SETCOMPINS = 0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET =", "__init__(self, address=0x3C): try: self.bus = SMBus(1) except: try: self.bus = SMBus(0) except: exit(7)", "OLED() import sys if __name__ == '__main__': print (sys.argv[0], 'is an importable module:')", "const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT,", "for preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00,", "= 0xAE DISPLAYON = 0xAF EXTERNALVCC = 0x1 INVERTDISPLAY = 0xA7 MEMORYMODE =", "through to the hardware display. Takes the 1-bit image and dumps it to", "self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display() class const: CHARGEPUMP = 0x8D", "and dumps it to the SSD1306 OLED display. 
\"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1,", "Page start/end address pix = list(self.image.getdata()) step = self.width * 8 buf =", "self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1)", "= 0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE =", "0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd):", "ie whole canvas. for i in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def", "some pre-formatted layouts def msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): # header autocentred oled.blank() self.canvas.rectangle((0,", "self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display() class const: CHARGEPUMP = 0x8D COLUMNADDR = 0x21", "0, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1,", "if onoff == 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE are raw oled functions", "0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr,", "the SSD1306 OLED display. 
\"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1, # Column start/end address", "str2=\"\"): # 11 char max x 2 lines oled.blank() self.canvas.text((2, 10), str1, font=font2,", "fill=0) def onoff(self, onoff): if onoff == 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE", "const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def", "0 to 100 for graph, dispval if different from val. Autocentre. oled.blank() if", "else: self._command(const.DISPLAYON) # ABOVE are raw oled functions # BELOW are some pre-formatted", "= Image.new('1', (self.width, self.height)) self.canvas = ImageDraw.Draw(self.image) # this is a \"draw\" object", "0x21 COMSCANDEC = 0xC8 COMSCANINC = 0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME = 0xA4", "start/end address pix = list(self.image.getdata()) step = self.width * 8 buf = []", "8 byte >>= 1 buf.append(byte) i -= 1 self._data(buf) # push out the", "DISPLAYOFF = 0xAE DISPLAYON = 0xAF EXTERNALVCC = 0x1 INVERTDISPLAY = 0xA7 MEMORYMODE", "step): i = y + self.width-1 while i >= y: byte = 0", "oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0)", "str3, font=font1, fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"): # 11 char max x 2", "try: self.bus = SMBus(0) except: exit(7) self.cmd_mode = 0x00 self.data_mode = 0x40 self.addr", "str1, font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None): # val = 0 to 100 for", "val. Autocentre. 
oled.blank() if dispval == None: dispval = val dispval = str(int(dispval))", "header autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr,", "5 char max, 1 line oled.blank() self.canvas.text((2, 20), str1, font=font3, fill=1) oled.display() def", "fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0,", "fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display() def", "0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN = 0x00", "= oled96.OLED(0x3c) or 3d from PIL import Image, ImageDraw, ImageFont from smbus import", "[] for y in range(0, self.pages * step, step): i = y +", "dispval, font=font2, fill=1) oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1),", "# from oled96 import oled # or # import oled96 # oled =", "are raw oled functions # BELOW are some pre-formatted layouts def msgBox(self,hdr=\"\", str1=\"\",", "oled.blank() self.canvas.text((2, 20), str1, font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None): # val = 0", "self.canvas.text((2, 20), str1, font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None): # val = 0 to", "outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2],", "self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18),", "0x00, self.width-1, # Column 
start/end address const.PAGEADDR, 0x00, self.pages-1) # Page start/end address", "fill=1) oled.display() def bar(self,str1,val,dispval=None): # val = 0 to 100 for graph, dispval", "def yell(self,str1=\"\", str2=\"\"): # 5 char max, 1 line oled.blank() self.canvas.text((2, 20), str1,", "char max, 1 line oled.blank() self.canvas.text((2, 20), str1, font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None):", "ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf',", "0xA7 MEMORYMODE = 0x20 NORMALDISPLAY = 0xA6 PAGEADDR = 0x22 SEGREMAP = 0xA0", "def bar(self,str1,val,dispval=None): # val = 0 to 100 for graph, dispval if different", "hdr, font=font5, fill=1) self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36), str2, font=font1, fill=1) self.canvas.text((4,49), str3,", "SETHIGHCOLUMN = 0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP", "OLED(): def __init__(self, address=0x3C): try: self.bus = SMBus(1) except: try: self.bus = SMBus(0)", "39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1)", "128 self.height = 64 self.pages = int(self.height / 8) self.image = Image.new('1', (self.width,", "const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF,", "self.width * 8 buf = [] for y in range(0, self.pages * step,", "char max x 2 lines oled.blank() self.canvas.text((2, 10), str1, font=font2, fill=1) self.canvas.text((2,40), str2,", "0x20 NORMALDISPLAY = 0xA6 PAGEADDR = 0x22 SEGREMAP = 0xA0 
SETCOMPINS = 0xDA", "SETVCOMDETECT = 0xDB SWITCHCAPVCC = 0x2 oled = OLED() import sys if __name__", "#!/usr/bin/env python # USAGE: # from oled96 import oled # or # import", "buf = [] for y in range(0, self.pages * step, step): i =", "font=font1, fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"): # 11 char max x 2 lines", "out the whole lot def cls(self): self.blank() self.display() def blank(self): self.canvas.rectangle((0, 0, self.width-1,", "0 for n in range(0, step, self.width): byte |= (pix[i + n] &", "oled functions # BELOW are some pre-formatted layouts def msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"):", "# BELOW are some pre-formatted layouts def msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): # header", "fill=1) oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), outline=1, fill=0)", "_data(self, data): # In our library, only data operation used is 128x64 long,", "20), str1, font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None): # val = 0 to 100", "DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF = 0xAE DISPLAYON = 0xAF EXTERNALVCC", "except: try: self.bus = SMBus(0) except: exit(7) self.cmd_mode = 0x00 self.data_mode = 0x40", "on the \"canvas\" is flushed through to the hardware display. 
Takes the 1-bit", "is a \"draw\" object for preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80,", "def __init__(self, address=0x3C): try: self.bus = SMBus(1) except: try: self.bus = SMBus(0) except:", "# USAGE: # from oled96 import oled # or # import oled96 #", "oled.blank() if dispval == None: dispval = val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11,", "for n in range(0, step, self.width): byte |= (pix[i + n] & 0x01)", "The image on the \"canvas\" is flushed through to the hardware display. Takes", "onoff(self, onoff): if onoff == 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE are raw", "str2=\"\"): # 5 char max, 1 line oled.blank() self.canvas.text((2, 20), str1, font=font3, fill=1)", "ImageDraw.Draw(self.image) # this is a \"draw\" object for preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command(", "used is 128x64 long, ie whole canvas. for i in range(0, len(data), 31):", "SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX", "functions # BELOW are some pre-formatted layouts def msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): #", "str1, font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39),", "const.DISPLAYON) def _command(self, *cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self, data):", "19, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5, fill=1) self.canvas.text((4,23), str1,", "fill=1) oled.display() class const: CHARGEPUMP = 0x8D COLUMNADDR = 0x21 COMSCANDEC = 0xC8", "it to the SSD1306 OLED display. 
\"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1, # Column", "PAGEADDR = 0x22 SEGREMAP = 0xA0 SETCOMPINS = 0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV", "SETSTARTLINE = 0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC = 0x2 oled = OLED() import", "whole canvas. for i in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self):", "from PIL import Image, ImageDraw, ImageFont from smbus import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf',", "= 0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN =", "= 0x8D COLUMNADDR = 0x21 COMSCANDEC = 0xC8 COMSCANINC = 0xC0 DISPLAYALLON =", "graph, dispval if different from val. Autocentre. oled.blank() if dispval == None: dispval", "font=font2, fill=1) self.canvas.text((2,40), str2, font=font2, fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"): # 5 char", "max, 1 line oled.blank() self.canvas.text((2, 20), str1, font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None): #", "self.pages = int(self.height / 8) self.image = Image.new('1', (self.width, self.height)) self.canvas = ImageDraw.Draw(self.image)", "layouts def msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): # header autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1,", "# import oled96 # oled = oled96.OLED(0x3c) or 3d from PIL import Image,", "oled.blank() self.canvas.text((2, 10), str1, font=font2, fill=1) self.canvas.text((2,40), str2, font=font2, fill=1) oled.display() def yell(self,str1=\"\",", "= y + self.width-1 while i >= y: byte = 0 for n", "self.pages * step, step): i = y + self.width-1 while i >= y:", "ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def __init__(self, address=0x3C): try: self.bus = SMBus(1) except: try:", "oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), 
outline=1, fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1,", "= 0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE =", "self.width-1 while i >= y: byte = 0 for n in range(0, step,", "0x1 INVERTDISPLAY = 0xA7 MEMORYMODE = 0x20 NORMALDISPLAY = 0xA6 PAGEADDR = 0x22", "= 0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC = 0x2 oled = OLED() import sys", "oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5, fill=1) self.canvas.text((4,23), str1, font=font1,", "list(data[i:i+31])) def display(self): \"\"\" The image on the \"canvas\" is flushed through to", "|= (pix[i + n] & 0x01) << 8 byte >>= 1 buf.append(byte) i", "64 self.pages = int(self.height / 8) self.image = Image.new('1', (self.width, self.height)) self.canvas =", "autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5,", "31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self, data): # In our library, only data", "MEMORYMODE = 0x20 NORMALDISPLAY = 0xA6 PAGEADDR = 0x22 SEGREMAP = 0xA0 SETCOMPINS", "COMSCANDEC = 0xC8 COMSCANINC = 0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME = 0xA4 DISPLAYOFF", "image and dumps it to the SSD1306 OLED display. \"\"\" self._command( const.COLUMNADDR, 0x00,", "this is a \"draw\" object for preparing display contents self.jnl4=[\"\",\"Jnl:\",\"\",\"\"] self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV,", "def msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): # header autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1),", "0xAE DISPLAYON = 0xAF EXTERNALVCC = 0x1 INVERTDISPLAY = 0xA7 MEMORYMODE = 0x20", "Autocentre. 
oled.blank() if dispval == None: dispval = val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11)", "assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self, data): # In our library,", "SWITCHCAPVCC = 0x2 oled = OLED() import sys if __name__ == '__main__': print", "const: CHARGEPUMP = 0x8D COLUMNADDR = 0x21 COMSCANDEC = 0xC8 COMSCANINC = 0xC0", "19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19)", "0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40,", "address const.PAGEADDR, 0x00, self.pages-1) # Page start/end address pix = list(self.image.getdata()) step =", "0xDB SWITCHCAPVCC = 0x2 oled = OLED() import sys if __name__ == '__main__':", "1-bit image and dumps it to the SSD1306 OLED display. 
\"\"\" self._command( const.COLUMNADDR,", "40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1)", "try: self.bus = SMBus(1) except: try: self.bus = SMBus(0) except: exit(7) self.cmd_mode =", "str1=\"\", str2=\"\", str3=\"\"): # header autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1, fill=0)", "oled # or # import oled96 # oled = oled96.OLED(0x3c) or 3d from", "self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5, fill=1) self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36), str2, font=font1,", "str1, font=font1, fill=1) self.canvas.text((4,36), str2, font=font1, fill=1) self.canvas.text((4,49), str3, font=font1, fill=1) oled.display() def", "= 0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT =", "smbus import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 =", "= 0x00 self.data_mode = 0x40 self.addr = address self.width = 128 self.height =", "self.canvas = ImageDraw.Draw(self.image) # this is a \"draw\" object for preparing display contents", "font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 36) #font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6", "self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32,", "8 buf = [] for y in range(0, self.pages * step, step): i", "= SMBus(1) except: try: self.bus = SMBus(0) except: exit(7) self.cmd_mode = 0x00 self.data_mode", "self.width): byte |= (pix[i + n] & 0x01) << 
8 byte >>= 1", "#font4 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class", "str2=\"\", str3=\"\"): # header autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11,", "= 64 self.pages = int(self.height / 8) self.image = Image.new('1', (self.width, self.height)) self.canvas", "range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self): \"\"\" The image on the", "self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3],", "32, oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display() def jnl(self,str1): oled.blank()", "to the SSD1306 OLED display. 
\"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1, # Column start/end", "import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 19) font3 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf',", "= 0x1 INVERTDISPLAY = 0xA7 MEMORYMODE = 0x20 NORMALDISPLAY = 0xA6 PAGEADDR =", "self.cmd_mode = 0x00 self.data_mode = 0x40 self.addr = address self.width = 128 self.height", "ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def", "bar(self,str1,val,dispval=None): # val = 0 to 100 for graph, dispval if different from", "font=font1, fill=1) oled.display() class const: CHARGEPUMP = 0x8D COLUMNADDR = 0x21 COMSCANDEC =", "const.COLUMNADDR, 0x00, self.width-1, # Column start/end address const.PAGEADDR, 0x00, self.pages-1) # Page start/end", "_command(self, *cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self, data): # In", "const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME, const.NORMALDISPLAY, const.DISPLAYON) def _command(self, *cmd): assert(len(cmd) <= 31)", "SMBus(1) except: try: self.bus = SMBus(0) except: exit(7) self.cmd_mode = 0x00 self.data_mode =", "data operation used is 128x64 long, ie whole canvas. for i in range(0,", "const.PAGEADDR, 0x00, self.pages-1) # Page start/end address pix = list(self.image.getdata()) step = self.width", "oled = OLED() import sys if __name__ == '__main__': print (sys.argv[0], 'is an", "*cmd): assert(len(cmd) <= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self, data): # In our", "only data operation used is 128x64 long, ie whole canvas. 
for i in", "const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1,", "self._command( const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00,", "self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((2+(11-len(hdr))/2*124/11, 2, 0), hdr, font=font5, fill=1) self.canvas.text((4,23),", "# val = 0 to 100 for graph, dispval if different from val.", "In our library, only data operation used is 128x64 long, ie whole canvas.", "12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36) class OLED(): def __init__(self,", "val = 0 to 100 for graph, dispval if different from val. Autocentre.", "dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40),", "0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP = 0xA1", "font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1,", "* step, step): i = y + self.width-1 while i >= y: byte", "#print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1, fill=1) self.canvas.rectangle((int((val*126)/100),", "0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN = 0x10 SETLOWCOLUMN = 0x00 SETMULTIPLEX = 0xA8", "self.height)) self.canvas = ImageDraw.Draw(self.image) # this is a \"draw\" object for preparing display", "def blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1), 
outline=0, fill=0) def onoff(self, onoff): if onoff", "def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), outline=1, fill=0) self.canvas.text((4, 3),", "0x8D COLUMNADDR = 0x21 COMSCANDEC = 0xC8 COMSCANINC = 0xC0 DISPLAYALLON = 0xA5", "i in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self): \"\"\" The image", "self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1,", "oled.display() def yell2(self,str1=\"\", str2=\"\"): # 11 char max x 2 lines oled.blank() self.canvas.text((2,", "10), str1, font=font2, fill=1) self.canvas.text((2,40), str2, font=font2, fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"): #", "fill=1) self.canvas.text((4,36), str2, font=font1, fill=1) self.canvas.text((4,49), str3, font=font1, fill=1) oled.display() def yell2(self,str1=\"\", str2=\"\"):", "from val. Autocentre. oled.blank() if dispval == None: dispval = val dispval =", "fill=1) self.canvas.text((2,40), str2, font=font2, fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"): # 5 char max,", "= 0xA6 PAGEADDR = 0x22 SEGREMAP = 0xA0 SETCOMPINS = 0xDA SETCONTRAST =", "<= 31) self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd)) def _data(self, data): # In our library, only", "8) self.image = Image.new('1', (self.width, self.height)) self.canvas = ImageDraw.Draw(self.image) # this is a", "long, ie whole canvas. 
for i in range(0, len(data), 31): self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31]))", "if dispval == None: dispval = val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2),", "y in range(0, self.pages * step, step): i = y + self.width-1 while", "SETPRECHARGE = 0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC", "fill=1) self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36), str2, font=font1, fill=1) self.canvas.text((4,49), str3, font=font1, fill=1)", "USAGE: # from oled96 import oled # or # import oled96 # oled", "self.addr = address self.width = 128 self.height = 64 self.pages = int(self.height /", "1 buf.append(byte) i -= 1 self._data(buf) # push out the whole lot def", "buf.append(byte) i -= 1 self._data(buf) # push out the whole lot def cls(self):", "DISPLAYON = 0xAF EXTERNALVCC = 0x1 INVERTDISPLAY = 0xA7 MEMORYMODE = 0x20 NORMALDISPLAY", "str2, font=font2, fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"): # 5 char max, 1 line", "val dispval = str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1,", "font=font5, fill=1) self.canvas.text((4,23), str1, font=font1, fill=1) self.canvas.text((4,36), str2, font=font1, fill=1) self.canvas.text((4,49), str3, font=font1,", "# Column start/end address const.PAGEADDR, 0x00, self.pages-1) # Page start/end address pix =", "const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC,", "SETCOMPINS = 0xDA SETCONTRAST = 0x81 SETDISPLAYCLOCKDIV = 0xD5 SETDISPLAYOFFSET = 0xD3 SETHIGHCOLUMN", "= address self.width = 128 self.height = 64 self.pages = int(self.height / 8)", "= 0 to 100 for graph, dispval if different from val. Autocentre. 
oled.blank()", "= 0x2 oled = OLED() import sys if __name__ == '__main__': print (sys.argv[0],", "msgBox(self,hdr=\"\", str1=\"\", str2=\"\", str3=\"\"): # header autocentred oled.blank() self.canvas.rectangle((0, 19, oled.width-1, oled.height-1), outline=1,", "= 0xA1 SETSTARTLINE = 0x40 SETVCOMDETECT = 0xDB SWITCHCAPVCC = 0x2 oled =", "self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display() class", "EXTERNALVCC = 0x1 INVERTDISPLAY = 0xA7 MEMORYMODE = 0x20 NORMALDISPLAY = 0xA6 PAGEADDR", "onoff == 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) # ABOVE are raw oled functions #", "str(int(dispval)) #print(2+(11-len(str1))/2*124/11) self.canvas.text((2+(11-len(str1))/2*124/11, 2), str1, font=font5, fill=1) self.canvas.rectangle((0, 31, oled.width-1, 40), outline=1, fill=1)", "= int(self.height / 8) self.image = Image.new('1', (self.width, self.height)) self.canvas = ImageDraw.Draw(self.image) #", "= 0 for n in range(0, step, self.width): byte |= (pix[i + n]", "self.canvas.text((2, 10), str1, font=font2, fill=1) self.canvas.text((2,40), str2, font=font2, fill=1) oled.display() def yell(self,str1=\"\", str2=\"\"):", "0x00 SETMULTIPLEX = 0xA8 SETPRECHARGE = 0xD9 SETSEGMENTREMAP = 0xA1 SETSTARTLINE = 0x40", "self.bus.write_i2c_block_data(self.addr, self.data_mode, list(data[i:i+31])) def display(self): \"\"\" The image on the \"canvas\" is flushed", "self.blank() self.display() def blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1), outline=0, fill=0) def onoff(self, onoff):", "fill=0) self.canvas.text((4, 3), self.jnl4[0], font=font1, fill=1) self.canvas.text((4,18), self.jnl4[1], font=font1, fill=1) self.canvas.text((4,33), self.jnl4[2], font=font1,", "line oled.blank() self.canvas.text((2, 20), str1, font=font3, fill=1) oled.display() def bar(self,str1,val,dispval=None): # val =", "36) #font4 = 
ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 12) font5 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 19) #font6 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeMonoBold.ttf', 36)", "self.canvas.rectangle((int((val*126)/100), 32, oled.width-2, 39), outline=1, fill=0) self.canvas.text((2+(11-len(dispval))/2*124/11,43), dispval, font=font2, fill=1) oled.display() def jnl(self,str1):", "const.MEMORYMODE, 0x00, const.SEGREMAP, const.COMSCANDEC, const.SETCOMPINS, 0x12, const.SETCONTRAST, 0xCF, const.SETPRECHARGE, 0xF1, const.SETVCOMDETECT, 0x40, const.DISPLAYALLON_RESUME,", "import Image, ImageDraw, ImageFont from smbus import SMBus font1 = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', 12) font2", "font=font1, fill=1) self.canvas.text((4,48), self.jnl4[3], font=font1, fill=1) oled.display() class const: CHARGEPUMP = 0x8D COLUMNADDR", "oled96 # oled = oled96.OLED(0x3c) or 3d from PIL import Image, ImageDraw, ImageFont", "SSD1306 OLED display. \"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1, # Column start/end address const.PAGEADDR,", "blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1), outline=0, fill=0) def onoff(self, onoff): if onoff ==", "= 0x21 COMSCANDEC = 0xC8 COMSCANINC = 0xC0 DISPLAYALLON = 0xA5 DISPLAYALLON_RESUME =", "the hardware display. 
Takes the 1-bit image and dumps it to the SSD1306", "self._command( const.COLUMNADDR, 0x00, self.width-1, # Column start/end address const.PAGEADDR, 0x00, self.pages-1) # Page", "int(self.height / 8) self.image = Image.new('1', (self.width, self.height)) self.canvas = ImageDraw.Draw(self.image) # this", "Image.new('1', (self.width, self.height)) self.canvas = ImageDraw.Draw(self.image) # this is a \"draw\" object for", "= list(self.image.getdata()) step = self.width * 8 buf = [] for y in", "# In our library, only data operation used is 128x64 long, ie whole", "yell(self,str1=\"\", str2=\"\"): # 5 char max, 1 line oled.blank() self.canvas.text((2, 20), str1, font=font3,", "address=0x3C): try: self.bus = SMBus(1) except: try: self.bus = SMBus(0) except: exit(7) self.cmd_mode", "dumps it to the SSD1306 OLED display. \"\"\" self._command( const.COLUMNADDR, 0x00, self.width-1, #", "# oled = oled96.OLED(0x3c) or 3d from PIL import Image, ImageDraw, ImageFont from", "byte |= (pix[i + n] & 0x01) << 8 byte >>= 1 buf.append(byte)", "outline=0, fill=0) def onoff(self, onoff): if onoff == 0: self._command(const.DISPLAYOFF) else: self._command(const.DISPLAYON) #", "image on the \"canvas\" is flushed through to the hardware display. Takes the", "cls(self): self.blank() self.display() def blank(self): self.canvas.rectangle((0, 0, self.width-1, self.height-1), outline=0, fill=0) def onoff(self,", "list(self.image.getdata()) step = self.width * 8 buf = [] for y in range(0,", "font=font2, fill=1) oled.display() def jnl(self,str1): oled.blank() self.jnl4.pop(0) self.jnl4.append(str1) self.canvas.rectangle((0, 0, oled.width-1, oled.height-1), outline=1,", "\"canvas\" is flushed through to the hardware display. Takes the 1-bit image and", "const.DISPLAYOFF, const.SETDISPLAYCLOCKDIV, 0x80, const.SETMULTIPLEX, 0x3F, const.SETDISPLAYOFFSET, 0x00, const.SETSTARTLINE, const.CHARGEPUMP, 0x14, const.MEMORYMODE, 0x00, const.SEGREMAP," ]
[ "\"Apple\", \"MS\", \"Dell\"] zipped = zip(names, comps) for a, b in zipped: print(a,", "\"Harsh\", \"Navin\"] comps = [\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped = zip(names, comps) for", "[\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped = zip(names, comps) for a, b in zipped:", "\"MS\", \"Dell\"] zipped = zip(names, comps) for a, b in zipped: print(a, b)", "[\"Navin\", \"Kiran\", \"Harsh\", \"Navin\"] comps = [\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped = zip(names,", "<gh_stars>0 names = [\"Navin\", \"Kiran\", \"Harsh\", \"Navin\"] comps = [\"Dell\", \"Apple\", \"MS\", \"Dell\"]", "comps = [\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped = zip(names, comps) for a, b", "\"Navin\"] comps = [\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped = zip(names, comps) for a,", "= [\"Navin\", \"Kiran\", \"Harsh\", \"Navin\"] comps = [\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped =", "= [\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped = zip(names, comps) for a, b in", "\"Kiran\", \"Harsh\", \"Navin\"] comps = [\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped = zip(names, comps)", "names = [\"Navin\", \"Kiran\", \"Harsh\", \"Navin\"] comps = [\"Dell\", \"Apple\", \"MS\", \"Dell\"] zipped" ]
[ "if self.switch == 1: return self.q2.pop(0) else: return self.q1.pop(0) # @return an integer", "self.q1.append(self.q2.pop(0)) self.switch = 2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1 self.length +=", "self.switch = 2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1 self.length += 1", "return self.length == 0 s = Stack() s.push(1) s.push(2) s.push(3) print s.top() print", "self.q1.pop(0) # @return an integer def top(self): if self.length ==0: return None if", "__init__(self): self.q1 = [] self.q2 =[] self.switch = 1 self.length = 0 #", "if self.switch == 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2 else: self.q2.append(x) while(len(self.q1)>0):", "else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1 self.length += 1 # @return nothing", "boolean def empty(self): return self.length == 0 s = Stack() s.push(1) s.push(2) s.push(3)", "0 s = Stack() s.push(1) s.push(2) s.push(3) print s.top() print s.pop() print s.pop()", "integer # @return nothing def push(self, x): if self.switch == 1: self.q1.append(x) while(len(self.q2)>0):", "==0: return None if self.switch == 1: return self.q2[0] else: return self.q1[0] #", "self.switch == 1: return self.q2.pop(0) else: return self.q1.pop(0) # @return an integer def", "x, an integer # @return nothing def push(self, x): if self.switch == 1:", "data structure here. 
def __init__(self): self.q1 = [] self.q2 =[] self.switch = 1", "return self.q1[0] # @return an boolean def empty(self): return self.length == 0 s", "self.length == 0 s = Stack() s.push(1) s.push(2) s.push(3) print s.top() print s.pop()", "1: return self.q2.pop(0) else: return self.q1.pop(0) # @return an integer def top(self): if", "return self.q2[0] else: return self.q1[0] # @return an boolean def empty(self): return self.length", "1 self.length += 1 # @return nothing def pop(self): if self.length ==0: return", "= [] self.q2 =[] self.switch = 1 self.length = 0 # @param x,", "pop(self): if self.length ==0: return None self.length -= 1 if self.switch == 1:", "# @return an integer def top(self): if self.length ==0: return None if self.switch", "self.switch = 1 self.length = 0 # @param x, an integer # @return", "else: return self.q1.pop(0) # @return an integer def top(self): if self.length ==0: return", "== 1: return self.q2[0] else: return self.q1[0] # @return an boolean def empty(self):", "class Stack: # initialize your data structure here. 
def __init__(self): self.q1 = []", "==0: return None self.length -= 1 if self.switch == 1: return self.q2.pop(0) else:", "s = Stack() s.push(1) s.push(2) s.push(3) print s.top() print s.pop() print s.pop() print", "return None if self.switch == 1: return self.q2[0] else: return self.q1[0] # @return", "[] self.q2 =[] self.switch = 1 self.length = 0 # @param x, an", "# @return an boolean def empty(self): return self.length == 0 s = Stack()", "== 0 s = Stack() s.push(1) s.push(2) s.push(3) print s.top() print s.pop() print", "an integer # @return nothing def push(self, x): if self.switch == 1: self.q1.append(x)", "1 if self.switch == 1: return self.q2.pop(0) else: return self.q1.pop(0) # @return an", "return self.q2.pop(0) else: return self.q1.pop(0) # @return an integer def top(self): if self.length", "self.length -= 1 if self.switch == 1: return self.q2.pop(0) else: return self.q1.pop(0) #", "= 1 self.length = 0 # @param x, an integer # @return nothing", "def push(self, x): if self.switch == 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2", "@return an boolean def empty(self): return self.length == 0 s = Stack() s.push(1)", "here. 
def __init__(self): self.q1 = [] self.q2 =[] self.switch = 1 self.length =", "self.length ==0: return None if self.switch == 1: return self.q2[0] else: return self.q1[0]", "== 1: return self.q2.pop(0) else: return self.q1.pop(0) # @return an integer def top(self):", "self.length ==0: return None self.length -= 1 if self.switch == 1: return self.q2.pop(0)", "top(self): if self.length ==0: return None if self.switch == 1: return self.q2[0] else:", "# @param x, an integer # @return nothing def push(self, x): if self.switch", "None self.length -= 1 if self.switch == 1: return self.q2.pop(0) else: return self.q1.pop(0)", "-= 1 if self.switch == 1: return self.q2.pop(0) else: return self.q1.pop(0) # @return", "0 # @param x, an integer # @return nothing def push(self, x): if", "self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1", "else: return self.q1[0] # @return an boolean def empty(self): return self.length == 0", "None if self.switch == 1: return self.q2[0] else: return self.q1[0] # @return an", "return None self.length -= 1 if self.switch == 1: return self.q2.pop(0) else: return", "@return nothing def push(self, x): if self.switch == 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch", "x): if self.switch == 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2 else: self.q2.append(x)", "an integer def top(self): if self.length ==0: return None if self.switch == 1:", "def pop(self): if self.length ==0: return None self.length -= 1 if self.switch ==", "self.q1 = [] self.q2 =[] self.switch = 1 self.length = 0 # @param", "while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1 self.length", "= 0 # @param x, an integer # @return nothing def push(self, x):", "def 
__init__(self): self.q1 = [] self.q2 =[] self.switch = 1 self.length = 0", "push(self, x): if self.switch == 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2 else:", "+= 1 # @return nothing def pop(self): if self.length ==0: return None self.length", "1 # @return nothing def pop(self): if self.length ==0: return None self.length -=", "if self.length ==0: return None self.length -= 1 if self.switch == 1: return", "self.q1[0] # @return an boolean def empty(self): return self.length == 0 s =", "self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1 self.length += 1 # @return nothing def", "1: return self.q2[0] else: return self.q1[0] # @return an boolean def empty(self): return", "1 self.length = 0 # @param x, an integer # @return nothing def", "self.switch == 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0))", "== 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch", "structure here. def __init__(self): self.q1 = [] self.q2 =[] self.switch = 1 self.length", "your data structure here. 
def __init__(self): self.q1 = [] self.q2 =[] self.switch =", "@param x, an integer # @return nothing def push(self, x): if self.switch ==", "= 2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1 self.length += 1 #", "self.q2 =[] self.switch = 1 self.length = 0 # @param x, an integer", "empty(self): return self.length == 0 s = Stack() s.push(1) s.push(2) s.push(3) print s.top()", "if self.length ==0: return None if self.switch == 1: return self.q2[0] else: return", "nothing def push(self, x): if self.switch == 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch =", "self.switch = 1 self.length += 1 # @return nothing def pop(self): if self.length", "# @return nothing def pop(self): if self.length ==0: return None self.length -= 1", "= Stack() s.push(1) s.push(2) s.push(3) print s.top() print s.pop() print s.pop() print s.pop()", "= 1 self.length += 1 # @return nothing def pop(self): if self.length ==0:", "self.switch == 1: return self.q2[0] else: return self.q1[0] # @return an boolean def", "self.length += 1 # @return nothing def pop(self): if self.length ==0: return None", "@return an integer def top(self): if self.length ==0: return None if self.switch ==", "1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0)) self.switch = 2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch =", "@return nothing def pop(self): if self.length ==0: return None self.length -= 1 if", "while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1 self.length += 1 # @return nothing def pop(self):", "def top(self): if self.length ==0: return None if self.switch == 1: return self.q2[0]", "initialize your data structure here. 
def __init__(self): self.q1 = [] self.q2 =[] self.switch", "nothing def pop(self): if self.length ==0: return None self.length -= 1 if self.switch", "if self.switch == 1: return self.q2[0] else: return self.q1[0] # @return an boolean", "# @return nothing def push(self, x): if self.switch == 1: self.q1.append(x) while(len(self.q2)>0): self.q1.append(self.q2.pop(0))", "return self.q1.pop(0) # @return an integer def top(self): if self.length ==0: return None", "self.q2.pop(0) else: return self.q1.pop(0) # @return an integer def top(self): if self.length ==0:", "an boolean def empty(self): return self.length == 0 s = Stack() s.push(1) s.push(2)", "# initialize your data structure here. def __init__(self): self.q1 = [] self.q2 =[]", "def empty(self): return self.length == 0 s = Stack() s.push(1) s.push(2) s.push(3) print", "=[] self.switch = 1 self.length = 0 # @param x, an integer #", "2 else: self.q2.append(x) while(len(self.q1)>0): self.q2.append(self.q1.pop(0)) self.switch = 1 self.length += 1 # @return", "integer def top(self): if self.length ==0: return None if self.switch == 1: return", "self.q2[0] else: return self.q1[0] # @return an boolean def empty(self): return self.length ==", "Stack: # initialize your data structure here. def __init__(self): self.q1 = [] self.q2", "self.length = 0 # @param x, an integer # @return nothing def push(self,", "self.q2.append(self.q1.pop(0)) self.switch = 1 self.length += 1 # @return nothing def pop(self): if" ]
[ "10 print(n) n += 10 print(n) print(type(n)) # primitive type x = 10", "Number(self.number) # class n = Number(10) print(n) n + 10 print(n) n +=", "__init__(self, num): self.number = num def __str__(self): return str(self.number) def __add__(self, other): self.number", "= Number(10) print(n) n + 10 print(n) n += 10 print(n) print(type(n)) #", "print(n) n + 10 print(n) n += 10 print(n) print(type(n)) # primitive type", "def __str__(self): return str(self.number) def __add__(self, other): self.number += other return Number(self.number) #", "return Number(self.number) # class n = Number(10) print(n) n + 10 print(n) n", "num def __str__(self): return str(self.number) def __add__(self, other): self.number += other return Number(self.number)", "# class n = Number(10) print(n) n + 10 print(n) n += 10", "10 print(n) print(type(n)) # primitive type x = 10 print(x) x += 10", "self.number += other return Number(self.number) # class n = Number(10) print(n) n +", "print(type(n)) # primitive type x = 10 print(x) x += 10 print(x) print(type(x))", "print(n) print(type(n)) # primitive type x = 10 print(x) x += 10 print(x)", "class n = Number(10) print(n) n + 10 print(n) n += 10 print(n)", "+= other return Number(self.number) # class n = Number(10) print(n) n + 10", "Number(10) print(n) n + 10 print(n) n += 10 print(n) print(type(n)) # primitive", "__add__(self, other): self.number += other return Number(self.number) # class n = Number(10) print(n)", "return str(self.number) def __add__(self, other): self.number += other return Number(self.number) # class n", "num): self.number = num def __str__(self): return str(self.number) def __add__(self, other): self.number +=", "other return Number(self.number) # class n = Number(10) print(n) n + 10 print(n)", "+ 10 print(n) n += 10 print(n) print(type(n)) # primitive type x =", "+= 10 print(n) print(type(n)) # primitive type x = 10 print(x) x +=", "= num def __str__(self): return str(self.number) def __add__(self, other): self.number += 
other return", "Number: def __init__(self, num): self.number = num def __str__(self): return str(self.number) def __add__(self,", "n + 10 print(n) n += 10 print(n) print(type(n)) # primitive type x", "n += 10 print(n) print(type(n)) # primitive type x = 10 print(x) x", "other): self.number += other return Number(self.number) # class n = Number(10) print(n) n", "class Number: def __init__(self, num): self.number = num def __str__(self): return str(self.number) def", "__str__(self): return str(self.number) def __add__(self, other): self.number += other return Number(self.number) # class", "str(self.number) def __add__(self, other): self.number += other return Number(self.number) # class n =", "def __add__(self, other): self.number += other return Number(self.number) # class n = Number(10)", "self.number = num def __str__(self): return str(self.number) def __add__(self, other): self.number += other", "print(n) n += 10 print(n) print(type(n)) # primitive type x = 10 print(x)", "n = Number(10) print(n) n + 10 print(n) n += 10 print(n) print(type(n))", "<reponame>motazsaad/WDMM1405<filename>chapter14_OOP/2019/Number.py class Number: def __init__(self, num): self.number = num def __str__(self): return str(self.number)", "def __init__(self, num): self.number = num def __str__(self): return str(self.number) def __add__(self, other):" ]
[ "to a special type used by go ''' @classmethod def process(cls, tree): return", "process(cls, tree): return cls().transform(tree) def transform_r(self, node, in_block=False, assignment=None): if node.value.type == 'standard_iterable_call':", "@classmethod def process(cls, tree): return cls().transform(tree) def transform_r(self, node, in_block=False, assignment=None): if node.value.type", "from pseudo.middlewares.middleware import Middleware from pseudo.pseudo_tree import Node class StandardMiddleware(Middleware): ''' changes standard_iterable_call", "Node class StandardMiddleware(Middleware): ''' changes standard_iterable_call in return to a special type used", "return cls().transform(tree) def transform_r(self, node, in_block=False, assignment=None): if node.value.type == 'standard_iterable_call': node.value.type =", "== 'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return node.value else: return node transform_explicit_return = transform_implicit_return", "import Middleware from pseudo.pseudo_tree import Node class StandardMiddleware(Middleware): ''' changes standard_iterable_call in return", "changes standard_iterable_call in return to a special type used by go ''' @classmethod", "import Node class StandardMiddleware(Middleware): ''' changes standard_iterable_call in return to a special type", "''' changes standard_iterable_call in return to a special type used by go '''", "used by go ''' @classmethod def process(cls, tree): return cls().transform(tree) def transform_r(self, node,", "in_block=False, assignment=None): if node.value.type == 'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return node.value else: return", "StandardMiddleware(Middleware): ''' changes standard_iterable_call in return to a special type used by go", "'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return node.value else: return node transform_explicit_return = transform_implicit_return =", 
"pseudo.pseudo_tree import Node class StandardMiddleware(Middleware): ''' changes standard_iterable_call in return to a special", "from pseudo.pseudo_tree import Node class StandardMiddleware(Middleware): ''' changes standard_iterable_call in return to a", "transform_r(self, node, in_block=False, assignment=None): if node.value.type == 'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return node.value", "''' @classmethod def process(cls, tree): return cls().transform(tree) def transform_r(self, node, in_block=False, assignment=None): if", "assignment=None): if node.value.type == 'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return node.value else: return node", "Middleware from pseudo.pseudo_tree import Node class StandardMiddleware(Middleware): ''' changes standard_iterable_call in return to", "pseudo.middlewares.middleware import Middleware from pseudo.pseudo_tree import Node class StandardMiddleware(Middleware): ''' changes standard_iterable_call in", "node.value.type == 'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return node.value else: return node transform_explicit_return =", "tree): return cls().transform(tree) def transform_r(self, node, in_block=False, assignment=None): if node.value.type == 'standard_iterable_call': node.value.type", "cls().transform(tree) def transform_r(self, node, in_block=False, assignment=None): if node.value.type == 'standard_iterable_call': node.value.type = 'standard_iterable_call_return'", "a special type used by go ''' @classmethod def process(cls, tree): return cls().transform(tree)", "node.value.type = 'standard_iterable_call_return' return node.value else: return node transform_explicit_return = transform_implicit_return = transform_r", "return to a special type used by go ''' @classmethod def process(cls, tree):", "type used by go ''' @classmethod def process(cls, tree): return cls().transform(tree) def transform_r(self,", "node, 
in_block=False, assignment=None): if node.value.type == 'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return node.value else:", "in return to a special type used by go ''' @classmethod def process(cls,", "class StandardMiddleware(Middleware): ''' changes standard_iterable_call in return to a special type used by", "if node.value.type == 'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return node.value else: return node transform_explicit_return", "def transform_r(self, node, in_block=False, assignment=None): if node.value.type == 'standard_iterable_call': node.value.type = 'standard_iterable_call_return' return", "go ''' @classmethod def process(cls, tree): return cls().transform(tree) def transform_r(self, node, in_block=False, assignment=None):", "by go ''' @classmethod def process(cls, tree): return cls().transform(tree) def transform_r(self, node, in_block=False,", "special type used by go ''' @classmethod def process(cls, tree): return cls().transform(tree) def", "def process(cls, tree): return cls().transform(tree) def transform_r(self, node, in_block=False, assignment=None): if node.value.type ==", "standard_iterable_call in return to a special type used by go ''' @classmethod def" ]
[ "# input dim num_classes=300000, # output dim noise=noise, ) # 这里 input 假装是经过了", "但是我们的task中应该是 对应的正确的token的id target = torch.ones(200, 1).long() # [batch, 1] # training mode loss", "[0, 2, 2, 3, 4, 5, 6] # an unigram class probability freq_count", "6] # an unigram class probability freq_count = torch.FloatTensor(class_freq) print(\"total counts for all", "basic usage of NCE module\"\"\" import torch from nce import IndexLinear class_freq =", "input = torch.Tensor(200, 100) # [batch, emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target =", "usage of NCE module\"\"\" import torch from nce import IndexLinear class_freq = [0,", "noise=noise, ) # 这里 input 假装是经过了 embedding之后的 input = torch.Tensor(200, 100) # [batch,", "of basic usage of NCE module\"\"\" import torch from nce import IndexLinear class_freq", "# [batch, 1] # training mode loss = nce_linear(target, input).mean() print(loss.item()) # evaluation", "freq_count = torch.FloatTensor(class_freq) print(\"total counts for all tokens:\", freq_count.sum()) noise = freq_count /", "target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target = torch.ones(200, 1).long() # [batch, 1] # training mode", "/ freq_count.sum() # IndexLinear 继承了NCELoss 类 nce_linear = IndexLinear( embedding_dim=100, # input dim", "# output dim noise=noise, ) # 这里 input 假装是经过了 embedding之后的 input = torch.Tensor(200,", "num_classes=300000, # output dim noise=noise, ) # 这里 input 假装是经过了 embedding之后的 input =", "module\"\"\" import torch from nce import IndexLinear class_freq = [0, 2, 2, 3,", "freq_count.sum()) noise = freq_count / freq_count.sum() # IndexLinear 继承了NCELoss 类 nce_linear = IndexLinear(", "类 nce_linear = IndexLinear( embedding_dim=100, # input dim num_classes=300000, # output dim noise=noise,", "nce_linear(target, input).mean() print(loss.item()) # evaluation mode for fast probability computation nce_linear.eval() prob =", "= freq_count / freq_count.sum() # IndexLinear 继承了NCELoss 类 nce_linear = IndexLinear( embedding_dim=100, #", 
"illustration of basic usage of NCE module\"\"\" import torch from nce import IndexLinear", "torch.Tensor(200, 100) # [batch, emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target = torch.ones(200, 1).long()", "minimal sample script for illustration of basic usage of NCE module\"\"\" import torch", ") # 这里 input 假装是经过了 embedding之后的 input = torch.Tensor(200, 100) # [batch, emb_dim]", "emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target = torch.ones(200, 1).long() # [batch, 1] #", "import torch from nce import IndexLinear class_freq = [0, 2, 2, 3, 4,", "script for illustration of basic usage of NCE module\"\"\" import torch from nce", "1).long() # [batch, 1] # training mode loss = nce_linear(target, input).mean() print(loss.item()) #", "IndexLinear 继承了NCELoss 类 nce_linear = IndexLinear( embedding_dim=100, # input dim num_classes=300000, # output", "= torch.ones(200, 1).long() # [batch, 1] # training mode loss = nce_linear(target, input).mean()", "2, 2, 3, 4, 5, 6] # an unigram class probability freq_count =", "5, 6] # an unigram class probability freq_count = torch.FloatTensor(class_freq) print(\"total counts for", "= IndexLinear( embedding_dim=100, # input dim num_classes=300000, # output dim noise=noise, ) #", "IndexLinear class_freq = [0, 2, 2, 3, 4, 5, 6] # an unigram", "print(\"total counts for all tokens:\", freq_count.sum()) noise = freq_count / freq_count.sum() # IndexLinear", "for illustration of basic usage of NCE module\"\"\" import torch from nce import", "an unigram class probability freq_count = torch.FloatTensor(class_freq) print(\"total counts for all tokens:\", freq_count.sum())", "1] # training mode loss = nce_linear(target, input).mean() print(loss.item()) # evaluation mode for", "= [0, 2, 2, 3, 4, 5, 6] # an unigram class probability", "# evaluation mode for fast probability computation nce_linear.eval() prob = nce_linear(target, input).mean() print(prob.item())", "freq_count / freq_count.sum() # IndexLinear 继承了NCELoss 类 
nce_linear = IndexLinear( embedding_dim=100, # input", "NCE module\"\"\" import torch from nce import IndexLinear class_freq = [0, 2, 2,", "class probability freq_count = torch.FloatTensor(class_freq) print(\"total counts for all tokens:\", freq_count.sum()) noise =", "# 这里 input 假装是经过了 embedding之后的 input = torch.Tensor(200, 100) # [batch, emb_dim] #", "nce import IndexLinear class_freq = [0, 2, 2, 3, 4, 5, 6] #", "# training mode loss = nce_linear(target, input).mean() print(loss.item()) # evaluation mode for fast", "noise = freq_count / freq_count.sum() # IndexLinear 继承了NCELoss 类 nce_linear = IndexLinear( embedding_dim=100,", "对应的正确的token的id target = torch.ones(200, 1).long() # [batch, 1] # training mode loss =", "[batch, emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target = torch.ones(200, 1).long() # [batch, 1]", "of NCE module\"\"\" import torch from nce import IndexLinear class_freq = [0, 2,", "这里 input 假装是经过了 embedding之后的 input = torch.Tensor(200, 100) # [batch, emb_dim] # target中这里是ones,", "loss = nce_linear(target, input).mean() print(loss.item()) # evaluation mode for fast probability computation nce_linear.eval()", "embedding_dim=100, # input dim num_classes=300000, # output dim noise=noise, ) # 这里 input", "继承了NCELoss 类 nce_linear = IndexLinear( embedding_dim=100, # input dim num_classes=300000, # output dim", "# [batch, emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target = torch.ones(200, 1).long() # [batch,", "\"\"\"A minimal sample script for illustration of basic usage of NCE module\"\"\" import", "# an unigram class probability freq_count = torch.FloatTensor(class_freq) print(\"total counts for all tokens:\",", "torch.ones(200, 1).long() # [batch, 1] # training mode loss = nce_linear(target, input).mean() print(loss.item())", "sample script for illustration of basic usage of NCE module\"\"\" import torch from", "print(loss.item()) # evaluation mode for fast probability computation nce_linear.eval() prob = nce_linear(target, 
input).mean()", "torch.FloatTensor(class_freq) print(\"total counts for all tokens:\", freq_count.sum()) noise = freq_count / freq_count.sum() #", "# target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target = torch.ones(200, 1).long() # [batch, 1] # training", "= torch.FloatTensor(class_freq) print(\"total counts for all tokens:\", freq_count.sum()) noise = freq_count / freq_count.sum()", "dim num_classes=300000, # output dim noise=noise, ) # 这里 input 假装是经过了 embedding之后的 input", "for all tokens:\", freq_count.sum()) noise = freq_count / freq_count.sum() # IndexLinear 继承了NCELoss 类", "2, 3, 4, 5, 6] # an unigram class probability freq_count = torch.FloatTensor(class_freq)", "import IndexLinear class_freq = [0, 2, 2, 3, 4, 5, 6] # an", "class_freq = [0, 2, 2, 3, 4, 5, 6] # an unigram class", "embedding之后的 input = torch.Tensor(200, 100) # [batch, emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target", "input).mean() print(loss.item()) # evaluation mode for fast probability computation nce_linear.eval() prob = nce_linear(target,", "output dim noise=noise, ) # 这里 input 假装是经过了 embedding之后的 input = torch.Tensor(200, 100)", "torch from nce import IndexLinear class_freq = [0, 2, 2, 3, 4, 5,", "from nce import IndexLinear class_freq = [0, 2, 2, 3, 4, 5, 6]", "假装是经过了 embedding之后的 input = torch.Tensor(200, 100) # [batch, emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id", "dim noise=noise, ) # 这里 input 假装是经过了 embedding之后的 input = torch.Tensor(200, 100) #", "freq_count.sum() # IndexLinear 继承了NCELoss 类 nce_linear = IndexLinear( embedding_dim=100, # input dim num_classes=300000,", "probability freq_count = torch.FloatTensor(class_freq) print(\"total counts for all tokens:\", freq_count.sum()) noise = freq_count", "3, 4, 5, 6] # an unigram class probability freq_count = torch.FloatTensor(class_freq) print(\"total", "training mode loss = nce_linear(target, input).mean() print(loss.item()) # evaluation mode for fast probability", "target = torch.ones(200, 1).long() # 
[batch, 1] # training mode loss = nce_linear(target,", "unigram class probability freq_count = torch.FloatTensor(class_freq) print(\"total counts for all tokens:\", freq_count.sum()) noise", "tokens:\", freq_count.sum()) noise = freq_count / freq_count.sum() # IndexLinear 继承了NCELoss 类 nce_linear =", "[batch, 1] # training mode loss = nce_linear(target, input).mean() print(loss.item()) # evaluation mode", "100) # [batch, emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target = torch.ones(200, 1).long() #", "nce_linear = IndexLinear( embedding_dim=100, # input dim num_classes=300000, # output dim noise=noise, )", "all tokens:\", freq_count.sum()) noise = freq_count / freq_count.sum() # IndexLinear 继承了NCELoss 类 nce_linear", "mode loss = nce_linear(target, input).mean() print(loss.item()) # evaluation mode for fast probability computation", "input dim num_classes=300000, # output dim noise=noise, ) # 这里 input 假装是经过了 embedding之后的", "counts for all tokens:\", freq_count.sum()) noise = freq_count / freq_count.sum() # IndexLinear 继承了NCELoss", "# IndexLinear 继承了NCELoss 类 nce_linear = IndexLinear( embedding_dim=100, # input dim num_classes=300000, #", "IndexLinear( embedding_dim=100, # input dim num_classes=300000, # output dim noise=noise, ) # 这里", "= nce_linear(target, input).mean() print(loss.item()) # evaluation mode for fast probability computation nce_linear.eval() prob", "4, 5, 6] # an unigram class probability freq_count = torch.FloatTensor(class_freq) print(\"total counts", "= torch.Tensor(200, 100) # [batch, emb_dim] # target中这里是ones, 但是我们的task中应该是 对应的正确的token的id target = torch.ones(200,", "input 假装是经过了 embedding之后的 input = torch.Tensor(200, 100) # [batch, emb_dim] # target中这里是ones, 但是我们的task中应该是" ]
[ "if \"ten\" in image['file_name']: continue else: new_images.append(image) image_id = [] annotations = dic['annotations']", "image_id = [] annotations = dic['annotations'] new_annotations = [] for image in new_images:", "test\"\"\" import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline() f.close() dic =", "[] for image in new_images: # print(image) image_id.append(image['id']) for annotation in annotations: if", "is aimed to generate a small datasets for test\"\"\" import json f =", "image_id: new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"] = new_annotations f1 = open(\"/home/ayb/UVM_Datasets/voc_test_not_ten.json\", \"w\") dic_json", "if annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"] = new_annotations f1 =", "json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline() f.close() dic = eval(line) images", "f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline() f.close() dic = eval(line) images =", "f.close() dic = eval(line) images = dic['images'] new_images=[] for image in images: if", "for image in new_images: # print(image) image_id.append(image['id']) for annotation in annotations: if annotation['image_id']", "generate a small datasets for test\"\"\" import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line", "else: new_images.append(image) image_id = [] annotations = dic['annotations'] new_annotations = [] for image", "\"ten\" in image['file_name']: continue else: new_images.append(image) image_id = [] annotations = dic['annotations'] new_annotations", "image in new_images: # print(image) image_id.append(image['id']) for annotation in annotations: if annotation['image_id'] in", "= eval(line) images = dic['images'] new_images=[] for image in images: if \"ten\" in", "in new_images: # print(image) image_id.append(image['id']) for annotation in 
annotations: if annotation['image_id'] in image_id:", "datasets for test\"\"\" import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline() f.close()", "for test\"\"\" import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline() f.close() dic", "annotations = dic['annotations'] new_annotations = [] for image in new_images: # print(image) image_id.append(image['id'])", "continue else: new_images.append(image) image_id = [] annotations = dic['annotations'] new_annotations = [] for", "small datasets for test\"\"\" import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline()", "new_images=[] for image in images: if \"ten\" in image['file_name']: continue else: new_images.append(image) image_id", "print(image) image_id.append(image['id']) for annotation in annotations: if annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"] =", "= dic['images'] new_images=[] for image in images: if \"ten\" in image['file_name']: continue else:", "= [] annotations = dic['annotations'] new_annotations = [] for image in new_images: #", "annotation in annotations: if annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"] =", "[] annotations = dic['annotations'] new_annotations = [] for image in new_images: # print(image)", "\"\"\"this file is aimed to generate a small datasets for test\"\"\" import json", "dic['images'] new_images=[] for image in images: if \"ten\" in image['file_name']: continue else: new_images.append(image)", "= new_images dic[\"annotations\"] = new_annotations f1 = open(\"/home/ayb/UVM_Datasets/voc_test_not_ten.json\", \"w\") dic_json = json.dumps(dic) f1.write(str(dic_json))", "new_images dic[\"annotations\"] = new_annotations f1 = open(\"/home/ayb/UVM_Datasets/voc_test_not_ten.json\", \"w\") dic_json = json.dumps(dic) f1.write(str(dic_json)) f1.close()", "for image in images: if 
\"ten\" in image['file_name']: continue else: new_images.append(image) image_id =", "\"r\") line = f.readline() f.close() dic = eval(line) images = dic['images'] new_images=[] for", "aimed to generate a small datasets for test\"\"\" import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\",", "in image['file_name']: continue else: new_images.append(image) image_id = [] annotations = dic['annotations'] new_annotations =", "dic['annotations'] new_annotations = [] for image in new_images: # print(image) image_id.append(image['id']) for annotation", "in annotations: if annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"] = new_annotations", "in image_id: new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"] = new_annotations f1 = open(\"/home/ayb/UVM_Datasets/voc_test_not_ten.json\", \"w\")", "new_images.append(image) image_id = [] annotations = dic['annotations'] new_annotations = [] for image in", "open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline() f.close() dic = eval(line) images = dic['images'] new_images=[]", "image in images: if \"ten\" in image['file_name']: continue else: new_images.append(image) image_id = []", "new_annotations = [] for image in new_images: # print(image) image_id.append(image['id']) for annotation in", "a small datasets for test\"\"\" import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line =", "line = f.readline() f.close() dic = eval(line) images = dic['images'] new_images=[] for image", "images: if \"ten\" in image['file_name']: continue else: new_images.append(image) image_id = [] annotations =", "dic = eval(line) images = dic['images'] new_images=[] for image in images: if \"ten\"", "new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"] = new_annotations f1 = open(\"/home/ayb/UVM_Datasets/voc_test_not_ten.json\", \"w\") dic_json =", "= f.readline() f.close() dic 
= eval(line) images = dic['images'] new_images=[] for image in", "= [] for image in new_images: # print(image) image_id.append(image['id']) for annotation in annotations:", "file is aimed to generate a small datasets for test\"\"\" import json f", "= open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline() f.close() dic = eval(line) images = dic['images']", "eval(line) images = dic['images'] new_images=[] for image in images: if \"ten\" in image['file_name']:", "image['file_name']: continue else: new_images.append(image) image_id = [] annotations = dic['annotations'] new_annotations = []", "= dic['annotations'] new_annotations = [] for image in new_images: # print(image) image_id.append(image['id']) for", "in images: if \"ten\" in image['file_name']: continue else: new_images.append(image) image_id = [] annotations", "dic[\"images\"] = new_images dic[\"annotations\"] = new_annotations f1 = open(\"/home/ayb/UVM_Datasets/voc_test_not_ten.json\", \"w\") dic_json = json.dumps(dic)", "image_id.append(image['id']) for annotation in annotations: if annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"] = new_images", "import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\") line = f.readline() f.close() dic = eval(line)", "for annotation in annotations: if annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"]", "to generate a small datasets for test\"\"\" import json f = open(\"/home/ayb/UVM_Datasets/voc_test3.json\", \"r\")", "f.readline() f.close() dic = eval(line) images = dic['images'] new_images=[] for image in images:", "new_images: # print(image) image_id.append(image['id']) for annotation in annotations: if annotation['image_id'] in image_id: new_annotations.append(annotation)", "annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"] = new_annotations f1 = 
open(\"/home/ayb/UVM_Datasets/voc_test_not_ten.json\",", "annotations: if annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"] = new_images dic[\"annotations\"] = new_annotations f1", "images = dic['images'] new_images=[] for image in images: if \"ten\" in image['file_name']: continue", "# print(image) image_id.append(image['id']) for annotation in annotations: if annotation['image_id'] in image_id: new_annotations.append(annotation) dic[\"images\"]" ]
def area_parede(larg, alt):
    """Return the wall area in m² for the given width and height."""
    return larg * alt


def litros_de_tinta(area):
    """Return the litres of paint needed; one litre covers 2 m²."""
    return area / 2


def main():
    """Ask for the wall dimensions and report area and paint required."""
    larg = float(input("Largura da parede: "))
    alt = float(input("Altura da parede: "))
    area = area_parede(larg, alt)
    # Fix: the original message started with the typo "SUa".
    print(f'Sua parede tem a dimensão de {larg:.2f}x{alt:.2f} e sua área é de {area:.3f}m².')
    print(f'Para pintar essa parede, você precisará de {litros_de_tinta(area)} litros de tinta')


if __name__ == "__main__":
    main()
from django.contrib.auth.models import User
from django.http import response
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from rest_framework.authtoken.models import Token

from movielist_app.api import serializers
from movielist_app import models


class StreamPlatformTestCase(APITestCase):
    """Exercise the StreamPlatform endpoints as a regular (non-admin) user."""

    def setUp(self):
        # Token-authenticate every request in this class with a plain user.
        self.user = User.objects.create_user(username="example", password="<PASSWORD>")
        self.token = Token.objects.get(user__username=self.user)
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key)
        # One platform created directly through the ORM for the read tests.
        self.stream = models.StreamPlatform.objects.create(
            name="Netflix",
            about="#1 Streaming Platform",
            website="https://netflix.com",
        )

    def test_streamplatform_create(self):
        payload = {
            "name": "Netflix",
            "about": "#1 Streaming Platform",
            "website": "https://netflix.com",
        }
        reply = self.client.post(reverse('streamplatform-list'), payload)
        # Creation is restricted to admins, so a regular user is refused.
        self.assertEqual(reply.status_code, status.HTTP_403_FORBIDDEN)

    def test_streamplatform_list(self):
        reply = self.client.get(reverse('streamplatform-list'))
        self.assertEqual(reply.status_code, status.HTTP_200_OK)

    def test_streamplatform_ind(self):
        # Fetch the single platform created in setUp by its id.
        reply = self.client.get(reverse('streamplatform-detail', args=(self.stream.id,)))
        self.assertEqual(reply.status_code, status.HTTP_200_OK)
The \"setUp\" method is taking care of that.", "= self.client.get(reverse('streamplatform-detail' ,args= (self.stream.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) class WatchListTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\",", "self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self):", "\"description\" : \"Great Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\" : False } response =", "more checks to the tests. i.c(For the content and # the number of", "= \"Example Story\", platform = self.stream , active = True ) def test_watchlist_create(self):", "getting the individual object using the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. we are", "authentication------------------------- LINKS: https://www.django-rest-framework.org/api-guide/testing/#forcing-authentication 3. 
We are \"Forcing authentication\" to login as anonymous 4.", "\"active\" : False } response = self.client.put(reverse('review-detail', args=(self.review.id,)), data) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_list(self):", ") self.watchlist2 = models.WatchList.objects.create( title = \"Example Movie\" , storyline = \"Example Story\",", "\"Example Story\", \"platform\" : self.stream , \"active\" : True } response = self.client.post(reverse('movie-list')", "from rest_framework import status from rest_framework.test import APITestCase from rest_framework.authtoken.models import Token from", "the content and # the number of reviews that can be created on", "data = { \"review_user\": self.user , \"rating \" : 4, \"description\" : \"Great", "self.client.delete(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) def test_review_user(self): response = self.client.get('/watch/reviews/?username' + self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK)", "self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self): response =", "data = { \"review_user\": self.user , \"rating \" : 5, \"description\" : \"Great", "created on a movie) self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "Streaming Platform\", website = \"https://netflix.com\" ) def test_streamplatform_create(self): data = { \"name\" :", "{ \"review_user\": self.user , \"rating \" : 5, \"description\" : \"Great Movie!!!\", \"watchlist\"", "Movie\" , storyline = \"Example Story\", platform = self.stream , active = True", "= { \"name\" : \"Netflix\", \"about\" : \"#1 
Streaming Platform\", \"website\" : \"https://netflix.com\"", "= self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_review_create_update(self): data = { \"review_user\": self.user", "The first is used to test the \"test_review_create and test_review_create_unauth\" While, the second", "a movie) self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self):", "we are using the \"setUp\" methods again to create a \"StreamPlatform\" for test", "\"setUp\" methods again to create a \"StreamPlatform\" for test we are currently writing.", "\"self.watchlist\" attribute to get an id for the review to be updated. NOTE:", "allowed to send multilpe review on a watchlist. The first is used to", "again to create a \"StreamPlatform\" for test we are currently writing. Hence, \"each", "\"review_user\": self.user , \"rating \" : 4, \"description\" : \"Great Movie!!!-(Updated)\", \"watchlist\" :", "} response = self.client.post(reverse('movie-list') , data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response = self.client.get(reverse('movie-list'))", "self.client.put(reverse('review-detail', args=(self.review.id,)), data) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_list(self): response = self.client.get(reverse('review-list', args=(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK)", "request is sent without the authorization. It returns \"HTTP_401_UNAUTHORIZED\". The \"setUp\" method is", "first is used to test the \"test_review_create and test_review_create_unauth\" While, the second is", "the request is sent without the authorization. It returns \"HTTP_401_UNAUTHORIZED\". 
The \"setUp\" method", ", \"rating \" : 4, \"description\" : \"Great Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\"", "using the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. we are using the \"setUp\" methods", "self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key) self.stream = models.StreamPlatform.objects.create( name = \"Netflix\", about = \"#1", "= \"#1 Streaming Platform\", website = \"https://netflix.com\" ) def test_streamplatform_create(self): data = {", "self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class ReviewTestCase(APITestCase): def setUp(self): self.user =", "the tests. i.c(For the content and # the number of reviews that can", "data) self.assertEqual(response.status_code, status.HTTP_201_CREATED) # Added more checks to the tests. i.c(For the content", "self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_delete(self): response = self.client.delete(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) def", "= self.client.put(reverse('review-detail', args=(self.review.id,)), data) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_list(self): response = self.client.get(reverse('review-list', args=(self.watchlist.id,))) self.assertEqual(response.status_code,", "self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self): response =", "args=(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_ind(self): response 
class ReviewTestCase(APITestCase):
    """Exercise the Review endpoints as an authenticated regular user."""

    def setUp(self):
        self.user = User.objects.create_user(username="example", password="<PASSWORD>")
        # NOTE(review): user__username is compared against the User object,
        # not self.user.username — works via str(user), but verify intent.
        self.token = Token.objects.get(user__username = self.user)
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key)
        self.stream = models.StreamPlatform.objects.create(
            name = "Netflix",
            about = "#1 Streaming Platform",
            website = "https://netflix.com"
        )
        # Two watchlists: self.watchlist is reviewed through the API in the
        # create tests; self.watchlist2 already carries self.review for the
        # update/detail/delete tests (only one review per user per watchlist
        # is allowed).
        self.watchlist = models.WatchList.objects.create(
            title = "Example Movie",
            storyline = "Example Story",
            platform = self.stream,
            active = True
        )
        self.watchlist2 = models.WatchList.objects.create(
            title = "Example Movie",
            storyline = "Example Story",
            platform = self.stream,
            active = True
        )
        self.review = models.Review.objects.create(
            review_user = self.user,
            rating = 5,
            description = "Great Movie!!!",
            watchlist = self.watchlist2,
            active = True
        )

    def test_review_create(self):
        # NOTE(review): the "rating " key carries a trailing space, so the
        # serializer most likely never receives a rating value — confirm
        # whether this is intentional before renaming it.
        data = {
            "review_user": self.user,
            "rating " : 5,
            "description" : "Great Movie!!!",
            "watchlist" : self.watchlist,
            "active" : True
        }
        response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # setUp made one review; the POST above added the second.
        self.assertEqual(models.Review.objects.count(), 2)
        # A second review on the same watchlist by the same user is rejected.
        response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_review_create_unauth(self):
        data = {
            "review_user": self.user,
            "rating " : 5,
            "description" : "Great Movie!!!",
            "watchlist" : self.watchlist,
            "active" : True
        }
        # Drop the credentials set in setUp and post anonymously.
        self.client.force_authenticate(user=None)
        response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_review_create_update(self):
        data = {
            "review_user": self.user,
            "rating " : 4,
            "description" : "Great Movie!!!-(Updated)",
            "watchlist" : self.watchlist,
            "active" : False
        }
        response = self.client.put(reverse('review-detail', args=(self.review.id,)), data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_review_list(self):
        response = self.client.get(reverse('review-list', args=(self.watchlist.id,)))
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_review_ind(self):
        response = self.client.get(reverse('review-detail', args=(self.review.id,)))
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_review_delete(self):
        response = self.client.delete(reverse('review-detail', args=(self.review.id,)))
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_review_user(self):
        # Fix: the original concatenated '?username' + username, producing a
        # fused parameter name like "?usernameexample"; the '=' was missing.
        response = self.client.get('/watch/reviews/?username=' + self.user.username)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
NOTE: The 2 'self.watchlist' i.e('self.watchlist & self.watchlist2') was", "self.user , \"rating \" : 5, \"description\" : \"Great Movie!!!\", \"watchlist\" : self.watchlist,", "response = self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_delete(self): response = self.client.delete(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code,", "\"about\" : \"#1 Streaming Platform\", \"website\" : \"https://netflix.com\" } response = self.client.post(reverse('streamplatform-list'), data)", "'user' which isn't the 'admin'. Hence it going to return \"HTTP_403_FORBIDDEN\" which 'ok'", "= self.client.get('/watch/reviews/?username' + self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK) \"\"\" IMPORTANT: The we are using the", "= \"https://netflix.com\" ) def test_streamplatform_create(self): data = { \"name\" : \"Netflix\", \"about\" :", ", active = True ) def test_watchlist_create(self): data = { \"title\": \"Example Movie\"", "= self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class ReviewTestCase(APITestCase):", "is sent without the authorization. It returns \"HTTP_401_UNAUTHORIZED\". The \"setUp\" method is taking", "= models.WatchList.objects.create( title = \"Example Movie\" , storyline = \"Example Story\", platform =", "models.WatchList.objects.create( title = \"Example Movie\" , storyline = \"Example Story\", platform = self.stream", "2. 
\"self.watchlist\" is attribute in the \"setUp\" for creating the \"watchlist\" object manually.", "= True ) self.review = models.Review.objects.create( review_user =self.user , rating = 5, description", "data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self): data = { \"review_user\": self.user , \"rating \"", "watchlist = self.watchlist2, active = True ) def test_review_create(self): data = { \"review_user\":", "movielist_app.api import serializers from movielist_app import models class StreamPlatformTestCase(APITestCase): def setUp(self): self.user =", "that. NOTE: Once the request is sent without \"admin\" credentials. It returns \"HTTP_403_FORBIDDEN\"", "that can be created on a movie) self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)),", "content and # the number of reviews that can be created on a", "& self.watchlist2') was created because we aren't allowed to send multilpe review on", "is used to test the \"test_review_create and test_review_create_unauth\" While, the second is used", "response = self.client.post(reverse('movie-list') , data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code,", "User from django.http import response from django.urls import reverse from rest_framework import status", "manually. NOTE: \"test_streamplatform_ind\" method is for getting the individual object using the \"self.stream\"", "self.watchlist = models.WatchList.objects.create( title = \"Example Movie\" , storyline = \"Example Story\", platform", "Movie\" , \"storyline\": \"Example Story\", \"platform\" : self.stream , \"active\" : True }", "because we aren't allowed to send multilpe review on a watchlist. 
The first", "status.HTTP_204_NO_CONTENT) def test_review_user(self): response = self.client.get('/watch/reviews/?username' + self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK) \"\"\" IMPORTANT: The", "1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class ReviewTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token", "the \"setUp\" for creating the \"watchlist\" object manually. ----------------------- Forcing authentication------------------------- LINKS: https://www.django-rest-framework.org/api-guide/testing/#forcing-authentication", "response = self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK)", "review_user =self.user , rating = 5, description = \"Great Movie!!!\", watchlist = self.watchlist2,", "self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_review_create_update(self): data = { \"review_user\": self.user ,", "self.stream , active = True ) self.watchlist2 = models.WatchList.objects.create( title = \"Example Movie\"", "def test_watchlist_create(self): data = { \"title\": \"Example Movie\" , \"storyline\": \"Example Story\", \"platform\"", "and # the number of reviews that can be created on a movie)", "be updated. NOTE: The 2 'self.watchlist' i.e('self.watchlist & self.watchlist2') was created because we", "def test_review_create_unauth(self): data = { \"review_user\": self.user , \"rating \" : 5, \"description\"", "authentication\" to login as anonymous 4. 
Created a \"self.watchlist\" attribute to get an", "def test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self): response = self.client.get(reverse('movie-detail' ,args=", "to be updated. NOTE: The 2 'self.watchlist' i.e('self.watchlist & self.watchlist2') was created because", "\"platform\" : self.stream , \"active\" : True } response = self.client.post(reverse('movie-list') , data)", "from movielist_app.api import serializers from movielist_app import models class StreamPlatformTestCase(APITestCase): def setUp(self): self.user", "response = self.client.get('/watch/reviews/?username' + self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK) \"\"\" IMPORTANT: The we are using", "\"https://netflix.com\" ) def test_streamplatform_create(self): data = { \"name\" : \"Netflix\", \"about\" : \"#1", "status.HTTP_200_OK) def test_review_ind(self): response = self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_delete(self): response =", "\"self.stream\" is used for creating the \"streamplatorm\" manually. NOTE: \"test_streamplatform_ind\" method is for", "Streaming Platform\", \"website\" : \"https://netflix.com\" } response = self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def", "rest_framework.test import APITestCase from rest_framework.authtoken.models import Token from movielist_app.api import serializers from movielist_app", "+ self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK) \"\"\" IMPORTANT: The we are using the 'user' which", "'ok' NOTE: Once the request is sent without the authorization. It returns \"HTTP_401_UNAUTHORIZED\".", "are currently writing. Hence, \"each test classes are independent\". 2. 
\"self.watchlist\" is attribute", "website = \"https://netflix.com\" ) def test_streamplatform_create(self): data = { \"name\" : \"Netflix\", \"about\"", "for creating the \"streamplatorm\" manually. NOTE: \"test_streamplatform_ind\" method is for getting the individual", ": True } response = self.client.post(reverse('movie-list') , data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response", "name = \"Netflix\", about = \"#1 Streaming Platform\", website = \"https://netflix.com\" ) def", ", data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self):", "authorization. It returns \"HTTP_401_UNAUTHORIZED\". The \"setUp\" method is taking care of that. NOTE:", "Hence it going to return \"HTTP_403_FORBIDDEN\" which 'ok' NOTE: Once the request is", "a \"self.watchlist\" attribute to get an id for the review to be updated.", "Movie') class ReviewTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username =", "django.contrib.auth.models import User from django.http import response from django.urls import reverse from rest_framework", "rating = 5, description = \"Great Movie!!!\", watchlist = self.watchlist2, active = True", "aren't allowed to send multilpe review on a watchlist. The first is used", "= \"https://netflix.com\" ) self.watchlist = models.WatchList.objects.create( title = \"Example Movie\" , storyline =", "{ \"title\": \"Example Movie\" , \"storyline\": \"Example Story\", \"platform\" : self.stream , \"active\"", ", storyline = \"Example Story\", platform = self.stream , active = True )", "watchlist. The first is used to test the \"test_review_create and test_review_create_unauth\" While, the", "writing. 
Hence, \"each test classes are independent\". 2. \"self.watchlist\" is attribute in the", "active = True ) self.watchlist2 = models.WatchList.objects.create( title = \"Example Movie\" , storyline", ": True } response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_201_CREATED) # Added more", "the request is sent without \"admin\" credentials. It returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is", "self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self): response = self.client.get(reverse('streamplatform-detail' ,args= (self.stream.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) class", "\"setUp\" method is taking care of that. NOTE: Once the request is sent", "active = True ) def test_watchlist_create(self): data = { \"title\": \"Example Movie\" ,", "object using the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. we are using the \"setUp\"", "\"watchlist\" : self.watchlist, \"active\" : False } response = self.client.put(reverse('review-detail', args=(self.review.id,)), data) self.assertEqual(response.status_code,", "= Token.objects.get(user__username = self.user) self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key) self.stream = models.StreamPlatform.objects.create( name =", "without the authorization. It returns \"HTTP_401_UNAUTHORIZED\". The \"setUp\" method is taking care of", "\"Netflix\", \"about\" : \"#1 Streaming Platform\", \"website\" : \"https://netflix.com\" } response = self.client.post(reverse('streamplatform-list'),", "care of that. NOTE: Once the request is sent without \"admin\" credentials. It", "used for creating the \"streamplatorm\" manually. 
NOTE: \"test_streamplatform_ind\" method is for getting the", "self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) def test_review_user(self): response = self.client.get('/watch/reviews/?username' + self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK) \"\"\" IMPORTANT:", "\"https://netflix.com\" } response = self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list'))", ": \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\" : True } response = self.client.post(reverse('review-create',", "status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self): response = self.client.get(reverse('streamplatform-detail'", ", \"rating \" : 5, \"description\" : \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\"", "\"setUp\" for creating the \"watchlist\" object manually. ----------------------- Forcing authentication------------------------- LINKS: https://www.django-rest-framework.org/api-guide/testing/#forcing-authentication 3.", "self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_list(self): response = self.client.get(reverse('review-list', args=(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_ind(self): response", "def test_review_create(self): data = { \"review_user\": self.user , \"rating \" : 5, \"description\"", "We are \"Forcing authentication\" to login as anonymous 4. Created a \"self.watchlist\" attribute", "on a watchlist. 
The first is used to test the \"test_review_create and test_review_create_unauth\"", "} response = self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code,", "import models class StreamPlatformTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username", "= \"Netflix\", about = \"#1 Streaming Platform\", website = \"https://netflix.com\" ) def test_streamplatform_create(self):", "def test_review_user(self): response = self.client.get('/watch/reviews/?username' + self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK) \"\"\" IMPORTANT: The we", "using the 'user' which isn't the 'admin'. Hence it going to return \"HTTP_403_FORBIDDEN\"", "Token.objects.get(user__username = self.user) self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key) self.stream = models.StreamPlatform.objects.create( name = \"Netflix\",", "Added more checks to the tests. i.c(For the content and # the number", "\"#1 Streaming Platform\", website = \"https://netflix.com\" ) def test_streamplatform_create(self): data = { \"name\"", "the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. we are using the \"setUp\" methods again", "platform = self.stream , active = True ) self.watchlist2 = models.WatchList.objects.create( title =", ": \"Great Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\" : False } response = self.client.put(reverse('review-detail',", "anonymous 4. 
Created a \"self.watchlist\" attribute to get an id for the review", "(self.stream.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) class WatchListTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token =", "self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_delete(self): response = self.client.delete(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) def test_review_user(self): response", "= self.stream , active = True ) def test_watchlist_create(self): data = { \"title\":", "to the tests. i.c(For the content and # the number of reviews that", "import User from django.http import response from django.urls import reverse from rest_framework import", "can be created on a movie) self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data)", "is sent without \"admin\" credentials. It returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is used for", "created because we aren't allowed to send multilpe review on a watchlist. The", "self.watchlist, \"active\" : False } response = self.client.put(reverse('review-detail', args=(self.review.id,)), data) self.assertEqual(response.status_code, status.HTTP_200_OK) def", ": \"#1 Streaming Platform\", \"website\" : \"https://netflix.com\" } response = self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code,", "data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self): response", "for the review to be updated. 
NOTE: The 2 'self.watchlist' i.e('self.watchlist & self.watchlist2')", "movielist_app import models class StreamPlatformTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token =", "the \"streamplatorm\" manually. NOTE: \"test_streamplatform_ind\" method is for getting the individual object using", "NOTE: Once the request is sent without the authorization. It returns \"HTTP_401_UNAUTHORIZED\". The", ") self.review = models.Review.objects.create( review_user =self.user , rating = 5, description = \"Great", "creating the \"watchlist\" object manually. ----------------------- Forcing authentication------------------------- LINKS: https://www.django-rest-framework.org/api-guide/testing/#forcing-authentication 3. We are", "self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self): data =", "= \"Example Story\", platform = self.stream , active = True ) self.review =", "from rest_framework.authtoken.models import Token from movielist_app.api import serializers from movielist_app import models class", "3. We are \"Forcing authentication\" to login as anonymous 4. Created a \"self.watchlist\"", "NOTE: \"self.stream\" is used for creating the \"streamplatorm\" manually. 
NOTE: \"test_streamplatform_ind\" method is", "of reviews that can be created on a movie) self.assertEqual(models.Review.objects.count(), 2) response =", "test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self): response = self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,)))", "self.watchlist2, active = True ) def test_review_create(self): data = { \"review_user\": self.user ,", "(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class ReviewTestCase(APITestCase): def setUp(self): self.user", "\"Example Story\", platform = self.stream , active = True ) self.review = models.Review.objects.create(", ": 5, \"description\" : \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\" : True }", "\"review_user\": self.user , \"rating \" : 5, \"description\" : \"Great Movie!!!\", \"watchlist\" :", "to get an id for the review to be updated. NOTE: The 2", "\"HTTP_403_FORBIDDEN\" which 'ok' NOTE: Once the request is sent without the authorization. It", "\"Great Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\" : False } response = self.client.put(reverse('review-detail', args=(self.review.id,)),", "test_review_create_update(self): data = { \"review_user\": self.user , \"rating \" : 4, \"description\" :", "import response from django.urls import reverse from rest_framework import status from rest_framework.test import", "methods again to create a \"StreamPlatform\" for test we are currently writing. 
Hence,", "import Token from movielist_app.api import serializers from movielist_app import models class StreamPlatformTestCase(APITestCase): def", "{ \"review_user\": self.user , \"rating \" : 4, \"description\" : \"Great Movie!!!-(Updated)\", \"watchlist\"", "are using the \"setUp\" methods again to create a \"StreamPlatform\" for test we", "the \"test_review_create and test_review_create_unauth\" While, the second is used to for the update,", "import APITestCase from rest_framework.authtoken.models import Token from movielist_app.api import serializers from movielist_app import", "test_watchlist_create(self): data = { \"title\": \"Example Movie\" , \"storyline\": \"Example Story\", \"platform\" :", "\" : 4, \"description\" : \"Great Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\" : False", "\"Netflix\", about = \"#1 Streaming Platform\", website = \"https://netflix.com\" ) self.watchlist = models.WatchList.objects.create(", ") def test_streamplatform_create(self): data = { \"name\" : \"Netflix\", \"about\" : \"#1 Streaming", "updated. NOTE: The 2 'self.watchlist' i.e('self.watchlist & self.watchlist2') was created because we aren't", "def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user) self.client.credentials(HTTP_AUTHORIZATION='Token '", "{ \"name\" : \"Netflix\", \"about\" : \"#1 Streaming Platform\", \"website\" : \"https://netflix.com\" }", "Platform\", website = \"https://netflix.com\" ) self.watchlist = models.WatchList.objects.create( title = \"Example Movie\" ,", "response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_review_create_update(self): data = { \"review_user\":", "tests. 
i.c(For the content and # the number of reviews that can be", "True ) def test_watchlist_create(self): data = { \"title\": \"Example Movie\" , \"storyline\": \"Example", "self.stream , active = True ) self.review = models.Review.objects.create( review_user =self.user , rating", "= \"Netflix\", about = \"#1 Streaming Platform\", website = \"https://netflix.com\" ) self.watchlist =", "LINKS: https://www.django-rest-framework.org/api-guide/testing/#forcing-authentication 3. We are \"Forcing authentication\" to login as anonymous 4. Created", "of that. NOTE: Once the request is sent without \"admin\" credentials. It returns", "create a \"StreamPlatform\" for test we are currently writing. Hence, \"each test classes", "about = \"#1 Streaming Platform\", website = \"https://netflix.com\" ) def test_streamplatform_create(self): data =", "self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self): response = self.client.get(reverse('streamplatform-detail' ,args= (self.stream.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) class WatchListTestCase(APITestCase):", "we are using the 'user' which isn't the 'admin'. Hence it going to", "is taking care of that. NOTE: Once the request is sent without \"admin\"", "test_review_create(self): data = { \"review_user\": self.user , \"rating \" : 5, \"description\" :", "= self.watchlist2, active = True ) def test_review_create(self): data = { \"review_user\": self.user", "import status from rest_framework.test import APITestCase from rest_framework.authtoken.models import Token from movielist_app.api import", "about = \"#1 Streaming Platform\", website = \"https://netflix.com\" ) self.watchlist = models.WatchList.objects.create( title", "as anonymous 4. Created a \"self.watchlist\" attribute to get an id for the", "\"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. 
we are using the \"setUp\" methods again to", "classes are independent\". 2. \"self.watchlist\" is attribute in the \"setUp\" for creating the", "\"watchlist\" : self.watchlist, \"active\" : True } self.client.force_authenticate(user=None) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data)", "django.urls import reverse from rest_framework import status from rest_framework.test import APITestCase from rest_framework.authtoken.models", "rest_framework.authtoken.models import Token from movielist_app.api import serializers from movielist_app import models class StreamPlatformTestCase(APITestCase):", ", \"active\" : True } response = self.client.post(reverse('movie-list') , data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def", "= self.client.get(reverse('review-list', args=(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_ind(self): response = self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK)", "= \"Example Movie\" , storyline = \"Example Story\", platform = self.stream , active", "reviews that can be created on a movie) self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create',", "5, \"description\" : \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\" : True } response", "is used for creating the \"streamplatorm\" manually. NOTE: \"test_streamplatform_ind\" method is for getting", "we aren't allowed to send multilpe review on a watchlist. The first is", "are independent\". 2. 
\"self.watchlist\" is attribute in the \"setUp\" for creating the \"watchlist\"", "= True ) self.watchlist2 = models.WatchList.objects.create( title = \"Example Movie\" , storyline =", "def test_watchlist_ind(self): response = self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example", "It returns \"HTTP_401_UNAUTHORIZED\". The \"setUp\" method is taking care of that. NOTE: Once", "which isn't the 'admin'. Hence it going to return \"HTTP_403_FORBIDDEN\" which 'ok' NOTE:", "def test_review_ind(self): response = self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_delete(self): response = self.client.delete(reverse('review-detail',", "'Example Movie') class ReviewTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username", "self.watchlist2 = models.WatchList.objects.create( title = \"Example Movie\" , storyline = \"Example Story\", platform", "method is taking care of that. NOTE: Once the request is sent without", "Created a \"self.watchlist\" attribute to get an id for the review to be", "It returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is used for creating the \"streamplatorm\" manually. NOTE:", "using the \"setUp\" methods again to create a \"StreamPlatform\" for test we are", ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. 
we are using the \"setUp\" methods again to create a \"StreamPlatform\"", "going to return \"HTTP_403_FORBIDDEN\" which 'ok' NOTE: Once the request is sent without", "Story\", platform = self.stream , active = True ) self.review = models.Review.objects.create( review_user", "\"Great Movie!!!\", watchlist = self.watchlist2, active = True ) def test_review_create(self): data =", "on a movie) self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def", "\"description\" : \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\" : True } self.client.force_authenticate(user=None) response", "Streaming Platform\", website = \"https://netflix.com\" ) self.watchlist = models.WatchList.objects.create( title = \"Example Movie\"", "self.token.key) self.stream = models.StreamPlatform.objects.create( name = \"Netflix\", about = \"#1 Streaming Platform\", website", "The 2 'self.watchlist' i.e('self.watchlist & self.watchlist2') was created because we aren't allowed to", "class WatchListTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user)", "Hence, \"each test classes are independent\". 2. \"self.watchlist\" is attribute in the \"setUp\"", "Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\" : False } response = self.client.put(reverse('review-detail', args=(self.review.id,)), data)", "self.client.get('/watch/reviews/?username' + self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK) \"\"\" IMPORTANT: The we are using the 'user'", "to login as anonymous 4. 
Created a \"self.watchlist\" attribute to get an id", "be created on a movie) self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code,", "returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is used for creating the \"streamplatorm\" manually. NOTE: \"test_streamplatform_ind\"", "active = True ) self.review = models.Review.objects.create( review_user =self.user , rating = 5,", "in the \"setUp\" for creating the \"watchlist\" object manually. ----------------------- Forcing authentication------------------------- LINKS:", "self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class ReviewTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\")", "from movielist_app import models class StreamPlatformTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token", "self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_review_create_update(self): data = { \"review_user\": self.user , \"rating \" :", "Story\", platform = self.stream , active = True ) self.watchlist2 = models.WatchList.objects.create( title", "to create a \"StreamPlatform\" for test we are currently writing. Hence, \"each test", "self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_ind(self): response = self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_delete(self): response", "multilpe review on a watchlist. 
The first is used to test the \"test_review_create", "data = { \"name\" : \"Netflix\", \"about\" : \"#1 Streaming Platform\", \"website\" :", "is for getting the individual object using the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1.", "status.HTTP_401_UNAUTHORIZED) def test_review_create_update(self): data = { \"review_user\": self.user , \"rating \" : 4,", "self.review = models.Review.objects.create( review_user =self.user , rating = 5, description = \"Great Movie!!!\",", "\"Example Movie\" , storyline = \"Example Story\", platform = self.stream , active =", "self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self): data = { \"review_user\": self.user ,", "\"active\" : True } response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_201_CREATED) # Added", "the \"setUp\" methods again to create a \"StreamPlatform\" for test we are currently", ": True } self.client.force_authenticate(user=None) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_review_create_update(self):", "args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_review_create_update(self): data = { \"review_user\": self.user , \"rating", "\"streamplatorm\" manually. NOTE: \"test_streamplatform_ind\" method is for getting the individual object using the", "object manually. ----------------------- Forcing authentication------------------------- LINKS: https://www.django-rest-framework.org/api-guide/testing/#forcing-authentication 3. 
We are \"Forcing authentication\" to", ",args= (self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class ReviewTestCase(APITestCase): def setUp(self):", "test the \"test_review_create and test_review_create_unauth\" While, the second is used to for the", "= { \"title\": \"Example Movie\" , \"storyline\": \"Example Story\", \"platform\" : self.stream ,", "test_review_list(self): response = self.client.get(reverse('review-list', args=(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_ind(self): response = self.client.get(reverse('review-detail', args=(self.review.id,)))", "setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user) self.client.credentials(HTTP_AUTHORIZATION='Token ' +", "checks to the tests. i.c(For the content and # the number of reviews", "self.client.post(reverse('movie-list') , data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def", "django.http import response from django.urls import reverse from rest_framework import status from rest_framework.test", "+ self.token.key) self.stream = models.StreamPlatform.objects.create( name = \"Netflix\", about = \"#1 Streaming Platform\",", "method is for getting the individual object using the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>", "WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. 
we are using the \"setUp\" methods again to create a", "self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self): response = self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(),", "self.stream , \"active\" : True } response = self.client.post(reverse('movie-list') , data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "False } response = self.client.put(reverse('review-detail', args=(self.review.id,)), data) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_list(self): response =", "\"Example Movie\" , \"storyline\": \"Example Story\", \"platform\" : self.stream , \"active\" : True", "credentials. It returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is used for creating the \"streamplatorm\" manually.", "the \"watchlist\" object manually. ----------------------- Forcing authentication------------------------- LINKS: https://www.django-rest-framework.org/api-guide/testing/#forcing-authentication 3. We are \"Forcing", "status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self): response = self.client.get(reverse('movie-detail'", "to send multilpe review on a watchlist. The first is used to test", "test classes are independent\". 2. \"self.watchlist\" is attribute in the \"setUp\" for creating", "currently writing. Hence, \"each test classes are independent\". 2. \"self.watchlist\" is attribute in", "get an id for the review to be updated. 
NOTE: The 2 'self.watchlist'", "response from django.urls import reverse from rest_framework import status from rest_framework.test import APITestCase", "= models.StreamPlatform.objects.create( name = \"Netflix\", about = \"#1 Streaming Platform\", website = \"https://netflix.com\"", ", active = True ) self.watchlist2 = models.WatchList.objects.create( title = \"Example Movie\" ,", "name = \"Netflix\", about = \"#1 Streaming Platform\", website = \"https://netflix.com\" ) self.watchlist", "independent\". 2. \"self.watchlist\" is attribute in the \"setUp\" for creating the \"watchlist\" object", "\"#1 Streaming Platform\", \"website\" : \"https://netflix.com\" } response = self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "Once the request is sent without \"admin\" credentials. It returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\"", "test_streamplatform_ind(self): response = self.client.get(reverse('streamplatform-detail' ,args= (self.stream.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) class WatchListTestCase(APITestCase): def setUp(self): self.user", "class StreamPlatformTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user)", ": 4, \"description\" : \"Great Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\" : False }", "self.watchlist2') was created because we aren't allowed to send multilpe review on a", "True ) self.review = models.Review.objects.create( review_user =self.user , rating = 5, description =", "self.assertEqual(response.status_code, status.HTTP_201_CREATED) # Added more checks to the tests. 
i.c(For the content and", "self.watchlist, \"active\" : True } self.client.force_authenticate(user=None) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)", "Once the request is sent without the authorization. It returns \"HTTP_401_UNAUTHORIZED\". The \"setUp\"", "\"watchlist\" object manually. ----------------------- Forcing authentication------------------------- LINKS: https://www.django-rest-framework.org/api-guide/testing/#forcing-authentication 3. We are \"Forcing authentication\"", "<reponame>rhedwan/BuildingDjangoAPI from django.contrib.auth.models import User from django.http import response from django.urls import reverse", "it going to return \"HTTP_403_FORBIDDEN\" which 'ok' NOTE: Once the request is sent", "def test_review_create_update(self): data = { \"review_user\": self.user , \"rating \" : 4, \"description\"", "Story\", platform = self.stream , active = True ) def test_watchlist_create(self): data =", "status.HTTP_200_OK) def test_review_list(self): response = self.client.get(reverse('review-list', args=(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_ind(self): response =", "test_review_user(self): response = self.client.get('/watch/reviews/?username' + self.user.username) self.assertEqual(response.status_code, status.HTTP_200_OK) \"\"\" IMPORTANT: The we are", "title = \"Example Movie\" , storyline = \"Example Story\", platform = self.stream ,", "the number of reviews that can be created on a movie) self.assertEqual(models.Review.objects.count(), 2)", "i.c(For the content and # the number of reviews that can be created", "True ) def test_review_create(self): data = { \"review_user\": self.user , \"rating \" :", "test_watchlist_ind(self): response = self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) 
self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie')", "movie) self.assertEqual(models.Review.objects.count(), 2) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self): data", ", active = True ) self.review = models.Review.objects.create( review_user =self.user , rating =", "rest_framework import status from rest_framework.test import APITestCase from rest_framework.authtoken.models import Token from movielist_app.api", "'admin'. Hence it going to return \"HTTP_403_FORBIDDEN\" which 'ok' NOTE: Once the request", "\"description\" : \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\" : True } response =", "response = self.client.get(reverse('streamplatform-detail' ,args= (self.stream.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) class WatchListTestCase(APITestCase): def setUp(self): self.user =", "ReviewTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user) self.client.credentials(HTTP_AUTHORIZATION='Token", "used to test the \"test_review_create and test_review_create_unauth\" While, the second is used to", "' + self.token.key) self.stream = models.StreamPlatform.objects.create( name = \"Netflix\", about = \"#1 Streaming", "\"self.watchlist\" is attribute in the \"setUp\" for creating the \"watchlist\" object manually. 
-----------------------", "to test the \"test_review_create and test_review_create_unauth\" While, the second is used to for", "def test_review_delete(self): response = self.client.delete(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) def test_review_user(self): response = self.client.get('/watch/reviews/?username'", "review to be updated. NOTE: The 2 'self.watchlist' i.e('self.watchlist & self.watchlist2') was created", "\"test_review_create and test_review_create_unauth\" While, the second is used to for the update, \"PUT\"", "review on a watchlist. The first is used to test the \"test_review_create and", "WatchListTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user) self.client.credentials(HTTP_AUTHORIZATION='Token", "for test we are currently writing. Hence, \"each test classes are independent\". 2.", ") def test_watchlist_create(self): data = { \"title\": \"Example Movie\" , \"storyline\": \"Example Story\",", "a \"StreamPlatform\" for test we are currently writing. Hence, \"each test classes are", "self.watchlist, \"active\" : True } response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_201_CREATED) #", ": False } response = self.client.put(reverse('review-detail', args=(self.review.id,)), data) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_list(self): response", "individual object using the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. 
we are using the", "args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self): data = { \"review_user\": self.user , \"rating", "NOTE: The 2 'self.watchlist' i.e('self.watchlist & self.watchlist2') was created because we aren't allowed", "test we are currently writing. Hence, \"each test classes are independent\". 2. \"self.watchlist\"", "self.stream = models.StreamPlatform.objects.create( name = \"Netflix\", about = \"#1 Streaming Platform\", website =", "response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self): response = self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,))) self.assertEqual(response.status_code,", "\"admin\" credentials. It returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is used for creating the \"streamplatorm\"", "response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self): response = self.client.get(reverse('streamplatform-detail' ,args= (self.stream.id,))) self.assertEqual(response.status_code,", "= self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self): response = self.client.get(reverse('streamplatform-detail' ,args= (self.stream.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK)", "} self.client.force_authenticate(user=None) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_review_create_update(self): data =", "5, description = \"Great Movie!!!\", watchlist = self.watchlist2, active = True ) def", "test_streamplatform_create(self): data = { \"name\" : \"Netflix\", \"about\" : \"#1 Streaming Platform\", \"website\"", ": self.watchlist, \"active\" : True } 
self.client.force_authenticate(user=None) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code,", "is attribute in the \"setUp\" for creating the \"watchlist\" object manually. ----------------------- Forcing", "request is sent without \"admin\" credentials. It returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is used", "self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_201_CREATED) # Added more checks to the tests. i.c(For", ") def test_review_create(self): data = { \"review_user\": self.user , \"rating \" : 5,", "self.assertEqual(response.status_code, status.HTTP_200_OK) def test_watchlist_ind(self): response = self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1)", "import reverse from rest_framework import status from rest_framework.test import APITestCase from rest_framework.authtoken.models import", ") self.watchlist = models.WatchList.objects.create( title = \"Example Movie\" , storyline = \"Example Story\",", "self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self): data = { \"review_user\": self.user , \"rating \" :", ": \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\" : True } self.client.force_authenticate(user=None) response =", "Movie!!!\", \"watchlist\" : self.watchlist, \"active\" : True } response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data)", "response = self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class", "\"Netflix\", about = \"#1 Streaming Platform\", website = 
\"https://netflix.com\" ) def test_streamplatform_create(self): data", "serializers from movielist_app import models class StreamPlatformTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\")", "def test_streamplatform_create(self): data = { \"name\" : \"Netflix\", \"about\" : \"#1 Streaming Platform\",", "response = self.client.get(reverse('review-list', args=(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_ind(self): response = self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code,", "id for the review to be updated. NOTE: The 2 'self.watchlist' i.e('self.watchlist &", "StreamPlatformTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user) self.client.credentials(HTTP_AUTHORIZATION='Token", "\"rating \" : 4, \"description\" : \"Great Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\" :", "\"active\" : True } self.client.force_authenticate(user=None) response = self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def", "we are currently writing. Hence, \"each test classes are independent\". 2. 
\"self.watchlist\" is", "models.Review.objects.create( review_user =self.user , rating = 5, description = \"Great Movie!!!\", watchlist =", "data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self): response", "5, \"description\" : \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\" : True } self.client.force_authenticate(user=None)", "4, \"description\" : \"Great Movie!!!-(Updated)\", \"watchlist\" : self.watchlist, \"active\" : False } response", "\"website\" : \"https://netflix.com\" } response = self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response", "class ReviewTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user)", "status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self): data = { \"review_user\": self.user , \"rating \" : 5,", "are using the 'user' which isn't the 'admin'. Hence it going to return", "storyline = \"Example Story\", platform = self.stream , active = True ) def", "4. Created a \"self.watchlist\" attribute to get an id for the review to", "the individual object using the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1. we are using", "\"Example Story\", platform = self.stream , active = True ) def test_watchlist_create(self): data", "\"Example Story\", platform = self.stream , active = True ) self.watchlist2 = models.WatchList.objects.create(", "platform = self.stream , active = True ) def test_watchlist_create(self): data = {", "returns \"HTTP_401_UNAUTHORIZED\". The \"setUp\" method is taking care of that. 
NOTE: Once the", "\"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is used for creating the \"streamplatorm\" manually. NOTE: \"test_streamplatform_ind\" method", "= self.client.post(reverse('movie-list') , data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_watchlist_list(self): response = self.client.get(reverse('movie-list')) self.assertEqual(response.status_code, status.HTTP_200_OK)", ", rating = 5, description = \"Great Movie!!!\", watchlist = self.watchlist2, active =", "the 'admin'. Hence it going to return \"HTTP_403_FORBIDDEN\" which 'ok' NOTE: Once the", "test_review_delete(self): response = self.client.delete(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) def test_review_user(self): response = self.client.get('/watch/reviews/?username' +", "sent without \"admin\" credentials. It returns \"HTTP_403_FORBIDDEN\" NOTE: \"self.stream\" is used for creating", "\"test_streamplatform_ind\" method is for getting the individual object using the \"self.stream\" <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< WatchListTestCase", "\"HTTP_401_UNAUTHORIZED\". The \"setUp\" method is taking care of that. 
NOTE: Once the request", "APITestCase from rest_framework.authtoken.models import Token from movielist_app.api import serializers from movielist_app import models", "status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class ReviewTestCase(APITestCase): def setUp(self): self.user = User.objects.create_user(username=\"example\",", "= True ) def test_review_create(self): data = { \"review_user\": self.user , \"rating \"", "def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_streamplatform_ind(self): response = self.client.get(reverse('streamplatform-detail' ,args=", "args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_201_CREATED) # Added more checks to the tests. i.c(For the", "= self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_delete(self): response = self.client.delete(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)", "sent without the authorization. It returns \"HTTP_401_UNAUTHORIZED\". The \"setUp\" method is taking care", "from django.http import response from django.urls import reverse from rest_framework import status from", "isn't the 'admin'. Hence it going to return \"HTTP_403_FORBIDDEN\" which 'ok' NOTE: Once", "an id for the review to be updated. NOTE: The 2 'self.watchlist' i.e('self.watchlist", "attribute in the \"setUp\" for creating the \"watchlist\" object manually. 
----------------------- Forcing authentication-------------------------", "= self.client.post(reverse('streamplatform-list'), data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self): response = self.client.get(reverse('streamplatform-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) def", "The \"setUp\" method is taking care of that. NOTE: Once the request is", "# the number of reviews that can be created on a movie) self.assertEqual(models.Review.objects.count(),", "= self.client.post(reverse('review-create', args=(self.watchlist.id,)), data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_review_create_unauth(self): data = { \"review_user\": self.user", "taking care of that. NOTE: Once the request is sent without \"admin\" credentials.", ": self.watchlist, \"active\" : False } response = self.client.put(reverse('review-detail', args=(self.review.id,)), data) self.assertEqual(response.status_code, status.HTTP_200_OK)", "\"rating \" : 5, \"description\" : \"Great Movie!!!\", \"watchlist\" : self.watchlist, \"active\" :", "self.client.get(reverse('movie-detail' ,args= (self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(models.WatchList.objects.count(), 1) self.assertEqual(models.WatchList.objects.get().title, 'Example Movie') class ReviewTestCase(APITestCase): def", "self.user = User.objects.create_user(username=\"example\", password=\"<PASSWORD>\") self.token = Token.objects.get(user__username = self.user) self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key)", "= \"Great Movie!!!\", watchlist = self.watchlist2, active = True ) def test_review_create(self): data", "from django.contrib.auth.models import User from django.http import response from django.urls import reverse from", "Platform\", \"website\" : \"https://netflix.com\" } response = self.client.post(reverse('streamplatform-list'), data) 
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_streamplatform_list(self):", "storyline = \"Example Story\", platform = self.stream , active = True ) self.review", "= True ) def test_watchlist_create(self): data = { \"title\": \"Example Movie\" , \"storyline\":", "website = \"https://netflix.com\" ) self.watchlist = models.WatchList.objects.create( title = \"Example Movie\" , storyline", "def test_review_list(self): response = self.client.get(reverse('review-list', args=(self.watchlist.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_ind(self): response = self.client.get(reverse('review-detail',", "Token from movielist_app.api import serializers from movielist_app import models class StreamPlatformTestCase(APITestCase): def setUp(self):", "test_review_ind(self): response = self.client.get(reverse('review-detail', args=(self.review.id,))) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_review_delete(self): response = self.client.delete(reverse('review-detail', args=(self.review.id,)))" ]
[ "@flask_sijax.route(app, '/hello') def hello(): def say_hi(obj_response): obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return", "import sijax path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path", "os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app)", "'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def", "os from flask import Flask, g from flask_sijax import sijax path = os.path.join('.',", "g from flask_sijax import sijax path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__)", "= Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index(): return", "import Flask, g from flask_sijax import sijax path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app", "app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index(): return 'Index' @flask_sijax.route(app, '/hello') def hello():", "os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/')", "path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index(): return 'Index' @flask_sijax.route(app, '/hello') def", "obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return _render_template('sijaxexample.html') if 
__name__ ==", "say_hi(obj_response): obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return _render_template('sijaxexample.html') if __name__", "def hello(): def say_hi(obj_response): obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return", "= path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index(): return 'Index' @flask_sijax.route(app, '/hello')", "flask_sijax.Sijax(app) @app.route('/') def index(): return 'Index' @flask_sijax.route(app, '/hello') def hello(): def say_hi(obj_response): obj_response.alert('Hi", "hello(): def say_hi(obj_response): obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return _render_template('sijaxexample.html')", "Flask, g from flask_sijax import sijax path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app =", "there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return _render_template('sijaxexample.html') if __name__ == '__main__':", "g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return _render_template('sijaxexample.html') if __name__ == '__main__': app.run(debug = True)", "from flask import Flask, g from flask_sijax import sijax path = os.path.join('.', os.path.dirname(__file__),", "= '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index(): return 'Index' @flask_sijax.route(app, '/hello') def hello(): def", "'/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index(): return 'Index' @flask_sijax.route(app, '/hello') def hello(): def say_hi(obj_response):", "index(): return 'Index' @flask_sijax.route(app, '/hello') def hello(): def say_hi(obj_response): 
obj_response.alert('Hi there!') if g.sijax.is_sijax_request:", "if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return _render_template('sijaxexample.html') if __name__ == '__main__': app.run(debug", "'/hello') def hello(): def say_hi(obj_response): obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request()", "'Index' @flask_sijax.route(app, '/hello') def hello(): def say_hi(obj_response): obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi)", "return 'Index' @flask_sijax.route(app, '/hello') def hello(): def say_hi(obj_response): obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi',", "app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index(): return 'Index' @flask_sijax.route(app,", "flask_sijax import sijax path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] =", "flask import Flask, g from flask_sijax import sijax path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/')", "from flask_sijax import sijax path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH']", "g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return _render_template('sijaxexample.html') if __name__ == '__main__': app.run(debug =", "def index(): return 'Index' @flask_sijax.route(app, '/hello') def hello(): def say_hi(obj_response): obj_response.alert('Hi there!') if", "def say_hi(obj_response): obj_response.alert('Hi there!') if g.sijax.is_sijax_request: g.sijax.register_callback('say_hi', say_hi) return g.sijax.process_request() return 
_render_template('sijaxexample.html') if", "Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index(): return 'Index'", "@app.route('/') def index(): return 'Index' @flask_sijax.route(app, '/hello') def hello(): def say_hi(obj_response): obj_response.alert('Hi there!')", "import os from flask import Flask, g from flask_sijax import sijax path =", "= os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js'", "sijax path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI']", "app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js' flask_sijax.Sijax(app) @app.route('/') def index():", "path = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/') app = Flask(__name__) app.config['SIJAX_STATIC_PATH'] = path app.config['SIJAX_JSON_URI'] =" ]
[ "validator object The rest are optional arguments and keyword arguments that belong to", "to a canonical name # from the fields dictionary or the name #", "field, data in post.iteritems(): if field in F(): # Skip it if the", "WTF instance? \"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf def add_field(self, name, label, field_type, *args,", "validator parameters for possible %field% replacement, then bind these parameters to their validator.", "canonical name # from the fields dictionary or the name # was malformed,", "directly, it means the field # is not a set so just set", "has the added ability to process sets of fields that are suffixed with", "current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If we are currently in a", "class F(form): pass for field, data in post.iteritems(): if field in F(): #", "= self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators, *field_args, **field_kwargs)) # Create", "sys from wtforms.form import FormMeta class WTFormsDynamicFields(): \"\"\" Add dynamic (set) fields to", "X is a number. For ease of configuration, these set names will be", "(re)set the current set. current_set_number = None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()):", "configuration dictionary on which you can add fields and validators using the designated", "(_X) when they are bound to the validator. The latter brings the power", "arguments. for key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except:", "Calling the \"process\" method will take care of actually applying the build configuration", "this form a Flask WTF or a plain WTF instance? \"\"\" self._dyn_fields =", "that are decorated with %'s within # the arguments. 
for key, arg in", "self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator, *args, **kwargs) else: raise", "in self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']: args = [] kwargs = {} if", "set, (re)set the current set. current_set_number = None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in", "dictionary. \"\"\" if name in self._dyn_fields: raise AttributeError('Field already added to the form.')", "raise AttributeError('Field already added to the form.') else: self._dyn_fields[name] = {'label': label, 'type':", "3 or iteritems() in 2. \"\"\" if sys.version_info[0] >= 3: return dict.items() else:", "to the validator. The latter brings the power to reference set fields with", "to all the words that are decorated with %'s within # the arguments.", "field did not match to a canonical name # from the fields dictionary", "memory hugging Python2 .items() method. So for providing both Python2 and 3 support,", "is valid, check all the validator parameters for possible %field% replacement, then bind", "the current set. current_set_number = None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): #", "about the set number that will be used later on when injecting them", "in self._dyn_fields: if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if args: self._dyn_fields[name][validator.__name__]['args']", "validator. Finally, add the field together with their validators to the form. :param", "WTForm form object and attach new fields to it according to a match", "the configuration. 
Inside the configuration there is the ability to reference other fields", "# the last part contains only digits and the # everything *but* the", "# from the fields dictionary or the name # was malformed, throw it", "**field_kwargs)) # Create an instance of the form with the newly # created", "A valid WTForm Form object :param post: A MultiDict with the POST variables", "not seem to be regex-able # Probably not a string, thus we can", "be defined once in the configuration. Inside the configuration there is the ability", "validator arguments and, if we are in a set # replace the %field_name%", "it is time to add it to the form. field_type = self._dyn_fields[field_cname]['type'] field_label", "and bound later. \"\"\" if name in self._dyn_fields: if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator)", "\"\"\" Add the field to the internal configuration dictionary. \"\"\" if name in", "# the arguments. for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg)", "self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators, *field_args, **field_kwargs))", "field # Since we are not in a set, (re)set the current set.", "the validator. We let them simply pass through to be checked and bound", "not a string, thus we can skip it. pass args.append(arg) if 'kwargs' in", "this class will merely create a configuration dictionary on which you can add", "that each of these fields only have to be defined once in the", "or a plain WTF instance? \"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf def add_field(self, name,", "of the form with the newly # created fields and give it back", "be regex-able # Probably not a string, thus we can skip it. pass", "name # was malformed, throw it out. 
continue # Since the field seems", "\"process\" method will take care of actually applying the build configuration to the", "try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does not seem to", "POST values and check each field against the configuration that was made. For", "bind these parameters to their validator. Finally, add the field together with their", "already added to the form.') else: self._dyn_fields[name] = {'label': label, 'type': field_type, 'args':", "# Finally, bind arguments to the validator # and add it to the", "TypeError('Given form is not a valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass", "Skip it if the POST field is one of the standard form fields.", "that are suffixed with the convention of '_X' where X is a number.", "the words that are decorated with %'s within # the arguments. for key,", "according to a match between what is in the POST and what is", "**kwargs) else: raise AttributeError('Field \"{0}\" does not exist. ' 'Did you forget to", "reference set fields with their canonical name without needing to care about the", "the POST values and check each field against the configuration that was made.", "args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If we are currently in a set,", "return dict.items() else: return dict.iteritems() def process(self, form, post): \"\"\" Process the given", "flask_wtf: Is this form a Flask WTF or a plain WTF instance? \"\"\"", "can skip it. pass kwargs[key] = arg # Finally, bind arguments to the", "later on when injecting them in the DOM. \"\"\" def __init__(self, flask_wtf=False): \"\"\"", "be used later on when injecting them in the DOM. \"\"\" def __init__(self,", "field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If the field can be split on underscore characters,", "Add the field to the internal configuration dictionary. 
\"\"\" if name in self._dyn_fields:", "[validator.__name__]['kwargs'] else: # If we are currently in a set, append the set", "the field # is not a set so just set the canonical name", "except: # The argument does not seem to be regex-able # Probably not", "'type': field_type, 'args': args, 'kwargs': kwargs} def add_validator(self, name, validator, *args, **kwargs): \"\"\"", "if 'validators' in self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']: args = [] kwargs =", "field against the configuration that was made. For each field that is valid,", "[validator.__name__]: if not current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If we are", "POST and what is defined in the build configuration dictionary. It has the", "bound later. \"\"\" if name in self._dyn_fields: if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__]", "will merely create a configuration dictionary on which you can add fields and", "process(self, form, post): \"\"\" Process the given WTForm Form object. Itterate over the", "not a valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for field, data", "# and add it to the list validators.append(validator(*args, **kwargs)) # The field is", "a canonical name # from the fields dictionary or the name # was", "replacement, then bind these parameters to their validator. Finally, add the field together", "data in post.iteritems(): if field in F(): # Skip it if the POST", ":param post: A MultiDict with the POST variables \"\"\" if not isinstance(form, FormMeta):", "on :param validator: The WTForms validator object The rest are optional arguments and", "back to their canonical name so that each of these fields only have", "we are at. 
current_set_number = str(field.split('_')[-1]) else: # The field did not match", "Is this form a Flask WTF or a plain WTF instance? \"\"\" self._dyn_fields", "it. pass args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: kwargs =", "% signs. Fields that belong to a set will be automatically suffixed with", "**kwargs): \"\"\" Add the field to the internal configuration dictionary. \"\"\" if name", "object The rest are optional arguments and keyword arguments that belong to the", "it if the POST field is one of the standard form fields. continue", "the # everything *but* the last part is found in the # field", "else: # If we are currently in a set, append the set number", "to process sets of fields that are suffixed with the convention of '_X'", "AttributeError('Field \"{0}\" does not exist. ' 'Did you forget to add it?'.format(name)) @staticmethod", "%'s within # the arguments. for key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg", "both Python2 and 3 support, setting up iteritems() as either items() in 3", "is time to add it to the form. field_type = self._dyn_fields[field_cname]['type'] field_label =", "number. For ease of configuration, these set names will be traced back to", ">= 3: return dict.items() else: return dict.iteritems() def process(self, form, post): \"\"\" Process", "methods \"add_field\" and \"add_validator\". Calling the \"process\" method will take care of actually", "belong to the validator. We let them simply pass through to be checked", "to care about the set number that will be used later on when", "# to all the words that are decorated with %'s within # the", "raise AttributeError('Field \"{0}\" does not exist. ' 'Did you forget to add it?'.format(name))", "Since we are not in a set, (re)set the current set. current_set_number =", "validators to the form. 
:param form: A valid WTForm Form object :param post:", "post.iteritems(): if field in F(): # Skip it if the POST field is", "field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators, *field_args, **field_kwargs)) # Create an instance", "'Did you forget to add it?'.format(name)) @staticmethod def iteritems(dict): \"\"\" Refusing to use", "the list validators.append(validator(*args, **kwargs)) # The field is setup, it is time to", "fields that are suffixed with the convention of '_X' where X is a", "configuration dictionary. \"\"\" if name in self._dyn_fields: raise AttributeError('Field already added to the", "set. current_set_number = None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If the", "argument does not seem to be regex-able # Probably not a string, thus", "= self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators, *field_args,", "replace the %field_name% convention where we find it. validators = [] if 'validators'", "a string, thus we can skip it. pass kwargs[key] = arg # Finally,", "# Since we apparently are in a set, remember the # the set", "a WTForm. Instantiating this class will merely create a configuration dictionary on which", "= field[:-(len(field.split('_')[-1]))-1] # Since we apparently are in a set, remember the #", "support, setting up iteritems() as either items() in 3 or iteritems() in 2.", "in 3 or iteritems() in 2. \"\"\" if sys.version_info[0] >= 3: return dict.items()", "post): \"\"\" Process the given WTForm Form object. Itterate over the POST values", "the validator parameters for possible %field% replacement, then bind these parameters to their", "the caller. 
if self.flask_wtf: # Flask WTF overrides the form initialization # and", "validator, *args, **kwargs): \"\"\" Add the validator to the internal configuration dictionary. :param", "= [] self.add_validator(name, validator, *args, **kwargs) else: raise AttributeError('Field \"{0}\" does not exist.", "name: The field machine name to apply the validator on :param validator: The", "current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If we are currently in a", "words that are decorated with %'s within # the arguments. for arg in", "with their set number (_X) when they are bound to the validator. The", "field_type(field_label, validators=validators, *field_args, **field_kwargs)) # Create an instance of the form with the", "def add_field(self, name, label, field_type, *args, **kwargs): \"\"\" Add the field to the", "flask_wtf=False): \"\"\" Class init. :param flask_wtf: Is this form a Flask WTF or", "%field% replacement, then bind these parameters to their validator. Finally, add the field", "is found in the # field configuration, we are good to go. #", "= re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for field, data in post.iteritems(): if field in", "string, thus we can skip it. pass kwargs[key] = arg # Finally, bind", "setup, it is time to add it to the form. field_type = self._dyn_fields[field_cname]['type']", "field_cname = field[:-(len(field.split('_')[-1]))-1] # Since we apparently are in a set, remember the", "to reference set fields with their canonical name without needing to care about", "you can add fields and validators using the designated methods \"add_field\" and \"add_validator\".", "field_args = self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators, *field_args, **field_kwargs)) #", "hugging Python2 .items() method. So for providing both Python2 and 3 support, setting", "the arguments. 
for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except:", "which you can add fields and validators using the designated methods \"add_field\" and", "internal configuration dictionary. :param name: The field machine name to apply the validator", "add it?'.format(name)) @staticmethod def iteritems(dict): \"\"\" Refusing to use a possible memory hugging", "if self.flask_wtf: # Flask WTF overrides the form initialization # and already injects", "that are decorated with %'s within # the arguments. for arg in self._dyn_fields[field_cname]\\", "only have to be defined once in the configuration. Inside the configuration there", "not isinstance(form, FormMeta): raise TypeError('Given form is not a valid WTForm.') re_field_name =", "new fields to it according to a match between what is in the", "Finally, bind arguments to the validator # and add it to the list", "validator. The latter brings the power to reference set fields with their canonical", "setting up iteritems() as either items() in 3 or iteritems() in 2. \"\"\"", "Process the given WTForm Form object. Itterate over the POST values and check", "self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does not", "will be used later on when injecting them in the DOM. \"\"\" def", "the build configuration dictionary. It has the added ability to process sets of", "so just set the canonical name and go on. field_cname = field #", "take a WTForm form object and attach new fields to it according to", "of '_X' where X is a number. 
For ease of configuration, these set", "to the validator # and add it to the list validators.append(validator(*args, **kwargs)) #", "= [] if 'validators' in self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']: args = []", "class will merely create a configuration dictionary on which you can add fields", "field seems to be a valid one, let us # prepare the validator", "care about the set number that will be used later on when injecting", "valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for field, data in post.iteritems():", "# (Cowardly refusing to use regex here). field_cname = field[:-(len(field.split('_')[-1]))-1] # Since we", "\"\"\" Add the validator to the internal configuration dictionary. :param name: The field", "validator: The WTForms validator object The rest are optional arguments and keyword arguments", "Add dynamic (set) fields to a WTForm. Instantiating this class will merely create", "convention where we find it. validators = [] if 'validators' in self._dyn_fields[field_cname]: for", "set will be automatically suffixed with their set number (_X) when they are", "a valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for field, data in", "between what is in the POST and what is defined in the build", "to a match between what is in the POST and what is defined", "if not current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If we are currently", "# Create an instance of the form with the newly # created fields", "we find it. validators = [] if 'validators' in self._dyn_fields[field_cname]: for validator in", "or iteritems() in 2. \"\"\" if sys.version_info[0] >= 3: return dict.items() else: return", "add it to the list validators.append(validator(*args, **kwargs)) # The field is setup, it", "through to be checked and bound later. 
\"\"\" if name in self._dyn_fields: if", "these fields only have to be defined once in the configuration. Inside the", "(set) fields to a WTForm. Instantiating this class will merely create a configuration", "not current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If we are currently in", "so that each of these fields only have to be defined once in", "is not a valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for field,", "# Probably not a string, thus we can skip it. pass kwargs[key] =", "the form. field_type = self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs =", "the validator # and add it to the list validators.append(validator(*args, **kwargs)) # The", "number # to all the words that are decorated with %'s within #", "# the arguments. for key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number,", "have to be defined once in the configuration. Inside the configuration there is", "validator arguments with the convention of surrounding it with % signs. Fields that", "set fields with their canonical name without needing to care about the set", "validator on :param validator: The WTForms validator object The rest are optional arguments", "to be a valid one, let us # prepare the validator arguments and,", "name and go on. field_cname = field # Since we are not in", "skip it. pass args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: kwargs", "to the form. field_type = self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs", "else: raise AttributeError('Field \"{0}\" does not exist. 
' 'Did you forget to add", "values and check each field against the configuration that was made. For each", "and check each field against the configuration that was made. For each field", "for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The", "The latter brings the power to reference set fields with their canonical name", "re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for field, data in post.iteritems(): if field", "a Flask WTF or a plain WTF instance? \"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf", "that was made. For each field that is valid, check all the validator", "self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If we", "\"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf def add_field(self, name, label, field_type, *args, **kwargs): \"\"\"", "\"\"\" Add dynamic (set) fields to a WTForm. Instantiating this class will merely", "self._dyn_fields[name][validator.__name__]['args'] = args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators'] = [] self.add_validator(name,", "regex-able # Probably not a string, thus we can skip it. pass args.append(arg)", "list validators.append(validator(*args, **kwargs)) # The field is setup, it is time to add", "form fields. continue else: if field in self._dyn_fields: # If we can find", "args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator, *args,", "instance of the form with the newly # created fields and give it", "they are bound to the validator. The latter brings the power to reference", "pass through to be checked and bound later. 
\"\"\" if name in self._dyn_fields:", "# If we can find the field name directly, it means the field", "%'s within # the arguments. for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg =", "if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator, *args, **kwargs)", ".items() method. So for providing both Python2 and 3 support, setting up iteritems()", "not match to a canonical name # from the fields dictionary or the", "dict.items() else: return dict.iteritems() def process(self, form, post): \"\"\" Process the given WTForm", "For each field that is valid, check all the validator parameters for possible", "valid one, let us # prepare the validator arguments and, if we are", "ability to reference other fields within the validator arguments with the convention of", "Since the field seems to be a valid one, let us # prepare", "form. field_type = self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs']", "\"\"\" if sys.version_info[0] >= 3: return dict.items() else: return dict.iteritems() def process(self, form,", "# and already injects the POST variables. form = F() else: form =", "'_X' where X is a number. For ease of configuration, these set names", "add it to the form. field_type = self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args =", "configuration. Inside the configuration there is the ability to reference other fields within", "validator # and add it to the list validators.append(validator(*args, **kwargs)) # The field", "on when injecting them in the DOM. 
\"\"\" def __init__(self, flask_wtf=False): \"\"\" Class", "create a configuration dictionary on which you can add fields and validators using", "\"\"\" Refusing to use a possible memory hugging Python2 .items() method. So for", "machine name to apply the validator on :param validator: The WTForms validator object", "def process(self, form, post): \"\"\" Process the given WTForm Form object. Itterate over", "in the # field configuration, we are good to go. # (Cowardly refusing", "# The field is setup, it is time to add it to the", "to apply the validator on :param validator: The WTForms validator object The rest", "dictionary. It has the added ability to process sets of fields that are", "the convention of '_X' where X is a number. For ease of configuration,", "us # prepare the validator arguments and, if we are in a set", "configuration dictionary. :param name: The field machine name to apply the validator on", "there is the ability to reference other fields within the validator arguments with", "in self._dyn_fields[field_cname]['validators']: args = [] kwargs = {} if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]:", "with %'s within # the arguments. for key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try:", "the convention of surrounding it with % signs. Fields that belong to a", "check each field against the configuration that was made. For each field that", "canonical name and go on. field_cname = field # Since we are not", "characters, # the last part contains only digits and the # everything *but*", "Create an instance of the form with the newly # created fields and", "WTForm Form object. Itterate over the POST values and check each field against", "used later on when injecting them in the DOM. 
\"\"\" def __init__(self, flask_wtf=False):", "arg) except: # The argument does not seem to be regex-able # Probably", "valid WTForm Form object :param post: A MultiDict with the POST variables \"\"\"", ":param validator: The WTForms validator object The rest are optional arguments and keyword", "FormMeta class WTFormsDynamicFields(): \"\"\" Add dynamic (set) fields to a WTForm. Instantiating this", "we are in a set # replace the %field_name% convention where we find", "self._dyn_fields[name][validator.__name__] = {} if args: self._dyn_fields[name][validator.__name__]['args'] = args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs", "it?'.format(name)) @staticmethod def iteritems(dict): \"\"\" Refusing to use a possible memory hugging Python2", "suffixed with their set number (_X) when they are bound to the validator.", "them in the DOM. \"\"\" def __init__(self, flask_wtf=False): \"\"\" Class init. :param flask_wtf:", "object and attach new fields to it according to a match between what", "form. :param form: A valid WTForm Form object :param post: A MultiDict with", "found in the # field configuration, we are good to go. # (Cowardly", "a configuration dictionary on which you can add fields and validators using the", "fields. continue else: if field in self._dyn_fields: # If we can find the", "continue # Since the field seems to be a valid one, let us", "a set will be automatically suffixed with their set number (_X) when they", "configuration to the WTForm form. This method will take a WTForm form object", "\"\"\" Class init. :param flask_wtf: Is this form a Flask WTF or a", "else: return dict.iteritems() def process(self, form, post): \"\"\" Process the given WTForm Form", "does not seem to be regex-able # Probably not a string, thus we", "iteritems(dict): \"\"\" Refusing to use a possible memory hugging Python2 .items() method. 
So", "with the POST variables \"\"\" if not isinstance(form, FormMeta): raise TypeError('Given form is", "arguments to the validator # and add it to the list validators.append(validator(*args, **kwargs))", "pass args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: kwargs = self._dyn_fields[field_cname]\\", "the canonical name and go on. field_cname = field # Since we are", "what is defined in the build configuration dictionary. It has the added ability", "already injects the POST variables. form = F() else: form = F(post) return", "are at. current_set_number = str(field.split('_')[-1]) else: # The field did not match to", "configuration, we are good to go. # (Cowardly refusing to use regex here).", "fields with their canonical name without needing to care about the set number", "last part is found in the # field configuration, we are good to", "from wtforms.form import FormMeta class WTFormsDynamicFields(): \"\"\" Add dynamic (set) fields to a", "match to a canonical name # from the fields dictionary or the name", "WTF or a plain WTF instance? \"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf def add_field(self,", "field_type, 'args': args, 'kwargs': kwargs} def add_validator(self, name, validator, *args, **kwargs): \"\"\" Add", "field is setup, it is time to add it to the form. field_type", ":param form: A valid WTForm Form object :param post: A MultiDict with the", "configuration there is the ability to reference other fields within the validator arguments", "and, if we are in a set # replace the %field_name% convention where", "number that will be used later on when injecting them in the DOM.", "the field seems to be a valid one, let us # prepare the", "each field that is valid, check all the validator parameters for possible %field%", "back to the caller. if self.flask_wtf: # Flask WTF overrides the form initialization", "the words that are decorated with %'s within # the arguments. 
for arg", "the form.') else: self._dyn_fields[name] = {'label': label, 'type': field_type, 'args': args, 'kwargs': kwargs}", "process sets of fields that are suffixed with the convention of '_X' where", "a plain WTF instance? \"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf def add_field(self, name, label,", "fields to it according to a match between what is in the POST", "it according to a match between what is in the POST and what", "is the ability to reference other fields within the validator arguments with the", "these parameters to their validator. Finally, add the field together with their validators", "WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for field, data in post.iteritems(): if", "'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if args: self._dyn_fields[name][validator.__name__]['args'] = args if", "re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for field, data in post.iteritems(): if field in F():", "was malformed, throw it out. continue # Since the field seems to be", "find it. 
validators = [] if 'validators' in self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']:", "in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If", "(field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If the field can be split on", "form: A valid WTForm Form object :param post: A MultiDict with the POST", "the last part is found in the # field configuration, we are good", "# is not a set so just set the canonical name and go", "within the validator arguments with the convention of surrounding it with % signs.", "field can be split on underscore characters, # the last part contains only", "for providing both Python2 and 3 support, setting up iteritems() as either items()", "isinstance(form, FormMeta): raise TypeError('Given form is not a valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%')", "regex-able # Probably not a string, thus we can skip it. pass kwargs[key]", "sets of fields that are suffixed with the convention of '_X' where X", "it. pass kwargs[key] = arg # Finally, bind arguments to the validator #", "__init__(self, flask_wtf=False): \"\"\" Class init. 
:param flask_wtf: Is this form a Flask WTF", "to be regex-able # Probably not a string, thus we can skip it.", "kwargs[key] = arg # Finally, bind arguments to the validator # and add", "other fields within the validator arguments with the convention of surrounding it with", "are in a set, remember the # the set number we are at.", "to their canonical name so that each of these fields only have to", "return dict.iteritems() def process(self, form, post): \"\"\" Process the given WTForm Form object.", "last part contains only digits and the # everything *but* the last part", "part contains only digits and the # everything *but* the last part is", "kwargs else: self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator, *args, **kwargs) else: raise AttributeError('Field \"{0}\"", "not current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If we are currently in", "(Cowardly refusing to use regex here). field_cname = field[:-(len(field.split('_')[-1]))-1] # Since we apparently", "Python2 and 3 support, setting up iteritems() as either items() in 3 or", "method will take a WTForm form object and attach new fields to it", "are optional arguments and keyword arguments that belong to the validator. We let", "internal configuration dictionary. \"\"\" if name in self._dyn_fields: raise AttributeError('Field already added to", "Add the validator to the internal configuration dictionary. :param name: The field machine", "Probably not a string, thus we can skip it. pass kwargs[key] = arg", "Flask WTF or a plain WTF instance? \"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf def", "are decorated with %'s within # the arguments. for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']:", "name directly, it means the field # is not a set so just", "to the internal configuration dictionary. :param name: The field machine name to apply", "checked and bound later. 
\"\"\" if name in self._dyn_fields: if 'validators' in self._dyn_fields[name]:", "\"\"\" if not isinstance(form, FormMeta): raise TypeError('Given form is not a valid WTForm.')", "self.add_validator(name, validator, *args, **kwargs) else: raise AttributeError('Field \"{0}\" does not exist. ' 'Did", "are suffixed with the convention of '_X' where X is a number. For", "self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator, *args, **kwargs) else: raise AttributeError('Field \"{0}\" does not", "for field, data in post.iteritems(): if field in F(): # Skip it if", "So for providing both Python2 and 3 support, setting up iteritems() as either", "set number we are at. current_set_number = str(field.split('_')[-1]) else: # The field did", "in self._dyn_fields: # If we can find the field name directly, it means", "pass for field, data in post.iteritems(): if field in F(): # Skip it", "it means the field # is not a set so just set the", "kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If we are currently in a set,", "setattr(F, field, field_type(field_label, validators=validators, *field_args, **field_kwargs)) # Create an instance of the form", "a match between what is in the POST and what is defined in", "injecting them in the DOM. \"\"\" def __init__(self, flask_wtf=False): \"\"\" Class init. :param", "def __init__(self, flask_wtf=False): \"\"\" Class init. :param flask_wtf: Is this form a Flask", "in the POST and what is defined in the build configuration dictionary. It", "args, 'kwargs': kwargs} def add_validator(self, name, validator, *args, **kwargs): \"\"\" Add the validator", "Python2 .items() method. So for providing both Python2 and 3 support, setting up", "and the # everything *but* the last part is found in the #", "where X is a number. 
For ease of configuration, these set names will", "to reference other fields within the validator arguments with the convention of surrounding", "add_field(self, name, label, field_type, *args, **kwargs): \"\"\" Add the field to the internal", "self._dyn_fields[field_cname]['validators']: args = [] kwargs = {} if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if", "with % signs. Fields that belong to a set will be automatically suffixed", "%field_name% convention where we find it. validators = [] if 'validators' in self._dyn_fields[field_cname]:", "the configuration there is the ability to reference other fields within the validator", "currently in a set, append the set number # to all the words", "self._dyn_fields: if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if args: self._dyn_fields[name][validator.__name__]['args'] =", "to the form. :param form: A valid WTForm Form object :param post: A", "attach new fields to it according to a match between what is in", "and attach new fields to it according to a match between what is", "a possible memory hugging Python2 .items() method. So for providing both Python2 and", "of the standard form fields. continue else: if field in self._dyn_fields: # If", "the form with the newly # created fields and give it back to", "field in F(): # Skip it if the POST field is one of", "*but* the last part is found in the # field configuration, we are", "str(field.split('_')[-1]) else: # The field did not match to a canonical name #", "actually applying the build configuration to the WTForm form. This method will take", "'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: #", "the validator arguments and, if we are in a set # replace the", "field configuration, we are good to go. 
# (Cowardly refusing to use regex", "post: A MultiDict with the POST variables \"\"\" if not isinstance(form, FormMeta): raise", "the configuration that was made. For each field that is valid, check all", "power to reference set fields with their canonical name without needing to care", "else: # The field did not match to a canonical name # from", "self.flask_wtf: # Flask WTF overrides the form initialization # and already injects the", "go on. field_cname = field # Since we are not in a set,", "= None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If the field can", "are decorated with %'s within # the arguments. for key, arg in self.iteritems(self._dyn_fields[field_cname]\\", "form with the newly # created fields and give it back to the", "of surrounding it with % signs. Fields that belong to a set will", "from the fields dictionary or the name # was malformed, throw it out.", "If we are currently in a set, append the set number # to", "merely create a configuration dictionary on which you can add fields and validators", "we apparently are in a set, remember the # the set number we", "prepare the validator arguments and, if we are in a set # replace", ":param flask_wtf: Is this form a Flask WTF or a plain WTF instance?", "if sys.version_info[0] >= 3: return dict.items() else: return dict.iteritems() def process(self, form, post):", "the standard form fields. continue else: if field in self._dyn_fields: # If we", "self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does not", ":param name: The field machine name to apply the validator on :param validator:", "MultiDict with the POST variables \"\"\" if not isinstance(form, FormMeta): raise TypeError('Given form", "\"add_field\" and \"add_validator\". 
Calling the \"process\" method will take care of actually applying", "give it back to the caller. if self.flask_wtf: # Flask WTF overrides the", "[] self.add_validator(name, validator, *args, **kwargs) else: raise AttributeError('Field \"{0}\" does not exist. '", "self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators, *field_args, **field_kwargs)) # Create an", "with %'s within # the arguments. for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg", "the validator. The latter brings the power to reference set fields with their", "elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If the field can be split", "self._dyn_fields: raise AttributeError('Field already added to the form.') else: self._dyn_fields[name] = {'label': label,", "canonical name without needing to care about the set number that will be", "the last part contains only digits and the # everything *but* the last", "# The field did not match to a canonical name # from the", "we can skip it. pass args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not", "time to add it to the form. field_type = self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label']", "on which you can add fields and validators using the designated methods \"add_field\"", "throw it out. 
continue # Since the field seems to be a valid", "if name in self._dyn_fields: if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if", "what is in the POST and what is defined in the build configuration", "object :param post: A MultiDict with the POST variables \"\"\" if not isinstance(form,", "current_set_number = None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If the field", "self._dyn_fields.keys()): # If the field can be split on underscore characters, # the", "are in a set # replace the %field_name% convention where we find it.", "validators = [] if 'validators' in self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']: args =", "self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']: args = [] kwargs = {} if 'args'", "= self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If we are currently in a set, append", "it to the form. field_type = self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args']", "and go on. field_cname = field # Since we are not in a", "# created fields and give it back to the caller. if self.flask_wtf: #", "their canonical name so that each of these fields only have to be", "and validators using the designated methods \"add_field\" and \"add_validator\". Calling the \"process\" method", "standard form fields. continue else: if field in self._dyn_fields: # If we can", "does not exist. 
' 'Did you forget to add it?'.format(name)) @staticmethod def iteritems(dict):", "field_type = self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F,", "if args: self._dyn_fields[name][validator.__name__]['args'] = args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators'] =", "field together with their validators to the form. :param form: A valid WTForm", "not a set so just set the canonical name and go on. field_cname", "= field # Since we are not in a set, (re)set the current", "be traced back to their canonical name so that each of these fields", "to use regex here). field_cname = field[:-(len(field.split('_')[-1]))-1] # Since we apparently are in", "append the set number # to all the words that are decorated with", "to the form.') else: self._dyn_fields[name] = {'label': label, 'type': field_type, 'args': args, 'kwargs':", "to the list validators.append(validator(*args, **kwargs)) # The field is setup, it is time", "A MultiDict with the POST variables \"\"\" if not isinstance(form, FormMeta): raise TypeError('Given", "parameters to their validator. Finally, add the field together with their validators to", "a number. For ease of configuration, these set names will be traced back", "Finally, add the field together with their validators to the form. :param form:", "if the POST field is one of the standard form fields. 
continue else:", "if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else:", "will take care of actually applying the build configuration to the WTForm form.", "name, label, field_type, *args, **kwargs): \"\"\" Add the field to the internal configuration", "in post.iteritems(): if field in F(): # Skip it if the POST field", "not exist. ' 'Did you forget to add it?'.format(name)) @staticmethod def iteritems(dict): \"\"\"", "suffixed with the convention of '_X' where X is a number. For ease", "dict.iteritems() def process(self, form, post): \"\"\" Process the given WTForm Form object. Itterate", "check all the validator parameters for possible %field% replacement, then bind these parameters", "simply pass through to be checked and bound later. \"\"\" if name in", "the WTForm form. This method will take a WTForm form object and attach", "thus we can skip it. pass kwargs[key] = arg # Finally, bind arguments", "it out. continue # Since the field seems to be a valid one,", "can skip it. pass args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number:", "be checked and bound later. \"\"\" if name in self._dyn_fields: if 'validators' in", "the # field configuration, we are good to go. # (Cowardly refusing to", "to the internal configuration dictionary. \"\"\" if name in self._dyn_fields: raise AttributeError('Field already", "'args': args, 'kwargs': kwargs} def add_validator(self, name, validator, *args, **kwargs): \"\"\" Add the", "are good to go. # (Cowardly refusing to use regex here). field_cname =", "self._dyn_fields: # If we can find the field name directly, it means the", "to it according to a match between what is in the POST and", "we are good to go. # (Cowardly refusing to use regex here). 
field_cname", "# If we are currently in a set, append the set number #", "Refusing to use a possible memory hugging Python2 .items() method. So for providing", "current_set_number = str(field.split('_')[-1]) else: # The field did not match to a canonical", "The field did not match to a canonical name # from the fields", "in a set, (re)set the current set. current_set_number = None elif (field.split('_')[-1].isdigit() and", "AttributeError('Field already added to the form.') else: self._dyn_fields[name] = {'label': label, 'type': field_type,", "**kwargs): \"\"\" Add the validator to the internal configuration dictionary. :param name: The", "set # replace the %field_name% convention where we find it. validators = []", "it. validators = [] if 'validators' in self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']: args", "together with their validators to the form. :param form: A valid WTForm Form", "keyword arguments that belong to the validator. We let them simply pass through", "when they are bound to the validator. The latter brings the power to", "We let them simply pass through to be checked and bound later. \"\"\"", "def iteritems(dict): \"\"\" Refusing to use a possible memory hugging Python2 .items() method.", "underscore characters, # the last part contains only digits and the # everything", "split on underscore characters, # the last part contains only digits and the", "added to the form.') else: self._dyn_fields[name] = {'label': label, 'type': field_type, 'args': args,", "are bound to the validator. The latter brings the power to reference set", "to the WTForm form. This method will take a WTForm form object and", "*args, **kwargs) else: raise AttributeError('Field \"{0}\" does not exist. ' 'Did you forget", "applying the build configuration to the WTForm form. This method will take a", "the DOM. \"\"\" def __init__(self, flask_wtf=False): \"\"\" Class init. 
:param flask_wtf: Is this", "field in self._dyn_fields: # If we can find the field name directly, it", "and give it back to the caller. if self.flask_wtf: # Flask WTF overrides", "The field machine name to apply the validator on :param validator: The WTForms", "configuration that was made. For each field that is valid, check all the", "will take a WTForm form object and attach new fields to it according", "one, let us # prepare the validator arguments and, if we are in", "set the canonical name and go on. field_cname = field # Since we", "the %field_name% convention where we find it. validators = [] if 'validators' in", "# Probably not a string, thus we can skip it. pass args.append(arg) if", "thus we can skip it. pass args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if", "set so just set the canonical name and go on. field_cname = field", "variables \"\"\" if not isinstance(form, FormMeta): raise TypeError('Given form is not a valid", "if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else:", "\"\"\" Process the given WTForm Form object. Itterate over the POST values and", "configuration dictionary. It has the added ability to process sets of fields that", "all the validator parameters for possible %field% replacement, then bind these parameters to", "it to the list validators.append(validator(*args, **kwargs)) # The field is setup, it is", "string, thus we can skip it. pass args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]:", "in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If", "DOM. \"\"\" def __init__(self, flask_wtf=False): \"\"\" Class init. 
:param flask_wtf: Is this form", "self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if args: self._dyn_fields[name][validator.__name__]['args'] = args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] =", "them simply pass through to be checked and bound later. \"\"\" if name", "validator to the internal configuration dictionary. :param name: The field machine name to", "validators using the designated methods \"add_field\" and \"add_validator\". Calling the \"process\" method will", "and 3 support, setting up iteritems() as either items() in 3 or iteritems()", "Class init. :param flask_wtf: Is this form a Flask WTF or a plain", "# Flask WTF overrides the form initialization # and already injects the POST", "the validator to the internal configuration dictionary. :param name: The field machine name", "optional arguments and keyword arguments that belong to the validator. We let them", "valid, check all the validator parameters for possible %field% replacement, then bind these", "in a set # replace the %field_name% convention where we find it. validators", "If we can find the field name directly, it means the field #", "[validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does not seem", "iteritems() as either items() in 3 or iteritems() in 2. \"\"\" if sys.version_info[0]", "the internal configuration dictionary. \"\"\" if name in self._dyn_fields: raise AttributeError('Field already added", "sys.version_info[0] >= 3: return dict.items() else: return dict.iteritems() def process(self, form, post): \"\"\"", "all the words that are decorated with %'s within # the arguments. for", "WTFormsDynamicFields(): \"\"\" Add dynamic (set) fields to a WTForm. 
Instantiating this class will", "the ability to reference other fields within the validator arguments with the convention", "WTForm Form object :param post: A MultiDict with the POST variables \"\"\" if", "Flask WTF overrides the form initialization # and already injects the POST variables.", "with the convention of '_X' where X is a number. For ease of", "Itterate over the POST values and check each field against the configuration that", "let us # prepare the validator arguments and, if we are in a", "the set number # to all the words that are decorated with %'s", "the validator arguments with the convention of surrounding it with % signs. Fields", "\"\"\" def __init__(self, flask_wtf=False): \"\"\" Class init. :param flask_wtf: Is this form a", "given WTForm Form object. Itterate over the POST values and check each field", "is in the POST and what is defined in the build configuration dictionary.", "in self._dyn_fields.keys()): # If the field can be split on underscore characters, #", "remember the # the set number we are at. current_set_number = str(field.split('_')[-1]) else:", "continue else: if field in self._dyn_fields: # If we can find the field", "= [] kwargs = {} if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number:", "WTF overrides the form initialization # and already injects the POST variables. form", "3: return dict.items() else: return dict.iteritems() def process(self, form, post): \"\"\" Process the", "name to apply the validator on :param validator: The WTForms validator object The", "dynamic (set) fields to a WTForm. Instantiating this class will merely create a", "in 2. \"\"\" if sys.version_info[0] >= 3: return dict.items() else: return dict.iteritems() def", "the set number we are at. current_set_number = str(field.split('_')[-1]) else: # The field", "can add fields and validators using the designated methods \"add_field\" and \"add_validator\". Calling", "# the set number we are at. 
current_set_number = str(field.split('_')[-1]) else: # The", "traced back to their canonical name so that each of these fields only", "seem to be regex-able # Probably not a string, thus we can skip", "# Since the field seems to be a valid one, let us #", "at. current_set_number = str(field.split('_')[-1]) else: # The field did not match to a", "WTForms validator object The rest are optional arguments and keyword arguments that belong", "to their validator. Finally, add the field together with their validators to the", "Instantiating this class will merely create a configuration dictionary on which you can", "are currently in a set, append the set number # to all the", "bind arguments to the validator # and add it to the list validators.append(validator(*args,", "WTForm. Instantiating this class will merely create a configuration dictionary on which you", "once in the configuration. Inside the configuration there is the ability to reference", "latter brings the power to reference set fields with their canonical name without", "POST field is one of the standard form fields. continue else: if field", "arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does not seem to be", "field name directly, it means the field # is not a set so", "forget to add it?'.format(name)) @staticmethod def iteritems(dict): \"\"\" Refusing to use a possible", "their set number (_X) when they are bound to the validator. The latter", "words that are decorated with %'s within # the arguments. for key, arg", "field, field_type(field_label, validators=validators, *field_args, **field_kwargs)) # Create an instance of the form with", "of these fields only have to be defined once in the configuration. Inside", "validator. 
We let them simply pass through to be checked and bound later.", "= self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators, *field_args, **field_kwargs)) # Create an instance of", "for validator in self._dyn_fields[field_cname]['validators']: args = [] kwargs = {} if 'args' in", "if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if args: self._dyn_fields[name][validator.__name__]['args'] = args", "raise TypeError('Given form is not a valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form):", "take care of actually applying the build configuration to the WTForm form. This", "if not current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If we are currently", "in the DOM. \"\"\" def __init__(self, flask_wtf=False): \"\"\" Class init. :param flask_wtf: Is", "self._dyn_fields[name] = {'label': label, 'type': field_type, 'args': args, 'kwargs': kwargs} def add_validator(self, name,", "[] if 'validators' in self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']: args = [] kwargs", "to a set will be automatically suffixed with their set number (_X) when", "with their validators to the form. :param form: A valid WTForm Form object", "kwargs = {} if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: args =", "is setup, it is time to add it to the form. field_type =", "the # the set number we are at. current_set_number = str(field.split('_')[-1]) else: #", "decorated with %'s within # the arguments. for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try:", "iteritems() in 2. 
\"\"\" if sys.version_info[0] >= 3: return dict.items() else: return dict.iteritems()", "[validator.__name__]: if not current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If we are", "= str(field.split('_')[-1]) else: # The field did not match to a canonical name", "'kwargs': kwargs} def add_validator(self, name, validator, *args, **kwargs): \"\"\" Add the validator to", "in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does", "providing both Python2 and 3 support, setting up iteritems() as either items() in", "None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If the field can be", "names will be traced back to their canonical name so that each of", "arguments. for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: #", "the power to reference set fields with their canonical name without needing to", "these set names will be traced back to their canonical name so that", "fields dictionary or the name # was malformed, throw it out. continue #", "if we are in a set # replace the %field_name% convention where we", "arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument", "Form object. 
Itterate over the POST values and check each field against the", "The field is setup, it is time to add it to the form.", "def add_validator(self, name, validator, *args, **kwargs): \"\"\" Add the validator to the internal", "form is not a valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class F(form): pass for", "of fields that are suffixed with the convention of '_X' where X is", "a set, append the set number # to all the words that are", "against the configuration that was made. For each field that is valid, check", "are not in a set, (re)set the current set. current_set_number = None elif", "malformed, throw it out. continue # Since the field seems to be a", "field machine name to apply the validator on :param validator: The WTForms validator", "was made. For each field that is valid, check all the validator parameters", "= {} if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: args = self._dyn_fields[field_cname]\\", "fields within the validator arguments with the convention of surrounding it with %", "go. # (Cowardly refusing to use regex here). field_cname = field[:-(len(field.split('_')[-1]))-1] # Since", "apparently are in a set, remember the # the set number we are", "= self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If we are currently in a set, append", "possible memory hugging Python2 .items() method. So for providing both Python2 and 3", "self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label,", "convention of surrounding it with % signs. Fields that belong to a set", "add fields and validators using the designated methods \"add_field\" and \"add_validator\". Calling the", "init. 
:param flask_wtf: Is this form a Flask WTF or a plain WTF", "seems to be a valid one, let us # prepare the validator arguments", "name so that each of these fields only have to be defined once", "the POST and what is defined in the build configuration dictionary. It has", "name in self._dyn_fields: raise AttributeError('Field already added to the form.') else: self._dyn_fields[name] =", "= kwargs else: self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator, *args, **kwargs) else: raise AttributeError('Field", "self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if args: self._dyn_fields[name][validator.__name__]['args'] = args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs']", "arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument", "in F(): # Skip it if the POST field is one of the", "a set so just set the canonical name and go on. field_cname =", "not a string, thus we can skip it. pass kwargs[key] = arg #", "import re import sys from wtforms.form import FormMeta class WTFormsDynamicFields(): \"\"\" Add dynamic", "later. \"\"\" if name in self._dyn_fields: if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] =", "Form object :param post: A MultiDict with the POST variables \"\"\" if not", "kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator, *args, **kwargs) else:", "object. Itterate over the POST values and check each field against the configuration", "overrides the form initialization # and already injects the POST variables. form =", "fields to a WTForm. Instantiating this class will merely create a configuration dictionary", "2. 
\"\"\" if sys.version_info[0] >= 3: return dict.items() else: return dict.iteritems() def process(self,", "each field against the configuration that was made. For each field that is", "the field together with their validators to the form. :param form: A valid", "to go. # (Cowardly refusing to use regex here). field_cname = field[:-(len(field.split('_')[-1]))-1] #", "The argument does not seem to be regex-able # Probably not a string,", "belong to a set will be automatically suffixed with their set number (_X)", "configuration, these set names will be traced back to their canonical name so", "re import sys from wtforms.form import FormMeta class WTFormsDynamicFields(): \"\"\" Add dynamic (set)", "apply the validator on :param validator: The WTForms validator object The rest are", "The WTForms validator object The rest are optional arguments and keyword arguments that", "the fields dictionary or the name # was malformed, throw it out. continue", "signs. Fields that belong to a set will be automatically suffixed with their", "args: self._dyn_fields[name][validator.__name__]['args'] = args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators'] = []", "only digits and the # everything *but* the last part is found in", "on underscore characters, # the last part contains only digits and the #", "regex here). field_cname = field[:-(len(field.split('_')[-1]))-1] # Since we apparently are in a set,", "= args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator,", "their validators to the form. :param form: A valid WTForm Form object :param", "= arg # Finally, bind arguments to the validator # and add it", "be split on underscore characters, # the last part contains only digits and", "refusing to use regex here). 
field_cname = field[:-(len(field.split('_')[-1]))-1] # Since we apparently are", "everything *but* the last part is found in the # field configuration, we", "to the caller. if self.flask_wtf: # Flask WTF overrides the form initialization #", "we can skip it. pass kwargs[key] = arg # Finally, bind arguments to", "the validator on :param validator: The WTForms validator object The rest are optional", "is defined in the build configuration dictionary. It has the added ability to", "self.flask_wtf=flask_wtf def add_field(self, name, label, field_type, *args, **kwargs): \"\"\" Add the field to", "arg # Finally, bind arguments to the validator # and add it to", "= {} self.flask_wtf=flask_wtf def add_field(self, name, label, field_type, *args, **kwargs): \"\"\" Add the", "and add it to the list validators.append(validator(*args, **kwargs)) # The field is setup,", "is one of the standard form fields. continue else: if field in self._dyn_fields:", "# If the field can be split on underscore characters, # the last", "in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does", "{} if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args']", "means the field # is not a set so just set the canonical", "For ease of configuration, these set names will be traced back to their", "part is found in the # field configuration, we are good to go.", "self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: args = self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If we", "WTForm form. 
This method will take a WTForm form object and attach new", "*field_args, **field_kwargs)) # Create an instance of the form with the newly #", "Inside the configuration there is the ability to reference other fields within the", "arguments and, if we are in a set # replace the %field_name% convention", "arguments and keyword arguments that belong to the validator. We let them simply", "'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: #", "find the field name directly, it means the field # is not a", "for key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: #", "@staticmethod def iteritems(dict): \"\"\" Refusing to use a possible memory hugging Python2 .items()", "decorated with %'s within # the arguments. for key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']):", "fields and give it back to the caller. if self.flask_wtf: # Flask WTF", "their canonical name without needing to care about the set number that will", "3 support, setting up iteritems() as either items() in 3 or iteritems() in", "canonical name so that each of these fields only have to be defined", "dictionary. :param name: The field machine name to apply the validator on :param", "the form. :param form: A valid WTForm Form object :param post: A MultiDict", "field[:-(len(field.split('_')[-1]))-1] # Since we apparently are in a set, remember the # the", "will be automatically suffixed with their set number (_X) when they are bound", "self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators, *field_args, **field_kwargs)) # Create an instance of the", "be a valid one, let us # prepare the validator arguments and, if", "good to go. # (Cowardly refusing to use regex here). 
field_cname = field[:-(len(field.split('_')[-1]))-1]", "a set # replace the %field_name% convention where we find it. validators =", "= {} if args: self._dyn_fields[name][validator.__name__]['args'] = args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else:", "in the configuration. Inside the configuration there is the ability to reference other", "that will be used later on when injecting them in the DOM. \"\"\"", "{} if args: self._dyn_fields[name][validator.__name__]['args'] = args if kwargs: self._dyn_fields[name][validator.__name__]['kwargs'] = kwargs else: self._dyn_fields[name]['validators']", "the arguments. for key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg)", "to add it to the form. field_type = self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args", "the POST variables \"\"\" if not isinstance(form, FormMeta): raise TypeError('Given form is not", "arguments with the convention of surrounding it with % signs. Fields that belong", "key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The", "caller. if self.flask_wtf: # Flask WTF overrides the form initialization # and already", "the field name directly, it means the field # is not a set", "brings the power to reference set fields with their canonical name without needing", "= re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does not seem to be regex-able", "If the field can be split on underscore characters, # the last part", "one of the standard form fields. continue else: if field in self._dyn_fields: #", "label, field_type, *args, **kwargs): \"\"\" Add the field to the internal configuration dictionary.", "possible %field% replacement, then bind these parameters to their validator. 
Finally, add the", "and keyword arguments that belong to the validator. We let them simply pass", "if not isinstance(form, FormMeta): raise TypeError('Given form is not a valid WTForm.') re_field_name", "with the convention of surrounding it with % signs. Fields that belong to", "the set number that will be used later on when injecting them in", "# prepare the validator arguments and, if we are in a set #", "use a possible memory hugging Python2 .items() method. So for providing both Python2", "field_type, *args, **kwargs): \"\"\" Add the field to the internal configuration dictionary. \"\"\"", "a valid one, let us # prepare the validator arguments and, if we", "It has the added ability to process sets of fields that are suffixed", "items() in 3 or iteritems() in 2. \"\"\" if sys.version_info[0] >= 3: return", "set number # to all the words that are decorated with %'s within", "the newly # created fields and give it back to the caller. if", "then bind these parameters to their validator. Finally, add the field together with", "field_label = self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field, field_type(field_label, validators=validators,", "the added ability to process sets of fields that are suffixed with the", "validator, *args, **kwargs) else: raise AttributeError('Field \"{0}\" does not exist. ' 'Did you", "to add it?'.format(name)) @staticmethod def iteritems(dict): \"\"\" Refusing to use a possible memory", "Probably not a string, thus we can skip it. pass args.append(arg) if 'kwargs'", "or the name # was malformed, throw it out. 
continue # Since the", "Since we apparently are in a set, remember the # the set number", "This method will take a WTForm form object and attach new fields to", "a WTForm form object and attach new fields to it according to a", "reference other fields within the validator arguments with the convention of surrounding it", "form a Flask WTF or a plain WTF instance? \"\"\" self._dyn_fields = {}", "name, validator, *args, **kwargs): \"\"\" Add the validator to the internal configuration dictionary.", "the internal configuration dictionary. :param name: The field machine name to apply the", "let them simply pass through to be checked and bound later. \"\"\" if", "with their canonical name without needing to care about the set number that", "method. So for providing both Python2 and 3 support, setting up iteritems() as", "we are not in a set, (re)set the current set. current_set_number = None", "if name in self._dyn_fields: raise AttributeError('Field already added to the form.') else: self._dyn_fields[name]", "in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if args: self._dyn_fields[name][validator.__name__]['args'] = args if kwargs:", "= {'label': label, 'type': field_type, 'args': args, 'kwargs': kwargs} def add_validator(self, name, validator,", "field that is valid, check all the validator parameters for possible %field% replacement,", "kwargs} def add_validator(self, name, validator, *args, **kwargs): \"\"\" Add the validator to the", "ease of configuration, these set names will be traced back to their canonical", "will be traced back to their canonical name so that each of these", "form, post): \"\"\" Process the given WTForm Form object. 
Itterate over the POST", "name in self._dyn_fields: if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {} if args:", "# Skip it if the POST field is one of the standard form", "**kwargs)) # The field is setup, it is time to add it to", "bound to the validator. The latter brings the power to reference set fields", "that is valid, check all the validator parameters for possible %field% replacement, then", "each of these fields only have to be defined once in the configuration.", "a string, thus we can skip it. pass args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\", "[] kwargs = {} if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: args", "needing to care about the set number that will be used later on", "the POST field is one of the standard form fields. continue else: if", "up iteritems() as either items() in 3 or iteritems() in 2. \"\"\" if", "F(): # Skip it if the POST field is one of the standard", "' 'Did you forget to add it?'.format(name)) @staticmethod def iteritems(dict): \"\"\" Refusing to", "made. For each field that is valid, check all the validator parameters for", "a set, remember the # the set number we are at. current_set_number =", "convention of '_X' where X is a number. For ease of configuration, these", "and \"add_validator\". Calling the \"process\" method will take care of actually applying the", "# everything *but* the last part is found in the # field configuration,", "wtforms.form import FormMeta class WTFormsDynamicFields(): \"\"\" Add dynamic (set) fields to a WTForm.", "build configuration dictionary. It has the added ability to process sets of fields", "set, remember the # the set number we are at. 
current_set_number = str(field.split('_')[-1])", "'validators' in self._dyn_fields[field_cname]: for validator in self._dyn_fields[field_cname]['validators']: args = [] kwargs = {}", "using the designated methods \"add_field\" and \"add_validator\". Calling the \"process\" method will take", "*args, **kwargs): \"\"\" Add the validator to the internal configuration dictionary. :param name:", "add_validator(self, name, validator, *args, **kwargs): \"\"\" Add the validator to the internal configuration", "validators=validators, *field_args, **field_kwargs)) # Create an instance of the form with the newly", "[validator.__name__]['args'] else: # If we are currently in a set, append the set", "is a number. For ease of configuration, these set names will be traced", "add the field together with their validators to the form. :param form: A", "\"{0}\" does not exist. ' 'Did you forget to add it?'.format(name)) @staticmethod def", "import FormMeta class WTFormsDynamicFields(): \"\"\" Add dynamic (set) fields to a WTForm. Instantiating", "*args, **kwargs): \"\"\" Add the field to the internal configuration dictionary. \"\"\" if", "build configuration to the WTForm form. This method will take a WTForm form", "name without needing to care about the set number that will be used", "the field can be split on underscore characters, # the last part contains", "without needing to care about the set number that will be used later", "plain WTF instance? \"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf def add_field(self, name, label, field_type,", "we are currently in a set, append the set number # to all", "parameters for possible %field% replacement, then bind these parameters to their validator. Finally,", "in a set, append the set number # to all the words that", "digits and the # everything *but* the last part is found in the", "of actually applying the build configuration to the WTForm form. This method will", "field is one of the standard form fields. 
continue else: if field in", "re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does not seem to be regex-able #", "it back to the caller. if self.flask_wtf: # Flask WTF overrides the form", "Fields that belong to a set will be automatically suffixed with their set", "a set, (re)set the current set. current_set_number = None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1]", "when injecting them in the DOM. \"\"\" def __init__(self, flask_wtf=False): \"\"\" Class init.", "field to the internal configuration dictionary. \"\"\" if name in self._dyn_fields: raise AttributeError('Field", "# Since we are not in a set, (re)set the current set. current_set_number", "set names will be traced back to their canonical name so that each", "ability to process sets of fields that are suffixed with the convention of", "set number that will be used later on when injecting them in the", "the field to the internal configuration dictionary. \"\"\" if name in self._dyn_fields: raise", "{'label': label, 'type': field_type, 'args': args, 'kwargs': kwargs} def add_validator(self, name, validator, *args,", "use regex here). field_cname = field[:-(len(field.split('_')[-1]))-1] # Since we apparently are in a", "field # is not a set so just set the canonical name and", "that belong to a set will be automatically suffixed with their set number", "else: self._dyn_fields[name] = {'label': label, 'type': field_type, 'args': args, 'kwargs': kwargs} def add_validator(self,", "we can find the field name directly, it means the field # is", "is not a set so just set the canonical name and go on.", "over the POST values and check each field against the configuration that was", "import sys from wtforms.form import FormMeta class WTFormsDynamicFields(): \"\"\" Add dynamic (set) fields", "F(form): pass for field, data in post.iteritems(): if field in F(): # Skip", "current set. 
current_set_number = None elif (field.split('_')[-1].isdigit() and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If", "{} self.flask_wtf=flask_wtf def add_field(self, name, label, field_type, *args, **kwargs): \"\"\" Add the field", "can find the field name directly, it means the field # is not", "out. continue # Since the field seems to be a valid one, let", "# The argument does not seem to be regex-able # Probably not a", "initialization # and already injects the POST variables. form = F() else: form", "form initialization # and already injects the POST variables. form = F() else:", "field_cname = field # Since we are not in a set, (re)set the", "args.append(arg) if 'kwargs' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not current_set_number: kwargs = self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']", "to use a possible memory hugging Python2 .items() method. So for providing both", "label, 'type': field_type, 'args': args, 'kwargs': kwargs} def add_validator(self, name, validator, *args, **kwargs):", "args = [] kwargs = {} if 'args' in self._dyn_fields[field_cname]\\ [validator.__name__]: if not", "designated methods \"add_field\" and \"add_validator\". Calling the \"process\" method will take care of", "surrounding it with % signs. Fields that belong to a set will be", "form. This method will take a WTForm form object and attach new fields", "the designated methods \"add_field\" and \"add_validator\". Calling the \"process\" method will take care", "exist. ' 'Did you forget to add it?'.format(name)) @staticmethod def iteritems(dict): \"\"\" Refusing", "their validator. Finally, add the field together with their validators to the form.", "else: if field in self._dyn_fields: # If we can find the field name", "arguments that belong to the validator. 
We let them simply pass through to", "in self._dyn_fields: raise AttributeError('Field already added to the form.') else: self._dyn_fields[name] = {'label':", "it with % signs. Fields that belong to a set will be automatically", "validator in self._dyn_fields[field_cname]['validators']: args = [] kwargs = {} if 'args' in self._dyn_fields[field_cname]\\", "FormMeta): raise TypeError('Given form is not a valid WTForm.') re_field_name = re.compile(r'\\%([a-zA-Z0-9_]*)\\%') class", "be automatically suffixed with their set number (_X) when they are bound to", "can be split on underscore characters, # the last part contains only digits", "for possible %field% replacement, then bind these parameters to their validator. Finally, add", "# was malformed, throw it out. continue # Since the field seems to", "self._dyn_fields[field_cname]\\ [validator.__name__]['args'] else: # If we are currently in a set, append the", "skip it. pass kwargs[key] = arg # Finally, bind arguments to the validator", "form.') else: self._dyn_fields[name] = {'label': label, 'type': field_type, 'args': args, 'kwargs': kwargs} def", "just set the canonical name and go on. field_cname = field # Since", "to be defined once in the configuration. Inside the configuration there is the", "injects the POST variables. form = F() else: form = F(post) return form", "on. field_cname = field # Since we are not in a set, (re)set", "and field[:-(len(field.split('_')[-1]))-1] in self._dyn_fields.keys()): # If the field can be split on underscore", "number we are at. current_set_number = str(field.split('_')[-1]) else: # The field did not", "validators.append(validator(*args, **kwargs)) # The field is setup, it is time to add it", "and what is defined in the build configuration dictionary. It has the added", "\"add_validator\". Calling the \"process\" method will take care of actually applying the build", "fields and validators using the designated methods \"add_field\" and \"add_validator\". 
Calling the \"process\"", "the given WTForm Form object. Itterate over the POST values and check each", "and already injects the POST variables. form = F() else: form = F(post)", "defined in the build configuration dictionary. It has the added ability to process", "the \"process\" method will take care of actually applying the build configuration to", "within # the arguments. for arg in self._dyn_fields[field_cname]\\ [validator.__name__]['args']: try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number,", "if field in F(): # Skip it if the POST field is one", "self._dyn_fields = {} self.flask_wtf=flask_wtf def add_field(self, name, label, field_type, *args, **kwargs): \"\"\" Add", "automatically suffixed with their set number (_X) when they are bound to the", "POST variables \"\"\" if not isinstance(form, FormMeta): raise TypeError('Given form is not a", "to the validator. We let them simply pass through to be checked and", "defined once in the configuration. Inside the configuration there is the ability to", "that belong to the validator. We let them simply pass through to be", "contains only digits and the # everything *but* the last part is found", "did not match to a canonical name # from the fields dictionary or", "care of actually applying the build configuration to the WTForm form. This method", "to be checked and bound later. \"\"\" if name in self._dyn_fields: if 'validators'", "in a set, remember the # the set number we are at. current_set_number", "if field in self._dyn_fields: # If we can find the field name directly,", "class WTFormsDynamicFields(): \"\"\" Add dynamic (set) fields to a WTForm. Instantiating this class", "match between what is in the POST and what is defined in the", "method will take care of actually applying the build configuration to the WTForm", "# field configuration, we are good to go. # (Cowardly refusing to use", "fields only have to be defined once in the configuration. 
Inside the configuration", "the name # was malformed, throw it out. continue # Since the field", "set, append the set number # to all the words that are decorated", "= self._dyn_fields[field_cname]['type'] field_label = self._dyn_fields[field_cname]['label'] field_args = self._dyn_fields[field_cname]['args'] field_kwargs = self._dyn_fields[field_cname]['kwargs'] setattr(F, field,", "self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs'] else: # If we are currently in a set, append the", "the build configuration to the WTForm form. This method will take a WTForm", "form object and attach new fields to it according to a match between", "The rest are optional arguments and keyword arguments that belong to the validator.", "rest are optional arguments and keyword arguments that belong to the validator. We", "an instance of the form with the newly # created fields and give", "added ability to process sets of fields that are suffixed with the convention", "set number (_X) when they are bound to the validator. The latter brings", "not in a set, (re)set the current set. current_set_number = None elif (field.split('_')[-1].isdigit()", "name # from the fields dictionary or the name # was malformed, throw", "# replace the %field_name% convention where we find it. validators = [] if", "instance? \"\"\" self._dyn_fields = {} self.flask_wtf=flask_wtf def add_field(self, name, label, field_type, *args, **kwargs):", "either items() in 3 or iteritems() in 2. \"\"\" if sys.version_info[0] >= 3:", "dictionary on which you can add fields and validators using the designated methods", "here). field_cname = field[:-(len(field.split('_')[-1]))-1] # Since we apparently are in a set, remember", "number (_X) when they are bound to the validator. 
The latter brings the", "with the newly # created fields and give it back to the caller.", "\"\"\" if name in self._dyn_fields: if 'validators' in self._dyn_fields[name]: self._dyn_fields[name]['validators'].append(validator) self._dyn_fields[name][validator.__name__] = {}", "else: self._dyn_fields[name]['validators'] = [] self.add_validator(name, validator, *args, **kwargs) else: raise AttributeError('Field \"{0}\" does", "dictionary or the name # was malformed, throw it out. continue # Since", "where we find it. validators = [] if 'validators' in self._dyn_fields[field_cname]: for validator", "the form initialization # and already injects the POST variables. form = F()", "you forget to add it?'.format(name)) @staticmethod def iteritems(dict): \"\"\" Refusing to use a", "to a WTForm. Instantiating this class will merely create a configuration dictionary on", "\"\"\" if name in self._dyn_fields: raise AttributeError('Field already added to the form.') else:", "as either items() in 3 or iteritems() in 2. \"\"\" if sys.version_info[0] >=", "within # the arguments. for key, arg in self.iteritems(self._dyn_fields[field_cname]\\ [validator.__name__]['kwargs']): try: arg =", "in the build configuration dictionary. It has the added ability to process sets", "[validator.__name__]['kwargs']): try: arg = re_field_name.sub(r'\\1'+'_'+current_set_number, arg) except: # The argument does not seem", "newly # created fields and give it back to the caller. if self.flask_wtf:", "of configuration, these set names will be traced back to their canonical name", "pass kwargs[key] = arg # Finally, bind arguments to the validator # and", "created fields and give it back to the caller. if self.flask_wtf: # Flask" ]
[ "os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import * # NOQA from coconut.constants import (", "as index_file: index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # ----------------------------------------------------------------------------------------------------------------------- from coconut.constants import", "with_toc, ) from coconut.util import univ_open import pydata_sphinx_theme # NOQA import myst_parser #", "with univ_open(\"README.rst\", \"r\") as readme_file: readme = readme_file.read() with univ_open(\"index.rst\", \"w\") as index_file:", "import univ_open import pydata_sphinx_theme # NOQA import myst_parser # NOQA # ----------------------------------------------------------------------------------------------------------------------- #", "pydata_sphinx_theme # NOQA import myst_parser # NOQA # ----------------------------------------------------------------------------------------------------------------------- # README: # -----------------------------------------------------------------------------------------------------------------------", "html_theme_options = { } master_doc = \"index\" exclude_patterns = [\"README.*\"] source_suffix = [\".rst\",", "highlight_language, ) version = VERSION release = version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options =", "absolute_import, unicode_literals, division import sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import *", "# ----------------------------------------------------------------------------------------------------------------------- # README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as readme_file: readme =", "[\".rst\", 
\".md\"] default_role = \"code\" extensions = [\"myst_parser\"] myst_enable_extensions = [ \"smartquotes\", ]", "extensions = [\"myst_parser\"] myst_enable_extensions = [ \"smartquotes\", ] myst_heading_anchors = 4 html_sidebars =", "NOQA # ----------------------------------------------------------------------------------------------------------------------- # README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as readme_file: readme", "#!/usr/bin/env python # -*- coding: utf-8 -*- # ----------------------------------------------------------------------------------------------------------------------- # INFO: # -----------------------------------------------------------------------------------------------------------------------", "-*- coding: utf-8 -*- # ----------------------------------------------------------------------------------------------------------------------- # INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME>", "unicode_literals, division import sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import * #", "coconut.constants import ( version_str_tag, without_toc, with_toc, ) from coconut.util import univ_open import pydata_sphinx_theme", "= readme_file.read() with univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS:", "project, copyright, author, highlight_language, ) version = VERSION release = version_str_tag html_theme =", "version = VERSION release = version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options = { }", "= \"code\" extensions = [\"myst_parser\"] myst_enable_extensions = [ 
\"smartquotes\", ] myst_heading_anchors = 4", "from __future__ import print_function, absolute_import, unicode_literals, division import sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))", "VERSION release = version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options = { } master_doc =", "<reponame>evhub/coconut<gh_stars>1000+ #!/usr/bin/env python # -*- coding: utf-8 -*- # ----------------------------------------------------------------------------------------------------------------------- # INFO: #", "# INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License: Apache 2.0 Description: Sphinx configuration", "} master_doc = \"index\" exclude_patterns = [\"README.*\"] source_suffix = [\".rst\", \".md\"] default_role =", "{ } master_doc = \"index\" exclude_patterns = [\"README.*\"] source_suffix = [\".rst\", \".md\"] default_role", "# ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function, absolute_import, unicode_literals, division", "# IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function, absolute_import, unicode_literals, division import sys", "= [\".rst\", \".md\"] default_role = \"code\" extensions = [\"myst_parser\"] myst_enable_extensions = [ \"smartquotes\",", "myst_enable_extensions = [ \"smartquotes\", ] myst_heading_anchors = 4 html_sidebars = { \"**\": [", "NOQA from coconut.constants import ( version_str_tag, without_toc, with_toc, ) from coconut.util import univ_open", "# 
----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # ----------------------------------------------------------------------------------------------------------------------- from coconut.constants import ( # NOQA project,", "NOQA project, copyright, author, highlight_language, ) version = VERSION release = version_str_tag html_theme", "\"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # ----------------------------------------------------------------------------------------------------------------------- from coconut.constants", "= [ \"smartquotes\", ] myst_heading_anchors = 4 html_sidebars = { \"**\": [ \"localtoc.html\",", "Language. \"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function, absolute_import,", "configuration file for the Coconut Programming Language. \"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: #", "for the Coconut Programming Language. 
\"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from", "coding: utf-8 -*- # ----------------------------------------------------------------------------------------------------------------------- # INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License:", "copyright, author, highlight_language, ) version = VERSION release = version_str_tag html_theme = \"pydata_sphinx_theme\"", "# ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License: Apache 2.0 Description: Sphinx configuration file for", "= { } master_doc = \"index\" exclude_patterns = [\"README.*\"] source_suffix = [\".rst\", \".md\"]", "# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------------------------------------------------- # INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author:", "DEFINITIONS: # ----------------------------------------------------------------------------------------------------------------------- from coconut.constants import ( # NOQA project, copyright, author, highlight_language,", "univ_open import pydata_sphinx_theme # NOQA import myst_parser # NOQA # ----------------------------------------------------------------------------------------------------------------------- # README:", "----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function, absolute_import, unicode_literals, division import sys import os.path sys.path.insert(0,", "\".md\"] 
default_role = \"code\" extensions = [\"myst_parser\"] myst_enable_extensions = [ \"smartquotes\", ] myst_heading_anchors", "= \"index\" exclude_patterns = [\"README.*\"] source_suffix = [\".rst\", \".md\"] default_role = \"code\" extensions", "= version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options = { } master_doc = \"index\" exclude_patterns", "from coconut.root import * # NOQA from coconut.constants import ( version_str_tag, without_toc, with_toc,", "index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # ----------------------------------------------------------------------------------------------------------------------- from coconut.constants import ( #", "author, highlight_language, ) version = VERSION release = version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options", "# NOQA import myst_parser # NOQA # ----------------------------------------------------------------------------------------------------------------------- # README: # ----------------------------------------------------------------------------------------------------------------------- with", "\"\"\" Author: <NAME> License: Apache 2.0 Description: Sphinx configuration file for the Coconut", ") version = VERSION release = version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options = {", "# DEFINITIONS: # ----------------------------------------------------------------------------------------------------------------------- from coconut.constants import ( # NOQA project, copyright, author,", "NOQA import myst_parser # NOQA # ----------------------------------------------------------------------------------------------------------------------- # README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\",", 
"----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License: Apache 2.0 Description: Sphinx configuration file for the", "Programming Language. \"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function,", "as readme_file: readme = readme_file.read() with univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc)) #", "[\"README.*\"] source_suffix = [\".rst\", \".md\"] default_role = \"code\" extensions = [\"myst_parser\"] myst_enable_extensions =", "import pydata_sphinx_theme # NOQA import myst_parser # NOQA # ----------------------------------------------------------------------------------------------------------------------- # README: #", "\"smartquotes\", ] myst_heading_anchors = 4 html_sidebars = { \"**\": [ \"localtoc.html\", ], }", "----------------------------------------------------------------------------------------------------------------------- from coconut.constants import ( # NOQA project, copyright, author, highlight_language, ) version", "----------------------------------------------------------------------------------------------------------------------- # README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as readme_file: readme = readme_file.read()", "# NOQA # ----------------------------------------------------------------------------------------------------------------------- # README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as readme_file:", 
"python # -*- coding: utf-8 -*- # ----------------------------------------------------------------------------------------------------------------------- # INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\"", "html_theme = \"pydata_sphinx_theme\" html_theme_options = { } master_doc = \"index\" exclude_patterns = [\"README.*\"]", "index_file: index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # ----------------------------------------------------------------------------------------------------------------------- from coconut.constants import (", "= [\"myst_parser\"] myst_enable_extensions = [ \"smartquotes\", ] myst_heading_anchors = 4 html_sidebars = {", "__future__ import print_function, absolute_import, unicode_literals, division import sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from", "Author: <NAME> License: Apache 2.0 Description: Sphinx configuration file for the Coconut Programming", "Sphinx configuration file for the Coconut Programming Language. 
\"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS:", "import print_function, absolute_import, unicode_literals, division import sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root", "* # NOQA from coconut.constants import ( version_str_tag, without_toc, with_toc, ) from coconut.util", "( version_str_tag, without_toc, with_toc, ) from coconut.util import univ_open import pydata_sphinx_theme # NOQA", "# NOQA project, copyright, author, highlight_language, ) version = VERSION release = version_str_tag", "import ( version_str_tag, without_toc, with_toc, ) from coconut.util import univ_open import pydata_sphinx_theme #", "utf-8 -*- # ----------------------------------------------------------------------------------------------------------------------- # INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License: Apache", "source_suffix = [\".rst\", \".md\"] default_role = \"code\" extensions = [\"myst_parser\"] myst_enable_extensions = [", "sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import * # NOQA from coconut.constants import ( version_str_tag,", "import sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import * # NOQA from", "# README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as readme_file: readme = readme_file.read() with", "# ----------------------------------------------------------------------------------------------------------------------- from coconut.constants import ( # NOQA project, copyright, author, highlight_language, )", "exclude_patterns = [\"README.*\"] source_suffix = [\".rst\", \".md\"] default_role = \"code\" 
extensions = [\"myst_parser\"]", "# ----------------------------------------------------------------------------------------------------------------------- # INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License: Apache 2.0 Description:", "coconut.root import * # NOQA from coconut.constants import ( version_str_tag, without_toc, with_toc, )", "file for the Coconut Programming Language. \"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # -----------------------------------------------------------------------------------------------------------------------", "= [\"README.*\"] source_suffix = [\".rst\", \".md\"] default_role = \"code\" extensions = [\"myst_parser\"] myst_enable_extensions", "= VERSION release = version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options = { } master_doc", "\"pydata_sphinx_theme\" html_theme_options = { } master_doc = \"index\" exclude_patterns = [\"README.*\"] source_suffix =", "without_toc, with_toc, ) from coconut.util import univ_open import pydata_sphinx_theme # NOQA import myst_parser", "-*- # ----------------------------------------------------------------------------------------------------------------------- # INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License: Apache 2.0", "default_role = \"code\" extensions = [\"myst_parser\"] myst_enable_extensions = [ \"smartquotes\", ] myst_heading_anchors =", "univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # 
----------------------------------------------------------------------------------------------------------------------- from", "Apache 2.0 Description: Sphinx configuration file for the Coconut Programming Language. \"\"\" #", "sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import * # NOQA from coconut.constants", "import * # NOQA from coconut.constants import ( version_str_tag, without_toc, with_toc, ) from", "import ( # NOQA project, copyright, author, highlight_language, ) version = VERSION release", "= \"pydata_sphinx_theme\" html_theme_options = { } master_doc = \"index\" exclude_patterns = [\"README.*\"] source_suffix", "from coconut.constants import ( version_str_tag, without_toc, with_toc, ) from coconut.util import univ_open import", "division import sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import * # NOQA", "Description: Sphinx configuration file for the Coconut Programming Language. \"\"\" # ----------------------------------------------------------------------------------------------------------------------- #", "coconut.util import univ_open import pydata_sphinx_theme # NOQA import myst_parser # NOQA # -----------------------------------------------------------------------------------------------------------------------", "coconut.constants import ( # NOQA project, copyright, author, highlight_language, ) version = VERSION", "# NOQA from coconut.constants import ( version_str_tag, without_toc, with_toc, ) from coconut.util import", "2.0 Description: Sphinx configuration file for the Coconut Programming Language. \"\"\" # -----------------------------------------------------------------------------------------------------------------------", "License: Apache 2.0 Description: Sphinx configuration file for the Coconut Programming Language. 
\"\"\"", "import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import * # NOQA from coconut.constants import", "the Coconut Programming Language. \"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__", "release = version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options = { } master_doc = \"index\"", "README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as readme_file: readme = readme_file.read() with univ_open(\"index.rst\",", "[ \"smartquotes\", ] myst_heading_anchors = 4 html_sidebars = { \"**\": [ \"localtoc.html\", ],", "with univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # -----------------------------------------------------------------------------------------------------------------------", "from coconut.constants import ( # NOQA project, copyright, author, highlight_language, ) version =", ") from coconut.util import univ_open import pydata_sphinx_theme # NOQA import myst_parser # NOQA", "import myst_parser # NOQA # ----------------------------------------------------------------------------------------------------------------------- # README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\")", "with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # 
----------------------------------------------------------------------------------------------------------------------- from coconut.constants import ( # NOQA", "----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: # ----------------------------------------------------------------------------------------------------------------------- from coconut.constants import ( # NOQA project, copyright,", "# ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as readme_file: readme = readme_file.read() with univ_open(\"index.rst\", \"w\")", "Coconut Programming Language. \"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__ import", "readme = readme_file.read() with univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- #", "readme_file.read() with univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc)) # ----------------------------------------------------------------------------------------------------------------------- # DEFINITIONS: #", "[\"myst_parser\"] myst_enable_extensions = [ \"smartquotes\", ] myst_heading_anchors = 4 html_sidebars = { \"**\":", "print_function, absolute_import, unicode_literals, division import sys import os.path sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) from coconut.root import", "\"index\" exclude_patterns = [\"README.*\"] source_suffix = [\".rst\", \".md\"] default_role = \"code\" extensions =", 
"myst_parser # NOQA # ----------------------------------------------------------------------------------------------------------------------- # README: # ----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as", "----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function, absolute_import, unicode_literals, division import", "readme_file: readme = readme_file.read() with univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc)) # -----------------------------------------------------------------------------------------------------------------------", "INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License: Apache 2.0 Description: Sphinx configuration file", "\"code\" extensions = [\"myst_parser\"] myst_enable_extensions = [ \"smartquotes\", ] myst_heading_anchors = 4 html_sidebars", "univ_open(\"README.rst\", \"r\") as readme_file: readme = readme_file.read() with univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc,", "version_str_tag html_theme = \"pydata_sphinx_theme\" html_theme_options = { } master_doc = \"index\" exclude_patterns =", "version_str_tag, without_toc, with_toc, ) from coconut.util import univ_open import pydata_sphinx_theme # NOQA import", "\"\"\" # ----------------------------------------------------------------------------------------------------------------------- # IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function, 
absolute_import, unicode_literals,", "os.path.dirname(os.path.abspath(__file__))) from coconut.root import * # NOQA from coconut.constants import ( version_str_tag, without_toc,", "<NAME> License: Apache 2.0 Description: Sphinx configuration file for the Coconut Programming Language.", "# ----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function, absolute_import, unicode_literals, division import sys import os.path", "from coconut.util import univ_open import pydata_sphinx_theme # NOQA import myst_parser # NOQA #", "----------------------------------------------------------------------------------------------------------------------- # INFO: # ----------------------------------------------------------------------------------------------------------------------- \"\"\" Author: <NAME> License: Apache 2.0 Description: Sphinx", "( # NOQA project, copyright, author, highlight_language, ) version = VERSION release =", "master_doc = \"index\" exclude_patterns = [\"README.*\"] source_suffix = [\".rst\", \".md\"] default_role = \"code\"", "\"r\") as readme_file: readme = readme_file.read() with univ_open(\"index.rst\", \"w\") as index_file: index_file.write(readme.replace(without_toc, with_toc))", "IMPORTS: # ----------------------------------------------------------------------------------------------------------------------- from __future__ import print_function, absolute_import, unicode_literals, division import sys import", "----------------------------------------------------------------------------------------------------------------------- with univ_open(\"README.rst\", \"r\") as readme_file: readme = readme_file.read() with univ_open(\"index.rst\", \"w\") as" ]
[ "= 500000.0 tasa = 0.05 pago_mensual = 2684.11 total_pagado = 0.0 mes =", "pago_extra_mes_fin = 108 pago_extra = 1000 mes_adelantado = 0 while saldo > 0:", "+ 1 else: saldo = saldo - pago_mensual total_pagado = total_pagado + pago_mensual", "total_pagado = 0.0 mes = 0 # 1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin =", "- pago_mensual total_pagado = total_pagado + pago_mensual print(mes, round(total_pagado, 2), round(saldo, 2)) #", "saldo - pago_mensual - pago_extra total_pagado = total_pagado + pago_mensual + pago_extra mes_adelantado", "# 1.7 saldo = 500000.0 tasa = 0.05 pago_mensual = 2684.11 total_pagado =", "# 1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin = 108 pago_extra = 1000 mes_adelantado =", "saldo = saldo - pago_mensual total_pagado = total_pagado + pago_mensual print(mes, round(total_pagado, 2),", "if (pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin): saldo = saldo - pago_mensual - pago_extra", "108 pago_extra = 1000 mes_adelantado = 0 while saldo > 0: saldo =", "pago_mensual = 2684.11 total_pagado = 0.0 mes = 0 # 1.9 pago_extra_mes_comienzo =", "round(saldo, 2)) # 1.10 print('Total pagado: ', round(total_pagado, ndigits=2)) print('Meses: ', mes) print", "- pago_extra total_pagado = total_pagado + pago_mensual + pago_extra mes_adelantado = mes_adelantado +", "saldo > 0: saldo = saldo * (1+tasa/12) mes = mes + 1", "1000 mes_adelantado = 0 while saldo > 0: saldo = saldo * (1+tasa/12)", "tasa = 0.05 pago_mensual = 2684.11 total_pagado = 0.0 mes = 0 #", "hipoteca de David # 1.9: Calculadora de adelantos # 1.10: Tablas # 1.7", "0.05 pago_mensual = 2684.11 total_pagado = 0.0 mes = 0 # 1.9 pago_extra_mes_comienzo", "total_pagado = total_pagado + pago_mensual + pago_extra mes_adelantado = mes_adelantado + 1 else:", "saldo - pago_mensual total_pagado = total_pagado + pago_mensual print(mes, round(total_pagado, 2), round(saldo, 2))", "= 1000 mes_adelantado = 0 while saldo > 0: saldo = saldo *", "= mes_adelantado + 1 else: saldo = saldo - 
pago_mensual total_pagado = total_pagado", "pago_mensual total_pagado = total_pagado + pago_mensual print(mes, round(total_pagado, 2), round(saldo, 2)) # 1.10", "mes <= pago_extra_mes_fin): saldo = saldo - pago_mensual - pago_extra total_pagado = total_pagado", "de David # 1.9: Calculadora de adelantos # 1.10: Tablas # 1.7 saldo", "> 0: saldo = saldo * (1+tasa/12) mes = mes + 1 if", "print(mes, round(total_pagado, 2), round(saldo, 2)) # 1.10 print('Total pagado: ', round(total_pagado, ndigits=2)) print('Meses:", "1 else: saldo = saldo - pago_mensual total_pagado = total_pagado + pago_mensual print(mes,", "= 0.0 mes = 0 # 1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin = 108", "saldo = saldo * (1+tasa/12) mes = mes + 1 if (pago_extra_mes_comienzo <=", "2684.11 total_pagado = 0.0 mes = 0 # 1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin", "mes = mes + 1 if (pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin): saldo =", "= 108 pago_extra = 1000 mes_adelantado = 0 while saldo > 0: saldo", "Tablas # 1.7 saldo = 500000.0 tasa = 0.05 pago_mensual = 2684.11 total_pagado", "0.0 mes = 0 # 1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin = 108 pago_extra", "pago_extra total_pagado = total_pagado + pago_mensual + pago_extra mes_adelantado = mes_adelantado + 1", "mes_adelantado = 0 while saldo > 0: saldo = saldo * (1+tasa/12) mes", "+ 1 if (pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin): saldo = saldo - pago_mensual", "de adelantos # 1.10: Tablas # 1.7 saldo = 500000.0 tasa = 0.05", "1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin = 108 pago_extra = 1000 mes_adelantado = 0", "mes + 1 if (pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin): saldo = saldo -", "saldo = 500000.0 tasa = 0.05 pago_mensual = 2684.11 total_pagado = 0.0 mes", "2), round(saldo, 2)) # 1.10 print('Total pagado: ', round(total_pagado, ndigits=2)) print('Meses: ', mes)", "pago_mensual + pago_extra mes_adelantado = mes_adelantado + 1 else: saldo = saldo -", "- pago_mensual - 
pago_extra total_pagado = total_pagado + pago_mensual + pago_extra mes_adelantado =", "David # 1.9: Calculadora de adelantos # 1.10: Tablas # 1.7 saldo =", "= 0 while saldo > 0: saldo = saldo * (1+tasa/12) mes =", "= total_pagado + pago_mensual + pago_extra mes_adelantado = mes_adelantado + 1 else: saldo", "pago_mensual - pago_extra total_pagado = total_pagado + pago_mensual + pago_extra mes_adelantado = mes_adelantado", "adelantos # 1.10: Tablas # 1.7 saldo = 500000.0 tasa = 0.05 pago_mensual", "2)) # 1.10 print('Total pagado: ', round(total_pagado, ndigits=2)) print('Meses: ', mes) print ('Meses", "= total_pagado + pago_mensual print(mes, round(total_pagado, 2), round(saldo, 2)) # 1.10 print('Total pagado:", "= 0 # 1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin = 108 pago_extra = 1000", "1.9: Calculadora de adelantos # 1.10: Tablas # 1.7 saldo = 500000.0 tasa", "else: saldo = saldo - pago_mensual total_pagado = total_pagado + pago_mensual print(mes, round(total_pagado,", "total_pagado = total_pagado + pago_mensual print(mes, round(total_pagado, 2), round(saldo, 2)) # 1.10 print('Total", "+ pago_mensual print(mes, round(total_pagado, 2), round(saldo, 2)) # 1.10 print('Total pagado: ', round(total_pagado,", "pago_mensual print(mes, round(total_pagado, 2), round(saldo, 2)) # 1.10 print('Total pagado: ', round(total_pagado, ndigits=2))", "Calculadora de adelantos # 1.10: Tablas # 1.7 saldo = 500000.0 tasa =", "= saldo - pago_mensual total_pagado = total_pagado + pago_mensual print(mes, round(total_pagado, 2), round(saldo,", "= 61 pago_extra_mes_fin = 108 pago_extra = 1000 mes_adelantado = 0 while saldo", "1.7: La hipoteca de David # 1.9: Calculadora de adelantos # 1.10: Tablas", "+ pago_extra mes_adelantado = mes_adelantado + 1 else: saldo = saldo - pago_mensual", "mes_adelantado = mes_adelantado + 1 else: saldo = saldo - pago_mensual total_pagado =", "mes_adelantado + 1 else: saldo = saldo - pago_mensual total_pagado = total_pagado +", "pago_extra_mes_fin): 
saldo = saldo - pago_mensual - pago_extra total_pagado = total_pagado + pago_mensual", "1.7 saldo = 500000.0 tasa = 0.05 pago_mensual = 2684.11 total_pagado = 0.0", "61 pago_extra_mes_fin = 108 pago_extra = 1000 mes_adelantado = 0 while saldo >", "# 1.9: Calculadora de adelantos # 1.10: Tablas # 1.7 saldo = 500000.0", "round(total_pagado, 2), round(saldo, 2)) # 1.10 print('Total pagado: ', round(total_pagado, ndigits=2)) print('Meses: ',", "1.10 print('Total pagado: ', round(total_pagado, ndigits=2)) print('Meses: ', mes) print ('Meses adelantados: ',", "total_pagado + pago_mensual print(mes, round(total_pagado, 2), round(saldo, 2)) # 1.10 print('Total pagado: ',", "(pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin): saldo = saldo - pago_mensual - pago_extra total_pagado", "* (1+tasa/12) mes = mes + 1 if (pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin):", "+ pago_mensual + pago_extra mes_adelantado = mes_adelantado + 1 else: saldo = saldo", "pago_extra_mes_comienzo = 61 pago_extra_mes_fin = 108 pago_extra = 1000 mes_adelantado = 0 while", "(1+tasa/12) mes = mes + 1 if (pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin): saldo", "<= mes <= pago_extra_mes_fin): saldo = saldo - pago_mensual - pago_extra total_pagado =", "0: saldo = saldo * (1+tasa/12) mes = mes + 1 if (pago_extra_mes_comienzo", "La hipoteca de David # 1.9: Calculadora de adelantos # 1.10: Tablas #", "while saldo > 0: saldo = saldo * (1+tasa/12) mes = mes +", "= 0.05 pago_mensual = 2684.11 total_pagado = 0.0 mes = 0 # 1.9", "<= pago_extra_mes_fin): saldo = saldo - pago_mensual - pago_extra total_pagado = total_pagado +", "= 2684.11 total_pagado = 0.0 mes = 0 # 1.9 pago_extra_mes_comienzo = 61", "= saldo * (1+tasa/12) mes = mes + 1 if (pago_extra_mes_comienzo <= mes", "mes = 0 # 1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin = 108 pago_extra =", "1 if (pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin): saldo = saldo - pago_mensual -", "total_pagado + pago_mensual + pago_extra 
mes_adelantado = mes_adelantado + 1 else: saldo =", "# 1.7: La hipoteca de David # 1.9: Calculadora de adelantos # 1.10:", "0 # 1.9 pago_extra_mes_comienzo = 61 pago_extra_mes_fin = 108 pago_extra = 1000 mes_adelantado", "= mes + 1 if (pago_extra_mes_comienzo <= mes <= pago_extra_mes_fin): saldo = saldo", "pago_extra = 1000 mes_adelantado = 0 while saldo > 0: saldo = saldo", "saldo = saldo - pago_mensual - pago_extra total_pagado = total_pagado + pago_mensual +", "# 1.10 print('Total pagado: ', round(total_pagado, ndigits=2)) print('Meses: ', mes) print ('Meses adelantados:", "saldo * (1+tasa/12) mes = mes + 1 if (pago_extra_mes_comienzo <= mes <=", "0 while saldo > 0: saldo = saldo * (1+tasa/12) mes = mes", "# 1.10: Tablas # 1.7 saldo = 500000.0 tasa = 0.05 pago_mensual =", "500000.0 tasa = 0.05 pago_mensual = 2684.11 total_pagado = 0.0 mes = 0", "pago_extra mes_adelantado = mes_adelantado + 1 else: saldo = saldo - pago_mensual total_pagado", "print('Total pagado: ', round(total_pagado, ndigits=2)) print('Meses: ', mes) print ('Meses adelantados: ', mes_adelantado)", "1.10: Tablas # 1.7 saldo = 500000.0 tasa = 0.05 pago_mensual = 2684.11", "= saldo - pago_mensual - pago_extra total_pagado = total_pagado + pago_mensual + pago_extra" ]
[ "-> List[int]: return sorted([el * el for el in A]) if __name__ ==", "return sorted([el * el for el in A]) if __name__ == \"__main__\": assert", "squares of each number, also in sorted non-decreasing order. Example 1: Input: [-4,-1,0,3,10]", "from typing import List class Solution: def sortedSquares(self, A: List[int]) -> List[int]: return", "1 <= A.length <= 10000 -10000 <= A[i] <= 10000 A is sorted", "* el for el in A]) if __name__ == \"__main__\": assert Solution().sortedSquares([-4,-1,0,3,10]) ==", "non-decreasing order, return an array of the squares of each number, also in", "an array of the squares of each number, also in sorted non-decreasing order.", "[0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1 <= A.length <= 10000", "Example 2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1 <= A.length <= 10000 -10000", "Given an array of integers A sorted in non-decreasing order, return an array", "<= 10000 -10000 <= A[i] <= 10000 A is sorted in non-decreasing order.", "List[int]) -> List[int]: return sorted([el * el for el in A]) if __name__", "sortedSquares(self, A: List[int]) -> List[int]: return sorted([el * el for el in A])", "A: List[int]) -> List[int]: return sorted([el * el for el in A]) if", "sorted non-decreasing order. Example 1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11]", "sorted in non-decreasing order. \"\"\" from typing import List class Solution: def sortedSquares(self,", "1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1", "array of the squares of each number, also in sorted non-decreasing order. Example", "typing import List class Solution: def sortedSquares(self, A: List[int]) -> List[int]: return sorted([el", "non-decreasing order. 
\"\"\" from typing import List class Solution: def sortedSquares(self, A: List[int])", "Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1 <= A.length <= 10000 -10000 <= A[i]", "<= 10000 A is sorted in non-decreasing order. \"\"\" from typing import List", "in non-decreasing order. \"\"\" from typing import List class Solution: def sortedSquares(self, A:", "2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1 <= A.length <= 10000 -10000 <=", "List class Solution: def sortedSquares(self, A: List[int]) -> List[int]: return sorted([el * el", "order, return an array of the squares of each number, also in sorted", "order. Example 1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121]", "in A]) if __name__ == \"__main__\": assert Solution().sortedSquares([-4,-1,0,3,10]) == [0,1,9,16,100] assert Solution().sortedSquares([-7,-3,2,3,11]) ==", "sorted in non-decreasing order, return an array of the squares of each number,", "integers A sorted in non-decreasing order, return an array of the squares of", "Note: 1 <= A.length <= 10000 -10000 <= A[i] <= 10000 A is", "<= A.length <= 10000 -10000 <= A[i] <= 10000 A is sorted in", "non-decreasing order. Example 1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11] Output:", "\"\"\" Given an array of integers A sorted in non-decreasing order, return an", "is sorted in non-decreasing order. \"\"\" from typing import List class Solution: def", "number, also in sorted non-decreasing order. Example 1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example", "\"\"\" from typing import List class Solution: def sortedSquares(self, A: List[int]) -> List[int]:", "<= A[i] <= 10000 A is sorted in non-decreasing order. 
\"\"\" from typing", "def sortedSquares(self, A: List[int]) -> List[int]: return sorted([el * el for el in", "return an array of the squares of each number, also in sorted non-decreasing", "10000 -10000 <= A[i] <= 10000 A is sorted in non-decreasing order. \"\"\"", "A]) if __name__ == \"__main__\": assert Solution().sortedSquares([-4,-1,0,3,10]) == [0,1,9,16,100] assert Solution().sortedSquares([-7,-3,2,3,11]) == [4,9,9,49,121]", "of integers A sorted in non-decreasing order, return an array of the squares", "for el in A]) if __name__ == \"__main__\": assert Solution().sortedSquares([-4,-1,0,3,10]) == [0,1,9,16,100] assert", "Example 1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note:", "sorted([el * el for el in A]) if __name__ == \"__main__\": assert Solution().sortedSquares([-4,-1,0,3,10])", "A sorted in non-decreasing order, return an array of the squares of each", "10000 A is sorted in non-decreasing order. \"\"\" from typing import List class", "of the squares of each number, also in sorted non-decreasing order. Example 1:", "Output: [0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1 <= A.length <=", "in non-decreasing order, return an array of the squares of each number, also", "also in sorted non-decreasing order. Example 1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2:", "Output: [4,9,9,49,121] Note: 1 <= A.length <= 10000 -10000 <= A[i] <= 10000", "[4,9,9,49,121] Note: 1 <= A.length <= 10000 -10000 <= A[i] <= 10000 A", "el in A]) if __name__ == \"__main__\": assert Solution().sortedSquares([-4,-1,0,3,10]) == [0,1,9,16,100] assert Solution().sortedSquares([-7,-3,2,3,11])", "in sorted non-decreasing order. Example 1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2: Input:", "order. \"\"\" from typing import List class Solution: def sortedSquares(self, A: List[int]) ->", "each number, also in sorted non-decreasing order. 
Example 1: Input: [-4,-1,0,3,10] Output: [0,1,9,16,100]", "the squares of each number, also in sorted non-decreasing order. Example 1: Input:", "an array of integers A sorted in non-decreasing order, return an array of", "el for el in A]) if __name__ == \"__main__\": assert Solution().sortedSquares([-4,-1,0,3,10]) == [0,1,9,16,100]", "-10000 <= A[i] <= 10000 A is sorted in non-decreasing order. \"\"\" from", "A[i] <= 10000 A is sorted in non-decreasing order. \"\"\" from typing import", "import List class Solution: def sortedSquares(self, A: List[int]) -> List[int]: return sorted([el *", "class Solution: def sortedSquares(self, A: List[int]) -> List[int]: return sorted([el * el for", "List[int]: return sorted([el * el for el in A]) if __name__ == \"__main__\":", "[-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1 <= A.length", "A.length <= 10000 -10000 <= A[i] <= 10000 A is sorted in non-decreasing", "Input: [-4,-1,0,3,10] Output: [0,1,9,16,100] Example 2: Input: [-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1 <=", "array of integers A sorted in non-decreasing order, return an array of the", "<filename>Python/easy/0977_squares_of_a_sorted_array.py \"\"\" Given an array of integers A sorted in non-decreasing order, return", "Solution: def sortedSquares(self, A: List[int]) -> List[int]: return sorted([el * el for el", "of each number, also in sorted non-decreasing order. Example 1: Input: [-4,-1,0,3,10] Output:", "A is sorted in non-decreasing order. \"\"\" from typing import List class Solution:", "[-7,-3,2,3,11] Output: [4,9,9,49,121] Note: 1 <= A.length <= 10000 -10000 <= A[i] <=" ]