gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package nxt;

import nxt.db.DbVersion;

import java.sql.SQLException;

/**
 * Complete schema migration history for the Nxt H2 database.
 *
 * <p>Each {@code case} in {@link #update(int)} is one migration step. The switch
 * deliberately has no {@code break} statements: entering at the database's current
 * version falls through every later step in order until {@code case 200} returns,
 * bringing the schema up to date. Do not reorder or insert steps in the middle —
 * only append new cases before the final returning case.
 */
class NxtDbVersion extends DbVersion {

    /**
     * Applies migration step {@code nextUpdate} and, via intentional fall-through,
     * all subsequent steps up to the current schema version.
     *
     * <p>{@code apply(null)} steps run no SQL; they appear to advance the stored
     * schema version only (superseded or removed migrations kept as placeholders
     * so historical version numbers stay stable) — see {@code DbVersion.apply}.
     *
     * @param nextUpdate the first migration step to apply (1-based)
     * @throws SQLException if any migration statement fails
     */
    protected void update(int nextUpdate) throws SQLException {
        switch (nextUpdate) {
            // --- Initial block/transaction schema and indexes (cases 1-9) ---
            case 1:
                apply("CREATE TABLE IF NOT EXISTS block (db_id IDENTITY, id BIGINT NOT NULL, version INT NOT NULL, " +
                        "timestamp INT NOT NULL, previous_block_id BIGINT, " +
                        "FOREIGN KEY (previous_block_id) REFERENCES block (id) ON DELETE CASCADE, total_amount INT NOT NULL, " +
                        "total_fee INT NOT NULL, payload_length INT NOT NULL, generator_public_key BINARY(32) NOT NULL, " +
                        "previous_block_hash BINARY(32), cumulative_difficulty VARBINARY NOT NULL, base_target BIGINT NOT NULL, " +
                        "next_block_id BIGINT, FOREIGN KEY (next_block_id) REFERENCES block (id) ON DELETE SET NULL, " +
                        "index INT NOT NULL, height INT NOT NULL, generation_signature BINARY(64) NOT NULL, " +
                        "block_signature BINARY(64) NOT NULL, payload_hash BINARY(32) NOT NULL, generator_account_id BIGINT NOT NULL)");
            case 2:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS block_id_idx ON block (id)");
            case 3:
                apply("CREATE TABLE IF NOT EXISTS transaction (db_id IDENTITY, id BIGINT NOT NULL, " +
                        "deadline SMALLINT NOT NULL, sender_public_key BINARY(32) NOT NULL, recipient_id BIGINT NOT NULL, " +
                        "amount INT NOT NULL, fee INT NOT NULL, referenced_transaction_id BIGINT, index INT NOT NULL, " +
                        "height INT NOT NULL, block_id BIGINT NOT NULL, FOREIGN KEY (block_id) REFERENCES block (id) ON DELETE CASCADE, " +
                        "signature BINARY(64) NOT NULL, timestamp INT NOT NULL, type TINYINT NOT NULL, subtype TINYINT NOT NULL, " +
                        "sender_account_id BIGINT NOT NULL, attachment OTHER)");
            case 4:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS transaction_id_idx ON transaction (id)");
            case 5:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS block_height_idx ON block (height)");
            case 6:
                apply("CREATE INDEX IF NOT EXISTS transaction_timestamp_idx ON transaction (timestamp)");
            case 7:
                apply("CREATE INDEX IF NOT EXISTS block_generator_account_id_idx ON block (generator_account_id)");
            case 8:
                apply("CREATE INDEX IF NOT EXISTS transaction_sender_account_id_idx ON transaction (sender_account_id)");
            case 9:
                apply("CREATE INDEX IF NOT EXISTS transaction_recipient_id_idx ON transaction (recipient_id)");
            // --- Column/index renames and cleanup (cases 10-15) ---
            case 10:
                apply("ALTER TABLE block ALTER COLUMN generator_account_id RENAME TO generator_id");
            case 11:
                apply("ALTER TABLE transaction ALTER COLUMN sender_account_id RENAME TO sender_id");
            case 12:
                apply("ALTER INDEX block_generator_account_id_idx RENAME TO block_generator_id_idx");
            case 13:
                apply("ALTER INDEX transaction_sender_account_id_idx RENAME TO transaction_sender_id_idx");
            case 14:
                apply("ALTER TABLE block DROP COLUMN IF EXISTS index");
            case 15:
                apply("ALTER TABLE transaction DROP COLUMN IF EXISTS index");
            case 16:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS block_timestamp INT");
            case 17:
                apply(null); // no-op placeholder; advances version only
            case 18:
                apply("ALTER TABLE transaction ALTER COLUMN block_timestamp SET NOT NULL");
            case 19:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS hash BINARY(32)");
            case 20:
                apply(null);
            case 21:
                apply(null);
            case 22:
                apply("CREATE INDEX IF NOT EXISTS transaction_hash_idx ON transaction (hash)");
            case 23:
                apply(null);
            // --- Widen amount/fee columns from INT to BIGINT (cases 24-27) ---
            case 24:
                apply("ALTER TABLE block ALTER COLUMN total_amount BIGINT");
            case 25:
                apply("ALTER TABLE block ALTER COLUMN total_fee BIGINT");
            case 26:
                apply("ALTER TABLE transaction ALTER COLUMN amount BIGINT");
            case 27:
                apply("ALTER TABLE transaction ALTER COLUMN fee BIGINT");
            case 28:
                apply(null);
            case 29:
                apply(null);
            case 30:
                apply(null);
            case 31:
                apply(null);
            case 32:
                apply(null);
            case 33:
                apply(null);
            case 34:
                apply(null);
            case 35:
                apply(null);
            case 36:
                apply("CREATE TABLE IF NOT EXISTS peer (address VARCHAR PRIMARY KEY)");
            case 37:
                // Historical well-known-peer seeding, since disabled; kept commented out
                // so the step numbering (and this version bump) remains stable.
                /* if (!Constants.isTestnet) {
                    apply("INSERT INTO peer (address) VALUES " +
                            "('81.220.60.240'), ('nxt3.webice.ru'), ('nxtx.ru'), ('162.243.87.10'), ('80.153.101.190'), ('185.12.44.108'), " +
                            "('79.145.11.180'), ('95.85.31.45'), ('81.169.150.141'), ('megaman.thican.net'), ('54.65.93.59'), " +
                            "('23.88.59.163'), ('213.46.57.77'), ('37.59.115.204'), ('54.245.255.250'), ('178.62.185.131'), " +
                            "('82.0.149.148'), ('gunka.szn.dk'), ('104.219.53.3'), ('txn14.cloudapp.net'), ('46.4.35.166'), " +
                            "('80.150.243.95'), ('80.150.243.97'), ('pakisnxt.no-ip.org'), ('46.109.89.219'), ('5.196.227.91'), " +
                            "('185.61.148.216'), ('miasik.no-ip.org'), ('213.239.201.57'), ('192.99.68.108'), ('107.170.43.82'), " +
                            "('node7.mynxtcoin.org'), ('54.72.170.192'), ('23.88.59.40'), ('54.214.250.209'), ('nxt01.now.im'), " +
                            "('54.65.215.219'), ('54.154.46.55'), ('78.46.198.253'), ('jnxt.org'), ('108.61.57.76'), ('91.202.253.240'), " +
                            "('80.150.243.10'), ('80.150.243.11'), ('87.138.143.21'), ('46.246.44.44'), ('113.106.85.172'), " +
                            "('78.56.100.171'), ('84.195.25.169'), ('cryptkeeper.vps.nxtcrypto.org'), ('128.199.112.173'), " +
                            "('89.250.240.63'), ('89.250.240.60'), ('sluni.szn.dk'), ('91.214.169.126'), ('54.213.222.141'), " +
                            "('zdani.szn.dk'), ('107.170.164.129'), ('nxtnode.hopto.org'), ('82.46.194.21'), ('121.42.137.198'), " +
                            "('54.68.43.61'), ('54.69.53.67'), ('84.242.91.139'), ('54.83.4.11'), ('184.164.72.177'), " +
                            "('enricoip.no-ip.biz'), ('nxt8.webice.ru'), ('162.243.122.251'), ('78.106.115.210'), ('84.253.125.186'), " +
                            "('node5.mynxtcoin.org'), ('178.24.154.23'), ('1.34.219.47'), ('134.119.24.206'), ('85.25.198.120'), " +
                            "('dilnu.szn.dk'), ('178.150.207.53'), ('nxt.sx'), ('nxt5.webice.ru'), ('85.84.67.68'), ('107.155.87.235'), " +
                            "('85.25.43.169'), ('23.89.192.151'), ('95.215.44.229'), ('174.140.166.124'), ('87.139.122.48'), " +
                            "('23.92.53.243'), ('87.139.122.157'), ('5.147.113.212'), ('ct.flipflop.mooo.com'), ('nxt.alkeron.com'), " +
                            "('67.212.71.173'), ('nxt9.webice.ru'), ('nxt.hopto.me'), ('77.88.208.12'), ('66.30.204.105'), " +
                            "('188.40.96.10'), ('62.194.6.163'), ('217.17.88.5'), ('41.242.1.239'), ('89.212.19.49'), ('162.243.242.8'), " +
                            "('162.220.240.171'), ('109.254.63.44'), ('beor.homeip.net'), ('2.225.88.10'), ('91.121.150.75'), " +
                            "('dorcsforge.cloudapp.net'), ('nxt.cybermailing.com'), ('46.28.111.249'), ('209.126.70.159'), " +
                            "('178.15.99.67'), ('104.131.254.22'), ('198.211.127.34'), ('191.238.101.73'), ('vps.krusherpt.com'), " +
                            "('vh44.ddns.net:7873'), ('77.58.253.73'), ('198.57.198.33'), ('92.222.72.98'), ('178.20.9.9'), " +
                            "('81.23.22.150'), ('23.102.0.45'), ('88.163.78.131'), ('54.169.132.50'), ('37.59.14.7'), ('85.214.222.82'), " +
                            "('nacho.damnserver.com'), ('103.25.59.185'), ('54.69.182.12'), ('89.72.57.246'), ('209.222.2.110'), " +
                            "('78.46.92.78'), ('192.3.158.120'), ('107.170.75.92'), ('bug.airdns.org'), ('91.98.139.194'), ('37.187.238.130'), " +
                            "('195.154.111.47'), ('37.187.21.28'), ('node4.mynxtcoin.org'), ('scripterron.dyndns.biz'), ('82.165.145.37'), " +
                            "('94.26.187.66'), ('node2.krusherpt.com'), ('node0.forgenxt.com'), ('198.199.95.15'), ('88.188.122.73'), " +
                            "('95.116.255.148'), ('91.69.121.207'), ('178.33.203.157'), ('node6.mynxtcoin.org'), ('54.149.167.132'), " +
                            "('190.10.9.166'), ('24.149.126.206'), ('132.230.76.160'), ('humanoide.thican.net')");
                } else {
                    apply("INSERT INTO peer (address) VALUES " +
                            "('nxt.scryptmh.eu'), ('54.186.98.117'), ('178.150.207.53'), ('192.241.223.132'), ('node9.mynxtcoin.org'), " +
                            "('node10.mynxtcoin.org'), ('node3.mynxtcoin.org'), ('109.87.169.253'), ('nxtnet.fr'), ('50.112.241.97'), " +
                            "('2.84.142.149'), ('bug.airdns.org'), ('83.212.103.14'), ('62.210.131.30'), ('104.131.254.22'), " +
                            "('46.28.111.249'), ('94.79.54.205'), ('174.140.168.136'), ('107.170.3.62'), ('node1.forgenxt.com'), " +
                            "('5.196.1.215'), ('nxt01.now.im'), ('37.139.6.166')");
                } */
            // --- Switch transaction identity from hash to full_hash (cases 38-52) ---
            case 38:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS full_hash BINARY(32)");
            case 39:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS referenced_transaction_full_hash BINARY(32)");
            case 40:
                apply(null);
            case 41:
                apply("ALTER TABLE transaction ALTER COLUMN full_hash SET NOT NULL");
            case 42:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS transaction_full_hash_idx ON transaction (full_hash)");
            case 43:
                apply(null);
            case 44:
                apply(null);
            case 45:
                apply(null);
            case 46:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS attachment_bytes VARBINARY");
            case 47:
                apply(null);
            case 48:
                apply("ALTER TABLE transaction DROP COLUMN attachment");
            case 49:
                apply(null);
            case 50:
                apply("ALTER TABLE transaction DROP COLUMN referenced_transaction_id");
            case 51:
                apply("ALTER TABLE transaction DROP COLUMN hash");
            case 52:
                apply(null);
            // --- Make recipient_id nullable; forces a full chain re-download (cases 53-56) ---
            case 53:
                apply("DROP INDEX transaction_recipient_id_idx");
            case 54:
                apply("ALTER TABLE transaction ALTER COLUMN recipient_id SET NULL");
            case 55:
                BlockDb.deleteAll(); // wipe stored blocks so the chain is re-downloaded under the new schema
                apply(null);
            case 56:
                apply("CREATE INDEX IF NOT EXISTS transaction_recipient_id_idx ON transaction (recipient_id)");
            case 57:
                apply(null);
            case 58:
                apply(null);
            // --- Transaction versioning and appendix flags (cases 59-68) ---
            case 59:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS version TINYINT");
            case 60:
                apply("UPDATE transaction SET version = 0");
            case 61:
                apply("ALTER TABLE transaction ALTER COLUMN version SET NOT NULL");
            case 62:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS has_message BOOLEAN NOT NULL DEFAULT FALSE");
            case 63:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS has_encrypted_message BOOLEAN NOT NULL DEFAULT FALSE");
            case 64:
                // type 1 subtype 0 is the arbitrary-message transaction type
                apply("UPDATE transaction SET has_message = TRUE WHERE type = 1 AND subtype = 0");
            case 65:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS has_public_key_announcement BOOLEAN NOT NULL DEFAULT FALSE");
            case 66:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS ec_block_height INT DEFAULT NULL");
            case 67:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS ec_block_id BIGINT DEFAULT NULL");
            case 68:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS has_encrypttoself_message BOOLEAN NOT NULL DEFAULT FALSE");
            case 69:
                apply("CREATE INDEX IF NOT EXISTS transaction_block_timestamp_idx ON transaction (block_timestamp DESC)");
            case 70:
                apply("DROP INDEX transaction_timestamp_idx");
            // --- Derived-table schema: aliases, assets, trades, orders, DGS, accounts (cases 71-128) ---
            case 71:
                apply("CREATE TABLE IF NOT EXISTS alias (db_id IDENTITY, id BIGINT NOT NULL, " +
                        "account_id BIGINT NOT NULL, alias_name VARCHAR NOT NULL, " +
                        "alias_name_lower VARCHAR AS LOWER (alias_name) NOT NULL, " +
                        "alias_uri VARCHAR NOT NULL, timestamp INT NOT NULL, " +
                        "height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 72:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS alias_id_height_idx ON alias (id, height DESC)");
            case 73:
                apply("CREATE INDEX IF NOT EXISTS alias_account_id_idx ON alias (account_id, height DESC)");
            case 74:
                apply("CREATE INDEX IF NOT EXISTS alias_name_lower_idx ON alias (alias_name_lower)");
            case 75:
                apply("CREATE TABLE IF NOT EXISTS alias_offer (db_id IDENTITY, id BIGINT NOT NULL, " +
                        "price BIGINT NOT NULL, buyer_id BIGINT, " +
                        "height INT NOT NULL, latest BOOLEAN DEFAULT TRUE NOT NULL)");
            case 76:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS alias_offer_id_height_idx ON alias_offer (id, height DESC)");
            case 77:
                apply("CREATE TABLE IF NOT EXISTS asset (db_id IDENTITY, id BIGINT NOT NULL, account_id BIGINT NOT NULL, " +
                        "name VARCHAR NOT NULL, description VARCHAR, quantity BIGINT NOT NULL, decimals TINYINT NOT NULL, " +
                        "height INT NOT NULL)");
            case 78:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS asset_id_idx ON asset (id)");
            case 79:
                apply("CREATE INDEX IF NOT EXISTS asset_account_id_idx ON asset (account_id)");
            case 80:
                apply("CREATE TABLE IF NOT EXISTS trade (db_id IDENTITY, asset_id BIGINT NOT NULL, block_id BIGINT NOT NULL, " +
                        "ask_order_id BIGINT NOT NULL, bid_order_id BIGINT NOT NULL, ask_order_height INT NOT NULL, " +
                        "bid_order_height INT NOT NULL, seller_id BIGINT NOT NULL, buyer_id BIGINT NOT NULL, " +
                        "quantity BIGINT NOT NULL, price BIGINT NOT NULL, timestamp INT NOT NULL, height INT NOT NULL)");
            case 81:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS trade_ask_bid_idx ON trade (ask_order_id, bid_order_id)");
            case 82:
                apply("CREATE INDEX IF NOT EXISTS trade_asset_id_idx ON trade (asset_id, height DESC)");
            case 83:
                apply("CREATE INDEX IF NOT EXISTS trade_seller_id_idx ON trade (seller_id, height DESC)");
            case 84:
                apply("CREATE INDEX IF NOT EXISTS trade_buyer_id_idx ON trade (buyer_id, height DESC)");
            case 85:
                apply("CREATE TABLE IF NOT EXISTS ask_order (db_id IDENTITY, id BIGINT NOT NULL, account_id BIGINT NOT NULL, " +
                        "asset_id BIGINT NOT NULL, price BIGINT NOT NULL, " +
                        "quantity BIGINT NOT NULL, creation_height INT NOT NULL, height INT NOT NULL, " +
                        "latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 86:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS ask_order_id_height_idx ON ask_order (id, height DESC)");
            case 87:
                apply("CREATE INDEX IF NOT EXISTS ask_order_account_id_idx ON ask_order (account_id, height DESC)");
            case 88:
                apply("CREATE INDEX IF NOT EXISTS ask_order_asset_id_price_idx ON ask_order (asset_id, price)");
            case 89:
                apply("CREATE TABLE IF NOT EXISTS bid_order (db_id IDENTITY, id BIGINT NOT NULL, account_id BIGINT NOT NULL, " +
                        "asset_id BIGINT NOT NULL, price BIGINT NOT NULL, " +
                        "quantity BIGINT NOT NULL, creation_height INT NOT NULL, height INT NOT NULL, " +
                        "latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 90:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS bid_order_id_height_idx ON bid_order (id, height DESC)");
            case 91:
                apply("CREATE INDEX IF NOT EXISTS bid_order_account_id_idx ON bid_order (account_id, height DESC)");
            case 92:
                apply("CREATE INDEX IF NOT EXISTS bid_order_asset_id_price_idx ON bid_order (asset_id, price DESC)");
            case 93:
                apply("CREATE TABLE IF NOT EXISTS goods (db_id IDENTITY, id BIGINT NOT NULL, seller_id BIGINT NOT NULL, " +
                        "name VARCHAR NOT NULL, description VARCHAR, " +
                        "tags VARCHAR, timestamp INT NOT NULL, quantity INT NOT NULL, price BIGINT NOT NULL, " +
                        "delisted BOOLEAN NOT NULL, height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 94:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS goods_id_height_idx ON goods (id, height DESC)");
            case 95:
                apply("CREATE INDEX IF NOT EXISTS goods_seller_id_name_idx ON goods (seller_id, name)");
            case 96:
                apply("CREATE INDEX IF NOT EXISTS goods_timestamp_idx ON goods (timestamp DESC, height DESC)");
            case 97:
                apply("CREATE TABLE IF NOT EXISTS purchase (db_id IDENTITY, id BIGINT NOT NULL, buyer_id BIGINT NOT NULL, " +
                        "goods_id BIGINT NOT NULL, " +
                        "seller_id BIGINT NOT NULL, quantity INT NOT NULL, " +
                        "price BIGINT NOT NULL, deadline INT NOT NULL, note VARBINARY, nonce BINARY(32), " +
                        "timestamp INT NOT NULL, pending BOOLEAN NOT NULL, goods VARBINARY, goods_nonce BINARY(32), " +
                        "refund_note VARBINARY, refund_nonce BINARY(32), has_feedback_notes BOOLEAN NOT NULL DEFAULT FALSE, " +
                        "has_public_feedbacks BOOLEAN NOT NULL DEFAULT FALSE, discount BIGINT NOT NULL, refund BIGINT NOT NULL, " +
                        "height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 98:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS purchase_id_height_idx ON purchase (id, height DESC)");
            case 99:
                apply("CREATE INDEX IF NOT EXISTS purchase_buyer_id_height_idx ON purchase (buyer_id, height DESC)");
            case 100:
                apply("CREATE INDEX IF NOT EXISTS purchase_seller_id_height_idx ON purchase (seller_id, height DESC)");
            case 101:
                apply("CREATE INDEX IF NOT EXISTS purchase_deadline_idx ON purchase (deadline DESC, height DESC)");
            case 102:
                apply("CREATE TABLE IF NOT EXISTS account (db_id IDENTITY, id BIGINT NOT NULL, creation_height INT NOT NULL, " +
                        "public_key BINARY(32), key_height INT, balance BIGINT NOT NULL, unconfirmed_balance BIGINT NOT NULL, " +
                        "forged_balance BIGINT NOT NULL, name VARCHAR, description VARCHAR, current_leasing_height_from INT, " +
                        "current_leasing_height_to INT, current_lessee_id BIGINT NULL, next_leasing_height_from INT, " +
                        "next_leasing_height_to INT, next_lessee_id BIGINT NULL, height INT NOT NULL, " +
                        "latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 103:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS account_id_height_idx ON account (id, height DESC)");
            case 104:
                apply("CREATE INDEX IF NOT EXISTS account_current_lessee_id_leasing_height_idx ON account (current_lessee_id, " +
                        "current_leasing_height_to DESC)");
            case 105:
                apply("CREATE TABLE IF NOT EXISTS account_asset (db_id IDENTITY, account_id BIGINT NOT NULL, " +
                        "asset_id BIGINT NOT NULL, quantity BIGINT NOT NULL, unconfirmed_quantity BIGINT NOT NULL, height INT NOT NULL, " +
                        "latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 106:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS account_asset_id_height_idx ON account_asset (account_id, asset_id, height DESC)");
            case 107:
                apply("CREATE TABLE IF NOT EXISTS account_guaranteed_balance (db_id IDENTITY, account_id BIGINT NOT NULL, " +
                        "additions BIGINT NOT NULL, height INT NOT NULL)");
            case 108:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS account_guaranteed_balance_id_height_idx ON account_guaranteed_balance " +
                        "(account_id, height DESC)");
            case 109:
                apply("CREATE TABLE IF NOT EXISTS purchase_feedback (db_id IDENTITY, id BIGINT NOT NULL, feedback_data VARBINARY NOT NULL, " +
                        "feedback_nonce BINARY(32) NOT NULL, height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 110:
                apply("CREATE INDEX IF NOT EXISTS purchase_feedback_id_height_idx ON purchase_feedback (id, height DESC)");
            case 111:
                apply("CREATE TABLE IF NOT EXISTS purchase_public_feedback (db_id IDENTITY, id BIGINT NOT NULL, public_feedback " +
                        "VARCHAR NOT NULL, height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 112:
                apply("CREATE INDEX IF NOT EXISTS purchase_public_feedback_id_height_idx ON purchase_public_feedback (id, height DESC)");
            case 113:
                apply("CREATE TABLE IF NOT EXISTS unconfirmed_transaction (db_id IDENTITY, id BIGINT NOT NULL, expiration INT NOT NULL, " +
                        "transaction_height INT NOT NULL, fee_per_byte BIGINT NOT NULL, timestamp INT NOT NULL, " +
                        "transaction_bytes VARBINARY NOT NULL, height INT NOT NULL)");
            case 114:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS unconfirmed_transaction_id_idx ON unconfirmed_transaction (id)");
            case 115:
                apply("CREATE INDEX IF NOT EXISTS unconfirmed_transaction_height_fee_timestamp_idx ON unconfirmed_transaction " +
                        "(transaction_height ASC, fee_per_byte DESC, timestamp ASC)");
            case 116:
                apply("CREATE TABLE IF NOT EXISTS asset_transfer (db_id IDENTITY, id BIGINT NOT NULL, asset_id BIGINT NOT NULL, " +
                        "sender_id BIGINT NOT NULL, recipient_id BIGINT NOT NULL, quantity BIGINT NOT NULL, timestamp INT NOT NULL, " +
                        "height INT NOT NULL)");
            case 117:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS asset_transfer_id_idx ON asset_transfer (id)");
            case 118:
                apply("CREATE INDEX IF NOT EXISTS asset_transfer_asset_id_idx ON asset_transfer (asset_id, height DESC)");
            case 119:
                apply("CREATE INDEX IF NOT EXISTS asset_transfer_sender_id_idx ON asset_transfer (sender_id, height DESC)");
            case 120:
                apply("CREATE INDEX IF NOT EXISTS asset_transfer_recipient_id_idx ON asset_transfer (recipient_id, height DESC)");
            case 121:
                apply(null);
            case 122:
                apply("CREATE INDEX IF NOT EXISTS account_asset_quantity_idx ON account_asset (quantity DESC)");
            case 123:
                apply("CREATE INDEX IF NOT EXISTS purchase_timestamp_idx ON purchase (timestamp DESC, id)");
            case 124:
                apply("CREATE INDEX IF NOT EXISTS ask_order_creation_idx ON ask_order (creation_height DESC)");
            case 125:
                apply("CREATE INDEX IF NOT EXISTS bid_order_creation_idx ON bid_order (creation_height DESC)");
            case 126:
                apply(null);
            case 127:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS block_timestamp_idx ON block (timestamp DESC)");
            case 128:
                apply(null);
            // --- Lucene full-text search setup (cases 129-133) ---
            case 129:
                apply("ALTER TABLE goods ADD COLUMN IF NOT EXISTS parsed_tags ARRAY");
            case 130:
                apply("CREATE ALIAS IF NOT EXISTS FTL_INIT FOR \"org.h2.fulltext.FullTextLucene.init\"");
            case 131:
                apply("CALL FTL_INIT()");
            case 132:
                apply("CALL FTL_CREATE_INDEX('PUBLIC', 'GOODS', 'NAME,DESCRIPTION,TAGS')");
            case 133:
                apply("CALL FTL_CREATE_INDEX('PUBLIC', 'ASSET', 'NAME,DESCRIPTION')");
            case 134:
                apply("CREATE TABLE IF NOT EXISTS tag (db_id IDENTITY, tag VARCHAR NOT NULL, in_stock_count INT NOT NULL, " +
                        "total_count INT NOT NULL, height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 135:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS tag_tag_idx ON tag (tag, height DESC)");
            case 136:
                apply("CREATE INDEX IF NOT EXISTS tag_in_stock_count_idx ON tag (in_stock_count DESC, height DESC)");
            case 137:
                apply(null);
            // --- Monetary System: currencies, offers, exchanges, transfers (cases 138-167) ---
            case 138:
                apply("CREATE TABLE IF NOT EXISTS currency (db_id IDENTITY, id BIGINT NOT NULL, account_id BIGINT NOT NULL, " +
                        "name VARCHAR NOT NULL, name_lower VARCHAR AS LOWER (name) NOT NULL, code VARCHAR NOT NULL, " +
                        "description VARCHAR, type INT NOT NULL, initial_supply BIGINT NOT NULL DEFAULT 0, current_supply BIGINT NOT NULL, " +
                        "reserve_supply BIGINT NOT NULL, max_supply BIGINT NOT NULL, creation_height INT NOT NULL, issuance_height INT NOT NULL, " +
                        "min_reserve_per_unit_nqt BIGINT NOT NULL, min_difficulty TINYINT NOT NULL, " +
                        "max_difficulty TINYINT NOT NULL, ruleset TINYINT NOT NULL, algorithm TINYINT NOT NULL, " +
                        "current_reserve_per_unit_nqt BIGINT NOT NULL, decimals TINYINT NOT NULL DEFAULT 0," +
                        "height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 139:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS currency_id_height_idx ON currency (id, height DESC)");
            case 140:
                apply("CREATE INDEX IF NOT EXISTS currency_account_id_idx ON currency (account_id)");
            case 141:
                apply("CREATE TABLE IF NOT EXISTS account_currency (db_id IDENTITY, account_id BIGINT NOT NULL, " +
                        "currency_id BIGINT NOT NULL, units BIGINT NOT NULL, unconfirmed_units BIGINT NOT NULL, height INT NOT NULL, " +
                        "latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 142:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS account_currency_id_height_idx ON account_currency (account_id, currency_id, height DESC)");
            case 143:
                apply("CREATE TABLE IF NOT EXISTS currency_founder (db_id IDENTITY, currency_id BIGINT NOT NULL, " +
                        "account_id BIGINT NOT NULL, amount BIGINT NOT NULL, " +
                        "height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 144:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS currency_founder_currency_id_idx ON currency_founder (currency_id, account_id, height DESC)");
            case 145:
                apply("CREATE TABLE IF NOT EXISTS currency_mint (db_id IDENTITY, currency_id BIGINT NOT NULL, account_id BIGINT NOT NULL, " +
                        "counter BIGINT NOT NULL, height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 146:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS currency_mint_currency_id_account_id_idx ON currency_mint (currency_id, account_id, height DESC)");
            case 147:
                apply("CREATE TABLE IF NOT EXISTS buy_offer (db_id IDENTITY, id BIGINT NOT NULL, currency_id BIGINT NOT NULL, account_id BIGINT NOT NULL," +
                        "rate BIGINT NOT NULL, unit_limit BIGINT NOT NULL, supply BIGINT NOT NULL, expiration_height INT NOT NULL," +
                        "creation_height INT NOT NULL, transaction_index SMALLINT NOT NULL, height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 148:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS buy_offer_id_idx ON buy_offer (id, height DESC)");
            case 149:
                apply("CREATE INDEX IF NOT EXISTS buy_offer_currency_id_account_id_idx ON buy_offer (currency_id, account_id, height DESC)");
            case 150:
                apply("CREATE TABLE IF NOT EXISTS sell_offer (db_id IDENTITY, id BIGINT NOT NULL, currency_id BIGINT NOT NULL, account_id BIGINT NOT NULL, " +
                        "rate BIGINT NOT NULL, unit_limit BIGINT NOT NULL, supply BIGINT NOT NULL, expiration_height INT NOT NULL, " +
                        "creation_height INT NOT NULL, transaction_index SMALLINT NOT NULL, height INT NOT NULL, latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 151:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS sell_offer_id_idx ON sell_offer (id, height DESC)");
            case 152:
                apply("CREATE INDEX IF NOT EXISTS sell_offer_currency_id_account_id_idx ON sell_offer (currency_id, account_id, height DESC)");
            case 153:
                apply("CREATE TABLE IF NOT EXISTS exchange (db_id IDENTITY, transaction_id BIGINT NOT NULL, currency_id BIGINT NOT NULL, block_id BIGINT NOT NULL, " +
                        "offer_id BIGINT NOT NULL, seller_id BIGINT NOT NULL, " +
                        "buyer_id BIGINT NOT NULL, units BIGINT NOT NULL, " +
                        "rate BIGINT NOT NULL, timestamp INT NOT NULL, height INT NOT NULL)");
            case 154:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS exchange_offer_idx ON exchange (transaction_id, offer_id)");
            case 155:
                apply("CREATE INDEX IF NOT EXISTS exchange_currency_id_idx ON exchange (currency_id, height DESC)");
            case 156:
                apply("CREATE INDEX IF NOT EXISTS exchange_seller_id_idx ON exchange (seller_id, height DESC)");
            case 157:
                apply("CREATE INDEX IF NOT EXISTS exchange_buyer_id_idx ON exchange (buyer_id, height DESC)");
            case 158:
                apply("CREATE TABLE IF NOT EXISTS currency_transfer (db_id IDENTITY, id BIGINT NOT NULL, currency_id BIGINT NOT NULL, " +
                        "sender_id BIGINT NOT NULL, recipient_id BIGINT NOT NULL, units BIGINT NOT NULL, timestamp INT NOT NULL, " +
                        "height INT NOT NULL)");
            case 159:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS currency_transfer_id_idx ON currency_transfer (id)");
            case 160:
                apply("CREATE INDEX IF NOT EXISTS currency_transfer_currency_id_idx ON currency_transfer (currency_id, height DESC)");
            case 161:
                apply("CREATE INDEX IF NOT EXISTS currency_transfer_sender_id_idx ON currency_transfer (sender_id, height DESC)");
            case 162:
                apply("CREATE INDEX IF NOT EXISTS currency_transfer_recipient_id_idx ON currency_transfer (recipient_id, height DESC)");
            case 163:
                apply("CREATE INDEX IF NOT EXISTS account_currency_units_idx ON account_currency (units DESC)");
            case 164:
                apply("CREATE INDEX IF NOT EXISTS currency_name_idx ON currency (name_lower, height DESC)");
            case 165:
                apply("CREATE INDEX IF NOT EXISTS currency_code_idx ON currency (code, height DESC)");
            case 166:
                apply("CREATE INDEX IF NOT EXISTS buy_offer_rate_height_idx ON buy_offer (rate DESC, creation_height ASC)");
            case 167:
                apply("CREATE INDEX IF NOT EXISTS sell_offer_rate_height_idx ON sell_offer (rate ASC, creation_height ASC)");
            case 168:
                apply("ALTER TABLE account ADD COLUMN IF NOT EXISTS message_pattern_regex VARCHAR");
            case 169:
                apply("ALTER TABLE account ADD COLUMN IF NOT EXISTS message_pattern_flags INT");
            // --- Replace unconfirmed tx timestamp with arrival_timestamp (cases 170-173) ---
            case 170:
                apply("DROP INDEX IF EXISTS unconfirmed_transaction_height_fee_timestamp_idx");
            case 171:
                apply("ALTER TABLE unconfirmed_transaction DROP COLUMN IF EXISTS timestamp");
            case 172:
                apply("ALTER TABLE unconfirmed_transaction ADD COLUMN IF NOT EXISTS arrival_timestamp BIGINT NOT NULL DEFAULT 0");
            case 173:
                apply("CREATE INDEX IF NOT EXISTS unconfirmed_transaction_height_fee_timestamp_idx ON unconfirmed_transaction " +
                        "(transaction_height ASC, fee_per_byte DESC, arrival_timestamp ASC)");
            // --- transaction_index columns; requires wiping stored blocks again (cases 174-182) ---
            case 174:
                BlockDb.deleteAll(); // wipe blocks so transaction_index can be populated on re-download
                apply(null);
            case 175:
                apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS transaction_index SMALLINT NOT NULL");
            case 176:
                apply(null);
            case 177:
                apply("TRUNCATE TABLE ask_order");
            case 178:
                apply("ALTER TABLE ask_order ADD COLUMN IF NOT EXISTS transaction_index SMALLINT NOT NULL");
            case 179:
                apply(null);
            case 180:
                apply("TRUNCATE TABLE bid_order");
            case 181:
                apply("ALTER TABLE bid_order ADD COLUMN IF NOT EXISTS transaction_index SMALLINT NOT NULL");
            case 182:
                apply(null);
            case 183:
                apply("CALL FTL_CREATE_INDEX('PUBLIC', 'CURRENCY', 'CODE,NAME,DESCRIPTION')");
            case 184:
                apply("CREATE TABLE IF NOT EXISTS scan (rescan BOOLEAN NOT NULL DEFAULT FALSE, height INT NOT NULL DEFAULT 0, " +
                        "validate BOOLEAN NOT NULL DEFAULT FALSE)");
            case 185:
                // seed the single scan-state row
                apply("INSERT INTO scan (rescan, height, validate) VALUES (false, 0, false)");
            case 186:
                apply("CREATE INDEX IF NOT EXISTS currency_creation_height_idx ON currency (creation_height DESC)");
            case 187:
                apply(null);
            case 188:
                apply(null);
            case 189:
                apply(null);
            case 190:
                apply(null);
            case 191:
                apply(null);
            case 192:
                // testnet-only full validating rescan scheduled as part of this upgrade
                if (Constants.isTestnet) {
                    BlockchainProcessorImpl.getInstance().scheduleScan(0, true);
                }
                apply(null);
            // --- Move mutable currency supply fields into currency_supply (cases 193-198) ---
            case 193:
                apply("CREATE TABLE IF NOT EXISTS currency_supply (db_id IDENTITY, id BIGINT NOT NULL, " +
                        "current_supply BIGINT NOT NULL, current_reserve_per_unit_nqt BIGINT NOT NULL, height INT NOT NULL, " +
                        "latest BOOLEAN NOT NULL DEFAULT TRUE)");
            case 194:
                apply("CREATE UNIQUE INDEX IF NOT EXISTS currency_supply_id_height_idx ON currency_supply (id, height DESC)");
            case 195:
                apply("TRUNCATE TABLE currency");
            case 196:
                apply("ALTER TABLE currency DROP COLUMN IF EXISTS current_supply");
            case 197:
                apply("ALTER TABLE currency DROP COLUMN IF EXISTS current_reserve_per_unit_nqt");
            case 198:
                // rebuild derived tables (non-validating rescan) after the currency refactor
                BlockchainProcessorImpl.getInstance().scheduleScan(0, false);
                apply(null);
            case 199:
                apply("ALTER TABLE block ADD COLUMN IF NOT EXISTS nonce BIGINT NOT NULL");
            case 200:
                // current schema version: database is up to date
                return;
            default:
                throw new RuntimeException("Blockchain database inconsistent with code, probably trying to run older code on newer database");
        }
    }
}
/** * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.credit.creditdefaultswap.definition; import javax.time.calendar.ZonedDateTime; import org.apache.commons.lang.ObjectUtils; import com.opengamma.analytics.financial.credit.BuySellProtection; import com.opengamma.analytics.financial.credit.DebtSeniority; import com.opengamma.analytics.financial.credit.PriceType; import com.opengamma.analytics.financial.credit.RestructuringClause; import com.opengamma.analytics.financial.credit.StubType; import com.opengamma.analytics.financial.credit.obligormodel.definition.Obligor; import com.opengamma.financial.convention.businessday.BusinessDayConvention; import com.opengamma.financial.convention.calendar.Calendar; import com.opengamma.financial.convention.daycount.DayCount; import com.opengamma.financial.convention.frequency.PeriodFrequency; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.money.Currency; /** * Definition of a generic Single Name Credit Default Swap contract (different types of CDS will inherit from this) */ public abstract class CreditDefaultSwapDefinition { // ---------------------------------------------------------------------------------------------------------------------------------------- // Cashflow Conventions are assumed to be as below (these will apply throughout the entire credit suite for credit default swaps) // Notional amount > 0 always - long/short positions are captured by the setting of the 'BuySellProtection' flag // This convention is chosen to avoid confusion about whether a negative notional means a long/short position etc // Buy protection -> Pay premium leg, receive contingent leg -> 'long' protection -> 'short' credit risk // Sell protection -> Receive premium leg, pay contingent leg -> 'short' protection -> 'long' credit risk // Coupon conventions - coupons are always assumed to be entered in 
// bps (therefore there are internal conversions to absolute values by division by 10,000)
  // ----------------------------------------------------------------------------------------------------------------------------------------

  // TODO : Check that buyer is not equal to the seller etc
  // TODO : More detailed description of ref entity obligation will be necessary
  // TODO : Move _protectionStart and _protectionOffset variables into the PV calculator?

  // ----------------------------------------------------------------------------------------------------------------------------------------

  // Member variables (all private and final) of the CDS contract (defines what a CDS is)

  // From the users perspective, are we buying or selling protection
  private final BuySellProtection _buySellProtection;

  // The (three) counterparties in the trade
  private final Obligor _protectionBuyer;
  private final Obligor _protectionSeller;
  private final Obligor _referenceEntity;

  // The currency the trade is executed in e.g. USD
  private final Currency _currency;

  // The seniority of the debt of the reference entity the CDS is written on
  private final DebtSeniority _debtSeniority;

  // The restructuring type in the event of a credit event deemed to be a restructuring of the reference entities debt
  private final RestructuringClause _restructuringClause;

  // Holiday calendar for the determination of adjusted business days in the cashflow schedule
  private final Calendar _calendar;

  // The date of the contract inception
  private final ZonedDateTime _startDate;

  // The effective date for protection to begin (usually T + 1d for a legacy CDS, T - 60d or T - 90d for a standard CDS)
  private final ZonedDateTime _effectiveDate;

  // The maturity date of the contract (when premium and protection coverage ceases)
  private final ZonedDateTime _maturityDate;

  // The date on which we want to calculate the CDS MtM
  private final ZonedDateTime _valuationDate;

  // The method for generating the schedule of premium payments
  private final StubType _stubType;

  // The frequency of coupon payments (usually quarterly for legacy and standard CDS)
  private final PeriodFrequency _couponFrequency;

  // Day-count convention (usually Act/360 for legacy and standard CDS)
  private final DayCount _daycountFractionConvention;

  // Business day adjustment convention (usually following for legacy and standard CDS)
  private final BusinessDayConvention _businessdayAdjustmentConvention;

  // Flag to determine if we adjust the maturity date to fall on the next IMM date
  private final boolean _immAdjustMaturityDate;

  // Flag to determine if we business day adjust the user input effective date (not a feature of legacy or standard CDS)
  private final boolean _adjustEffectiveDate;

  // Flag to determine if we business day adjust the final maturity date (not a feature of legacy or standard CDS)
  private final boolean _adjustMaturityDate;

  // The trade notional (in the trade currency), convention is that this will always be a positive amount
  private final double _notional;

  // The recovery rate to be used in the calculation of the CDS MtM (the recovery used in pricing can be different to the rate used to calibrate the hazard rates)
  private final double _recoveryRate;

  // Flag to determine whether the accrued coupons should be included in the CDS premium leg calculation
  private final boolean _includeAccruedPremium;

  // Calculate clean or dirty price (clean price includes the accrued interest from valuation date to the previous coupon date)
  private final PriceType _priceType;

  // Flag to determine if survival probabilities are calculated at the beginning or end of the day (hard coded to TRUE in ISDA model)
  private final boolean _protectionStart;

  // The credit key to uniquely identify a reference entities par spread CDS curve
  private final String _creditKey;

  // If _protectionStart = true then this is the offset (one extra day of protection)
  private final double _protectionOffset = 1.0 / 365.0;

  // ----------------------------------------------------------------------------------------------------------------------------------------

  /**
   * Constructor for a CDS contract object.
   *
   * Validates that all object arguments are non-null, that the unadjusted dates are
   * temporally ordered (start <= effective <= valuation <= maturity), that the notional
   * and recovery rate are non-negative and that the recovery rate does not exceed 100%.
   *
   * @throws IllegalArgumentException (via ArgumentChecker) if any precondition fails
   */
  public CreditDefaultSwapDefinition(
      BuySellProtection buySellProtection,
      Obligor protectionBuyer,
      Obligor protectionSeller,
      Obligor referenceEntity,
      Currency currency,
      DebtSeniority debtSeniority,
      RestructuringClause restructuringClause,
      Calendar calendar,
      ZonedDateTime startDate,
      ZonedDateTime effectiveDate,
      ZonedDateTime maturityDate,
      ZonedDateTime valuationDate,
      StubType stubType,
      PeriodFrequency couponFrequency,
      DayCount daycountFractionConvention,
      BusinessDayConvention businessdayAdjustmentConvention,
      boolean immAdjustMaturityDate,
      boolean adjustEffectiveDate,
      boolean adjustMaturityDate,
      double notional,
      double recoveryRate,
      boolean includeAccruedPremium,
      PriceType priceType,
      boolean protectionStart) {

    // ------------------------------------------------------------------------------------------------

    // Check the validity of the input arguments

    ArgumentChecker.notNull(buySellProtection, "Buy/Sell");

    ArgumentChecker.notNull(protectionBuyer, "Protection buyer");
    ArgumentChecker.notNull(protectionSeller, "Protection seller");
    ArgumentChecker.notNull(referenceEntity, "Reference entity");

    ArgumentChecker.notNull(currency, "Currency");
    ArgumentChecker.notNull(debtSeniority, "Debt seniority");
    ArgumentChecker.notNull(restructuringClause, "Restructuring clause");
    ArgumentChecker.notNull(calendar, "Calendar");

    ArgumentChecker.notNull(startDate, "Start date");
    ArgumentChecker.notNull(effectiveDate, "Effective date");
    ArgumentChecker.notNull(maturityDate, "Maturity date");
    ArgumentChecker.notNull(valuationDate, "Valuation date");

    // Check the temporal ordering of the input dates (these are the unadjusted dates entered by the user)
    ArgumentChecker.isTrue(!startDate.isAfter(valuationDate), "Start date {} must be on or before valuation date {}", startDate, valuationDate);
    ArgumentChecker.isTrue(!startDate.isAfter(effectiveDate), "Start date {} must be on or before effective date {}", startDate, effectiveDate);
    ArgumentChecker.isTrue(!startDate.isAfter(maturityDate), "Start date {} must be on or before maturity date {}", startDate, maturityDate);
    ArgumentChecker.isTrue(!valuationDate.isAfter(maturityDate), "Valuation date {} must be on or before maturity date {}", valuationDate, maturityDate);
    ArgumentChecker.isTrue(!valuationDate.isBefore(effectiveDate), "Valuation date {} must be on or after effective date {}", valuationDate, effectiveDate);

    ArgumentChecker.notNull(stubType, "Stub Type");
    ArgumentChecker.notNull(couponFrequency, "Coupon frequency");
    ArgumentChecker.notNull(daycountFractionConvention, "Daycount convention");
    ArgumentChecker.notNull(businessdayAdjustmentConvention, "Business day adjustment convention");

    ArgumentChecker.notNegative(notional, "Notional amount");
    ArgumentChecker.notNegative(recoveryRate, "Recovery Rate");
    ArgumentChecker.isTrue(recoveryRate <= 1.0, "Recovery rate should be less than or equal to 100%");

    ArgumentChecker.notNull(priceType, "Price type");

    // ------------------------------------------------------------------------------------------------

    // Assign the member variables for the CDS object

    _buySellProtection = buySellProtection;

    _protectionBuyer = protectionBuyer;
    _protectionSeller = protectionSeller;
    _referenceEntity = referenceEntity;

    _currency = currency;
    _debtSeniority = debtSeniority;
    _restructuringClause = restructuringClause;
    _calendar = calendar;

    _startDate = startDate;
    _effectiveDate = effectiveDate;
    _maturityDate = maturityDate;
    _valuationDate = valuationDate;

    _stubType = stubType;
    _couponFrequency = couponFrequency;
    _daycountFractionConvention = daycountFractionConvention;
    _businessdayAdjustmentConvention = businessdayAdjustmentConvention;

    _immAdjustMaturityDate = immAdjustMaturityDate;
    _adjustEffectiveDate = adjustEffectiveDate;
    _adjustMaturityDate = adjustMaturityDate;

    _notional = notional;
    _recoveryRate = recoveryRate;
    _includeAccruedPremium = includeAccruedPremium;
    _priceType = priceType;
    _protectionStart = protectionStart;

    // REVIEW 29/8/2012 think about using UniqueId instead of _creditKey
    _creditKey = _referenceEntity.getObligorTicker() + "_" + _currency + "_" + _debtSeniority + "_" + _restructuringClause;

    // ------------------------------------------------------------------------------------------------
  }

  // ----------------------------------------------------------------------------------------------------------------------------------------

  // Public member accessor methods

  public BuySellProtection getBuySellProtection() {
    return _buySellProtection;
  }

  public Obligor getProtectionBuyer() {
    return _protectionBuyer;
  }

  public Obligor getProtectionSeller() {
    return _protectionSeller;
  }

  public Obligor getReferenceEntity() {
    return _referenceEntity;
  }

  //----------------------------------------------------------------------------------------------------------------------------------------

  public Currency getCurrency() {
    return _currency;
  }

  public DebtSeniority getDebtSeniority() {
    return _debtSeniority;
  }

  public RestructuringClause getRestructuringClause() {
    return _restructuringClause;
  }

  public Calendar getCalendar() {
    return _calendar;
  }

  //----------------------------------------------------------------------------------------------------------------------------------------

  public ZonedDateTime getStartDate() {
    return _startDate;
  }

  public ZonedDateTime getEffectiveDate() {
    return _effectiveDate;
  }

  public ZonedDateTime getMaturityDate() {
    return _maturityDate;
  }

  public ZonedDateTime getValuationDate() {
    return _valuationDate;
  }

  //----------------------------------------------------------------------------------------------------------------------------------------

  public StubType getStubType() {
    return _stubType;
  }

  public PeriodFrequency getCouponFrequency() {
    return _couponFrequency;
  }

  public DayCount getDayCountFractionConvention() {
    return _daycountFractionConvention;
  }

  public BusinessDayConvention getBusinessDayAdjustmentConvention() {
    return _businessdayAdjustmentConvention;
  }

  public boolean getIMMAdjustMaturityDate() {
    return _immAdjustMaturityDate;
  }

  public boolean getAdjustEffectiveDate() {
    return _adjustEffectiveDate;
  }

  public boolean getAdjustMaturityDate() {
    return _adjustMaturityDate;
  }

  //----------------------------------------------------------------------------------------------------------------------------------------

  public double getNotional() {
    return _notional;
  }

  public double getRecoveryRate() {
    return _recoveryRate;
  }

  public boolean getIncludeAccruedPremium() {
    return _includeAccruedPremium;
  }

  public PriceType getPriceType() {
    return _priceType;
  }

  public boolean getProtectionStart() {
    return _protectionStart;
  }

  //----------------------------------------------------------------------------------------------------------------------------------------

  public String getCreditKey() {
    return _creditKey;
  }

  public double getProtectionOffset() {
    return _protectionOffset;
  }

  // ----------------------------------------------------------------------------------------------------------------------------------------

  /**
   * Hash code consistent with {@link #equals(Object)}: it folds in exactly the fields
   * that equals compares (the previously commented-out version was broken - it referenced
   * a nonexistent _premiumLegCoupon field and folded the notional bits in twice).
   * Overriding hashCode alongside equals is required by the java.lang.Object contract,
   * otherwise equal contracts would land in different HashMap/HashSet buckets.
   */
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + (_adjustMaturityDate ? 1231 : 1237);
    result = prime * result + (_includeAccruedPremium ? 1231 : 1237);
    result = prime * result + (_protectionStart ? 1231 : 1237);
    result = prime * result + ((_buySellProtection == null) ? 0 : _buySellProtection.hashCode());
    result = prime * result + ((_businessdayAdjustmentConvention == null) ? 0 : _businessdayAdjustmentConvention.hashCode());
    result = prime * result + ((_calendar == null) ? 0 : _calendar.hashCode());
    result = prime * result + ((_couponFrequency == null) ? 0 : _couponFrequency.hashCode());
    result = prime * result + ((_creditKey == null) ? 0 : _creditKey.hashCode());
    result = prime * result + ((_currency == null) ? 0 : _currency.hashCode());
    result = prime * result + ((_daycountFractionConvention == null) ? 0 : _daycountFractionConvention.hashCode());
    result = prime * result + ((_debtSeniority == null) ? 0 : _debtSeniority.hashCode());
    result = prime * result + ((_restructuringClause == null) ? 0 : _restructuringClause.hashCode());
    result = prime * result + ((_stubType == null) ? 0 : _stubType.hashCode());
    result = prime * result + ((_priceType == null) ? 0 : _priceType.hashCode());
    result = prime * result + ((_startDate == null) ? 0 : _startDate.hashCode());
    result = prime * result + ((_effectiveDate == null) ? 0 : _effectiveDate.hashCode());
    result = prime * result + ((_maturityDate == null) ? 0 : _maturityDate.hashCode());
    result = prime * result + ((_valuationDate == null) ? 0 : _valuationDate.hashCode());
    result = prime * result + ((_protectionBuyer == null) ? 0 : _protectionBuyer.hashCode());
    result = prime * result + ((_protectionSeller == null) ? 0 : _protectionSeller.hashCode());
    long temp = Double.doubleToLongBits(_notional);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    temp = Double.doubleToLongBits(_recoveryRate);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  // ----------------------------------------------------------------------------------------------------------------------------------------

  // NOTE(review): equals deliberately omits _immAdjustMaturityDate, _adjustEffectiveDate and
  // _referenceEntity (the latter is partially covered via _creditKey) - confirm this is intended.
  @Override
  public boolean equals(Object obj) {

    if (this == obj) {
      return true;
    }

    if (obj == null) {
      return false;
    }

    if (getClass() != obj.getClass()) {
      return false;
    }

    CreditDefaultSwapDefinition other = (CreditDefaultSwapDefinition) obj;

    if (_buySellProtection != other._buySellProtection) {
      return false;
    }

    if (_adjustMaturityDate != other._adjustMaturityDate) {
      return false;
    }

    if (!ObjectUtils.equals(_businessdayAdjustmentConvention, other._businessdayAdjustmentConvention)) {
      return false;
    }

    if (!ObjectUtils.equals(_calendar, other._calendar)) {
      return false;
    }

    if (!ObjectUtils.equals(_couponFrequency, other._couponFrequency)) {
      return false;
    }

    if (!ObjectUtils.equals(_creditKey, other._creditKey)) {
      return false;
    }

    if (!ObjectUtils.equals(_currency, other._currency)) {
      return false;
    }

    if (!ObjectUtils.equals(_daycountFractionConvention, other._daycountFractionConvention)) {
      return false;
    }

    if (_debtSeniority != other._debtSeniority) {
      return false;
    }

    if (!ObjectUtils.equals(_effectiveDate, other._effectiveDate)) {
      return false;
    }

    if (_includeAccruedPremium != other._includeAccruedPremium) {
      return false;
    }

    if (_priceType != other._priceType) {
      return false;
    }

    if (_protectionStart != other._protectionStart) {
      return false;
    }

    if (!ObjectUtils.equals(_maturityDate, other._maturityDate)) {
      return false;
    }

    if (Double.doubleToLongBits(_notional) != Double.doubleToLongBits(other._notional)) {
      return false;
    }

    if (!ObjectUtils.equals(_protectionBuyer, other._protectionBuyer)) {
      return false;
    }

    if (!ObjectUtils.equals(_protectionSeller, other._protectionSeller)) {
      return false;
    }

    if (_restructuringClause != other._restructuringClause) {
      return false;
    }

    if (_stubType != other._stubType) {
      return false;
    }

    if (!ObjectUtils.equals(_startDate, other._startDate)) {
      return false;
    }

    if (!ObjectUtils.equals(_valuationDate, other._valuationDate)) {
      return false;
    }

    if (Double.doubleToLongBits(_recoveryRate) != Double.doubleToLongBits(other._recoveryRate)) {
      return false;
    }

    return true;
  }

  // ----------------------------------------------------------------------------------------------------------------------------------------
}

// ----------------------------------------------------------------------------------------------------------------------------------------
package com.michelboudreau.testv2; import com.amazonaws.services.dynamodbv2.model.AttributeDefinition; import com.amazonaws.services.dynamodbv2.model.AttributeValue; import com.amazonaws.services.dynamodbv2.model.BatchGetItemRequest; import com.amazonaws.services.dynamodbv2.model.BatchGetItemResult; import com.amazonaws.services.dynamodbv2.model.BatchWriteItemRequest; import com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult; import com.amazonaws.services.dynamodbv2.model.ConsumedCapacity; import com.amazonaws.services.dynamodbv2.model.DeleteRequest; import com.amazonaws.services.dynamodbv2.model.KeysAndAttributes; import com.amazonaws.services.dynamodbv2.model.PutRequest; import com.amazonaws.services.dynamodbv2.model.ResourceNotFoundException; import com.amazonaws.services.dynamodbv2.model.TableDescription; import com.amazonaws.services.dynamodbv2.model.WriteRequest; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.matchers.JUnitMatchers; import org.junit.runner.RunWith; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = {"classpath:/applicationContext.xml"}) public class AlternatorBatchItemTest extends AlternatorTest { private String tableName1; private String tableName2; private String hashKeyName1; private String hashKeyName2; @Before public void setUp() throws Exception { tableName1 = createTableName(); AttributeDefinition hashAttr1 = createNumberAttributeDefinition(); TableDescription tableDescription1 = createTable(tableName1, hashAttr1); hashKeyName1 = getHashKeyElement(tableDescription1.getKeySchema()).getAttributeName(); tableName2 = createTableName(); AttributeDefinition hashAttr2 = createNumberAttributeDefinition(); 
TableDescription tableDescription2 = createTable(tableName2, hashAttr2); hashKeyName2 = getHashKeyElement(tableDescription2.getKeySchema()).getAttributeName(); } @After public void tearDown() throws Exception { deleteAllTables(); } @Test public void vanillaBatchGetItemTest() throws Exception { this.vanillaBatchWriteItemTest(); BatchGetItemRequest batchGetItemRequest = new BatchGetItemRequest(); Map<String, KeysAndAttributes> requestItems = new HashMap<String, KeysAndAttributes>(); KeysAndAttributes keysAndAttributes1 = new KeysAndAttributes(); List<Map<String, AttributeValue>> itemKeys1 = new ArrayList<Map<String, AttributeValue>>(); itemKeys1.add(createItemKey(hashKeyName1, new AttributeValue().withN("6"))); keysAndAttributes1.setKeys(itemKeys1); List<String> attributesToGet1 = new ArrayList<String>(); attributesToGet1.add(hashKeyName1); keysAndAttributes1.setAttributesToGet(attributesToGet1); KeysAndAttributes keysAndAttributes2 = new KeysAndAttributes(); List<Map<String, AttributeValue>> itemKeys2 = new ArrayList<Map<String, AttributeValue>>(); itemKeys2.add(createItemKey(hashKeyName2, new AttributeValue().withN("1"))); keysAndAttributes2.setKeys(itemKeys2); List<String> attributesToGet2 = new ArrayList<String>(); attributesToGet2.add(hashKeyName2); keysAndAttributes2.setAttributesToGet(attributesToGet2); //Test case 1: Every request has matches. // keys.add(new Key(new AttributeValue("4"))); // keys.add(new Key(new AttributeValue("5"))); // keys.add(new Key(new AttributeValue("3"))); //Test case 2: Requests has no match. //Test case 3: Complicated test, some requests has matches, some doesn't. // keys.add(new Key(new AttributeValue("7"))); // keys.add(new Key(new AttributeValue("4"))); //Test case 4: Duplicated request //Duplicated requests return duplicated results. 
// keys.add(new Key(new AttributeValue("7"))); // keys.add(new Key(new AttributeValue("7"))); // keys.add(new Key(new AttributeValue("4"))); // keys.add(new Key(new AttributeValue("4"))); //Test case for Exception: Table doesn't exist. // requestItems.put("Vito's Table", keysAndAttributes); // Normal test // TODO: Multi table test failed. Need to be fixed. requestItems.put(tableName1, keysAndAttributes1); requestItems.put(tableName2, keysAndAttributes2); batchGetItemRequest.withRequestItems(requestItems); BatchGetItemResult result = getClient().batchGetItem(batchGetItemRequest); junit.framework.Assert.assertNotNull("UnprocessedKeys should be empty rather than null.", result.getUnprocessedKeys()); } @Test public void vanillaBatchWriteItemTest() throws Exception{ BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest(); BatchWriteItemResult result; // Create a map for the requests in the batch Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>(); // Test: write items to database Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>(); forumItem.put(hashKeyName1, new AttributeValue().withN("1")); forumItem.put("range", new AttributeValue().withS("a")); List<WriteRequest> forumList = new ArrayList<WriteRequest>(); forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem))); Map<String, AttributeValue> forumItem1 = new HashMap<String, AttributeValue>(); forumItem1.put(hashKeyName1, new AttributeValue().withN("2")); forumItem1.put("range", new AttributeValue().withS("b")); forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem1))); Map<String, AttributeValue> forumItem5 = new HashMap<String, AttributeValue>(); forumItem5.put(hashKeyName1, new AttributeValue().withN("3")); forumItem5.put("range", new AttributeValue().withS("c")); forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem5))); Map<String, 
AttributeValue> forumItem2 = new HashMap<String, AttributeValue>(); forumItem2.put(hashKeyName1, new AttributeValue().withN("4")); forumItem2.put("range", new AttributeValue().withS("d")); forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem2))); Map<String, AttributeValue> forumItem3 = new HashMap<String, AttributeValue>(); forumItem3.put(hashKeyName1, new AttributeValue().withN("5")); forumItem3.put("range", new AttributeValue().withS("e")); forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem3))); Map<String, AttributeValue> forumItem4 = new HashMap<String, AttributeValue>(); forumItem4.put(hashKeyName1, new AttributeValue().withN("6")); forumItem4.put("range", new AttributeValue().withS("f")); forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem4))); //Test case: with duplicated hashkey item but distinguished range key input. Map<String, AttributeValue> forumItem6 = new HashMap<String, AttributeValue>(); forumItem6.put(hashKeyName1, new AttributeValue().withN("6")); forumItem6.put("range", new AttributeValue().withS("ff")); forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem6))); //Test on Table 2 Map<String, AttributeValue> forumItemT2 = new HashMap<String, AttributeValue>(); forumItemT2.put(hashKeyName2, new AttributeValue().withN("1")); forumItemT2.put("range", new AttributeValue().withS("a")); List<WriteRequest> forumListT2 = new ArrayList<WriteRequest>(); forumListT2.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItemT2))); requestItems.put(tableName1, forumList); requestItems.put(tableName2, forumListT2); do { System.out.println("Making the request."); batchWriteItemRequest.withRequestItems(requestItems); result = getClient().batchWriteItem(batchWriteItemRequest); // Print consumed capacity units for(ConsumedCapacity entry : result.getConsumedCapacity()) { String tableName1 = entry.getTableName(); Double 
consumedCapacityUnits = entry.getCapacityUnits(); System.out.println("Consumed capacity units for table " + tableName1 + ": " + consumedCapacityUnits); } // Check for unprocessed keys which could happen if you exceed provisioned throughput System.out.println("Unprocessed Put and Delete requests: \n" + result.getUnprocessedItems()); requestItems = result.getUnprocessedItems(); } while (result.getUnprocessedItems().size() > 0); } @Test public void batchWriteItemWithDeletionsTest() throws Exception{ this.vanillaBatchWriteItemTest(); BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest(); BatchWriteItemResult result; // Create a map for the requests in the batch Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>(); // Test: delete some items from database List<WriteRequest> forumList = new ArrayList<WriteRequest>(); //Test case: Delete Requests Map<String, AttributeValue> forumKey3c = createItemKey( hashKeyName1, new AttributeValue().withN("3"), "range", new AttributeValue().withS("c")); Map<String, AttributeValue> forumKey5e = createItemKey( hashKeyName1, new AttributeValue().withN("5"), "range", new AttributeValue().withS("e")); Map<String, AttributeValue> forumKey6f = createItemKey( hashKeyName1, new AttributeValue().withN("6"), "range", new AttributeValue().withS("f")); forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey3c))); forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey5e))); forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey6f))); //Test on Table 2 List<WriteRequest> forumListT2 = new ArrayList<WriteRequest>(); Map<String, AttributeValue> forumKeyT2 = createItemKey( hashKeyName2, new AttributeValue().withN("1"), "range", new AttributeValue().withS("a")); forumListT2.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKeyT2))); requestItems.put(tableName1, forumList); 
requestItems.put(tableName2, forumListT2); do { System.out.println("Making the request."); batchWriteItemRequest.withRequestItems(requestItems); result = getClient().batchWriteItem(batchWriteItemRequest); // Print consumed capacity units for(ConsumedCapacity entry : result.getConsumedCapacity()) { String tableName1 = entry.getTableName(); Double consumedCapacityUnits = entry.getCapacityUnits(); System.out.println("Consumed capacity units for table " + tableName1 + ": " + consumedCapacityUnits); } // Check for unprocessed keys which could happen if you exceed provisioned throughput System.out.println("Unprocessed Put and Delete requests: \n" + result.getUnprocessedItems()); requestItems = result.getUnprocessedItems(); } while (result.getUnprocessedItems().size() > 0); } @Test public void batchWriteItemWithDuplicateDeletionTest() throws Exception{ this.vanillaBatchWriteItemTest(); BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest(); BatchWriteItemResult result; // Create a map for the requests in the batch Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>(); // Test: delete some items from database List<WriteRequest> forumList = new ArrayList<WriteRequest>(); //Test case: Delete Requests Map<String, AttributeValue> forumKey5e = createItemKey( hashKeyName1, new AttributeValue().withN("5"), "range", new AttributeValue().withS("e")); Map<String, AttributeValue> forumKey6f = createItemKey( hashKeyName1, new AttributeValue().withN("6"), "range", new AttributeValue().withS("f")); forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey5e))); forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey6f))); //Test case: Duplicated delete request forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey5e))); requestItems.put(tableName1, forumList); System.out.println("Making the request."); 
batchWriteItemRequest.withRequestItems(requestItems); String exceptionMessage = null; try { result = getClient().batchWriteItem(batchWriteItemRequest); } catch (ResourceNotFoundException ex) { exceptionMessage = ex.getMessage(); } // Question: Is this the actual behavior of DynamoDB? Assert.assertNotNull("Expected an exception.", exceptionMessage); Assert.assertThat("Incorrect exception message.", exceptionMessage, JUnitMatchers.containsString("The item with hash key '5' doesn't exist in table '" + tableName1 + "'")); } /* @Test public void batchGetItemInTableTest() { BatchGetItemResult result = client.batchGetItem(new BatchGetItemRequest()); Assert.assertNotNull(result); } */ }
/* * $Header: /home/cvs/jakarta-tomcat-jasper/jasper2/src/share/org/apache/jasper/compiler/PageDataImpl.java,v 1.36 2003/11/11 23:19:29 luehe Exp $ * $Revision: 1.36 $ * $Date: 2003/11/11 23:19:29 $ * * ==================================================================== * * The Apache Software License, Version 1.1 * * Copyright (c) 1999 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, if * any, must include the following acknowlegement: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowlegement may appear in the software itself, * if and wherever such third-party acknowlegements normally appear. * * 4. The names "The Jakarta Project", "Tomcat", and "Apache Software * Foundation" must not be used to endorse or promote products derived * from this software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache" * nor may "Apache" appear in their names without prior written * permission of the Apache Group. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.jasper.compiler; import java.io.InputStream; import java.io.ByteArrayInputStream; import java.io.CharArrayWriter; import java.io.UnsupportedEncodingException; import java.util.ListIterator; import javax.servlet.jsp.tagext.PageData; import org.xml.sax.Attributes; import org.xml.sax.helpers.AttributesImpl; import org.apache.jasper.JasperException; /** * An implementation of <tt>javax.servlet.jsp.tagext.PageData</tt> which * builds the XML view of a given page. * * The XML view is built in two passes: * * During the first pass, the FirstPassVisitor collects the attributes of the * top-level jsp:root and those of the jsp:root elements of any included * pages, and adds them to the jsp:root element of the XML view. * In addition, any taglib directives are converted into xmlns: attributes and * added to the jsp:root element of the XML view. * This pass ignores any nodes other than JspRoot and TaglibDirective. 
* * During the second pass, the SecondPassVisitor produces the XML view, using * the combined jsp:root attributes determined in the first pass and any * remaining pages nodes (this pass ignores any JspRoot and TaglibDirective * nodes). * * @author Jan Luehe */ class PageDataImpl extends PageData implements TagConstants { private static final String JSP_VERSION = "2.0"; private static final String CDATA_START_SECTION = "<![CDATA[\n"; private static final String CDATA_END_SECTION = "]]>\n"; // string buffer used to build XML view private StringBuffer buf; /** * Constructor. * * @param page the page nodes from which to generate the XML view */ public PageDataImpl(Node.Nodes page, Compiler compiler) throws JasperException { // First pass FirstPassVisitor firstPass = new FirstPassVisitor(page.getRoot(), compiler.getPageInfo()); page.visit(firstPass); // Second pass buf = new StringBuffer(); SecondPassVisitor secondPass = new SecondPassVisitor(page.getRoot(), buf, compiler, firstPass.getJspIdPrefix()); page.visit(secondPass); } /** * Returns the input stream of the XML view. * * @return the input stream of the XML view */ public InputStream getInputStream() { // Turn StringBuffer into InputStream try { return new ByteArrayInputStream(buf.toString().getBytes("UTF-8")); } catch (UnsupportedEncodingException uee) { // should never happen throw new RuntimeException(uee.toString()); } } /* * First-pass Visitor for JspRoot nodes (representing jsp:root elements) * and TablibDirective nodes, ignoring any other nodes. * * The purpose of this Visitor is to collect the attributes of the * top-level jsp:root and those of the jsp:root elements of any included * pages, and add them to the jsp:root element of the XML view. * In addition, this Visitor converts any taglib directives into xmlns: * attributes and adds them to the jsp:root element of the XML view. 
*/
    static class FirstPassVisitor
                extends Node.Visitor implements TagConstants {

        private Node.Root root;
        private AttributesImpl rootAttrs;
        private PageInfo pageInfo;

        // Prefix for the 'id' attribute
        private String jspIdPrefix;

        /*
         * Constructor
         */
        public FirstPassVisitor(Node.Root root, PageInfo pageInfo) {
            this.root = root;
            this.pageInfo = pageInfo;
            this.rootAttrs = new AttributesImpl();
            this.rootAttrs.addAttribute("", "", "version", "CDATA",
                                        JSP_VERSION);
            this.jspIdPrefix = "jsp";
        }

        public void visit(Node.Root n) throws JasperException {
            visitBody(n);
            if (n == root) {
                /*
                 * Top-level page.
                 *
                 * Add
                 *   xmlns:jsp="http://java.sun.com/JSP/Page"
                 * attribute only if not already present.
                 */
                if (!JSP_URI.equals(rootAttrs.getValue("xmlns:jsp"))) {
                    rootAttrs.addAttribute("", "", "xmlns:jsp", "CDATA",
                                           JSP_URI);
                }

                if (pageInfo.isJspPrefixHijacked()) {
                    /*
                     * 'jsp' prefix has been hijacked, that is, bound to a
                     * namespace other than the JSP namespace. This means that
                     * when adding an 'id' attribute to each element, we can't
                     * use the 'jsp' prefix. Therefore, create a new prefix
                     * (one that is unique across the translation unit) for use
                     * by the 'id' attribute, and bind it to the JSP namespace
                     */
                    jspIdPrefix += "jsp";
                    while (pageInfo.containsPrefix(jspIdPrefix)) {
                        jspIdPrefix += "jsp";
                    }
                    rootAttrs.addAttribute("", "", "xmlns:" + jspIdPrefix,
                                           "CDATA", JSP_URI);
                }

                root.setAttributes(rootAttrs);
            }
        }

        public void visit(Node.JspRoot n) throws JasperException {
            // Merge this jsp:root's taglib and regular attributes into the
            // combined root attribute set, then descend into its body.
            addAttributes(n.getTaglibAttributes());
            addAttributes(n.getAttributes());
            visitBody(n);
        }

        /*
         * Converts taglib directive into "xmlns:..." attribute of jsp:root
         * element.
         */
        public void visit(Node.TaglibDirective n) throws JasperException {
            Attributes attrs = n.getAttributes();
            if (attrs != null) {
                String qName = "xmlns:" + attrs.getValue("prefix");
                /*
                 * According to javadocs of org.xml.sax.helpers.AttributesImpl,
                 * the addAttribute method does not check to see if the
                 * specified attribute is already contained in the list: This
                 * is the application's responsibility!
                 */
                if (rootAttrs.getIndex(qName) == -1) {
                    String location = attrs.getValue("uri");
                    if (location != null) {
                        if (location.startsWith("/")) {
                            location = URN_JSPTLD + location;
                        }
                        rootAttrs.addAttribute("", "", qName, "CDATA",
                                               location);
                    } else {
                        location = attrs.getValue("tagdir");
                        rootAttrs.addAttribute("", "", qName, "CDATA",
                                               URN_JSPTAGDIR + location);
                    }
                }
            }
        }

        public String getJspIdPrefix() {
            return jspIdPrefix;
        }

        // Copies the given attributes into rootAttrs, skipping any "version"
        // attribute (the XML view carries its own JSP version attribute).
        private void addAttributes(Attributes attrs) {
            if (attrs != null) {
                int len = attrs.getLength();
                for (int i=0; i<len; i++) {
                    if ("version".equals(attrs.getQName(i))) {
                        continue;
                    }
                    rootAttrs.addAttribute(attrs.getURI(i),
                                           attrs.getLocalName(i),
                                           attrs.getQName(i),
                                           attrs.getType(i),
                                           attrs.getValue(i));
                }
            }
        }
    }

    /*
     * Second-pass Visitor responsible for producing XML view and assigning
     * each element a unique jsp:id attribute.
     */
    static class SecondPassVisitor extends Node.Visitor
                implements TagConstants {

        private Node.Root root;
        private StringBuffer buf;
        private Compiler compiler;
        private String jspIdPrefix;
        private boolean resetDefaultNS = false;

        // Current value of jsp:id attribute
        private int jspId;

        /*
         * Constructor
         */
        public SecondPassVisitor(Node.Root root, StringBuffer buf,
                                 Compiler compiler, String jspIdPrefix) {
            this.root = root;
            this.buf = buf;
            this.compiler = compiler;
            this.jspIdPrefix = jspIdPrefix;
        }

        /*
         * Visits root node.
         */
        public void visit(Node.Root n) throws JasperException {
            if (n == this.root) {
                // top-level page
                appendXmlProlog();
                appendTag(n);
            } else {
                boolean resetDefaultNSSave = resetDefaultNS;
                if (n.isXmlSyntax()) {
                    resetDefaultNS = true;
                }
                visitBody(n);
                resetDefaultNS = resetDefaultNSSave;
            }
        }

        /*
         * Visits jsp:root element of JSP page in XML syntax.
         *
         * Any nested jsp:root elements (from pages included via an
         * include directive) are ignored.
         */
        public void visit(Node.JspRoot n) throws JasperException {
            visitBody(n);
        }

        public void visit(Node.PageDirective n) throws JasperException {
            appendPageDirective(n);
        }

        public void visit(Node.IncludeDirective n) throws JasperException {
            // expand in place
            visitBody(n);
        }

        public void visit(Node.Comment n) throws JasperException {
            // Comments are ignored in XML view
        }

        public void visit(Node.Declaration n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.Expression n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.Scriptlet n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.JspElement n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.ELExpression n) throws JasperException {
            // In JSP (non-XML) syntax, wrap the EL expression in a jsp:text
            // element carrying a fresh jsp:id; in XML syntax emit it bare.
            if (!n.getRoot().isXmlSyntax()) {
                buf.append("<").append(JSP_TEXT_ACTION);
                buf.append(" ");
                buf.append(jspIdPrefix);
                buf.append(":id=\"");
                buf.append(jspId++).append("\">");
            }
            buf.append("${");
            buf.append(JspUtil.escapeXml(n.getText()));
            buf.append("}");
            if (!n.getRoot().isXmlSyntax()) {
                buf.append(JSP_TEXT_ACTION_END);
            }
            buf.append("\n");
        }

        public void visit(Node.IncludeAction n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.ForwardAction n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.GetProperty n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.SetProperty n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.ParamAction n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.ParamsAction n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.FallBackAction n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.UseBean n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.PlugIn n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.NamedAttribute n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.JspBody n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.CustomTag n) throws JasperException {
            boolean resetDefaultNSSave = resetDefaultNS;
            appendTag(n, resetDefaultNS);
            resetDefaultNS = resetDefaultNSSave;
        }

        public void visit(Node.UninterpretedTag n) throws JasperException {
            boolean resetDefaultNSSave = resetDefaultNS;
            appendTag(n, resetDefaultNS);
            resetDefaultNS = resetDefaultNSSave;
        }

        public void visit(Node.JspText n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.DoBodyAction n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.InvokeAction n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.TagDirective n) throws JasperException {
            appendTagDirective(n);
        }

        public void visit(Node.AttributeDirective n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.VariableDirective n) throws JasperException {
            appendTag(n);
        }

        public void visit(Node.TemplateText n) throws JasperException {
            /*
             * If the template text came from a JSP page written in JSP syntax,
             * create a jsp:text element for it (JSP 5.3.2).
             */
            appendText(n.getText(), !n.getRoot().isXmlSyntax());
        }

        /*
         * Appends the given tag, including its body, to the XML view.
         */
        private void appendTag(Node n) throws JasperException {
            appendTag(n, false);
        }

        /*
         * Appends the given tag, including its body, to the XML view,
         * and optionally reset default namespace to "", if none specified.
*/ private void appendTag(Node n, boolean addDefaultNS) throws JasperException { Node.Nodes body = n.getBody(); String text = n.getText(); buf.append("<").append(n.getQName()); buf.append("\n"); printAttributes(n, addDefaultNS); buf.append(" ").append(jspIdPrefix).append(":id").append("=\""); buf.append(jspId++).append("\"\n"); if (ROOT_ACTION.equals(n.getLocalName()) || body != null || text != null) { buf.append(">\n"); if (ROOT_ACTION.equals(n.getLocalName())) { if (compiler.getCompilationContext().isTagFile()) { appendTagDirective(); } else { appendPageDirective(); } } if (body != null) { body.visit(this); } else { appendText(text, false); } buf.append("</" + n.getQName() + ">\n"); } else { buf.append("/>\n"); } } /* * Appends the page directive with the given attributes to the XML * view. * * Since the import attribute of the page directive is the only page * attribute that is allowed to appear multiple times within the same * document, and since XML allows only single-value attributes, * the values of multiple import attributes must be combined into one, * separated by comma. * * If the given page directive contains just 'contentType' and/or * 'pageEncoding' attributes, we ignore it, as we've already appended * a page directive containing just these two attributes. */ private void appendPageDirective(Node.PageDirective n) { boolean append = false; Attributes attrs = n.getAttributes(); int len = (attrs == null) ? 
0 : attrs.getLength(); for (int i=0; i<len; i++) { String attrName = attrs.getQName(i); if (!"pageEncoding".equals(attrName) && !"contentType".equals(attrName)) { append = true; break; } } if (!append) { return; } buf.append("<").append(n.getQName()); buf.append("\n"); // append jsp:id buf.append(" ").append(jspIdPrefix).append(":id").append("=\""); buf.append(jspId++).append("\"\n"); // append remaining attributes for (int i=0; i<len; i++) { String attrName = attrs.getQName(i); if ("import".equals(attrName) || "contentType".equals(attrName) || "pageEncoding".equals(attrName)) { /* * Page directive's 'import' attribute is considered * further down, and its 'pageEncoding' and 'contentType' * attributes are ignored, since we've already appended * a new page directive containing just these two * attributes */ continue; } String value = attrs.getValue(i); buf.append(" ").append(attrName).append("=\""); buf.append(JspUtil.getExprInXml(value)).append("\"\n"); } if (n.getImports().size() > 0) { // Concatenate names of imported classes/packages boolean first = true; ListIterator iter = n.getImports().listIterator(); while (iter.hasNext()) { if (first) { first = false; buf.append(" import=\""); } else { buf.append(","); } buf.append(JspUtil.getExprInXml((String) iter.next())); } buf.append("\"\n"); } buf.append("/>\n"); } /* * Appends a page directive with 'pageEncoding' and 'contentType' * attributes. * * The value of the 'pageEncoding' attribute is hard-coded * to UTF-8, whereas the value of the 'contentType' attribute, which * is identical to what the container will pass to * ServletResponse.setContentType(), is derived from the pageInfo. 
*/ private void appendPageDirective() { buf.append("<").append(JSP_PAGE_DIRECTIVE_ACTION); buf.append("\n"); // append jsp:id buf.append(" ").append(jspIdPrefix).append(":id").append("=\""); buf.append(jspId++).append("\"\n"); buf.append(" ").append("pageEncoding").append("=\"UTF-8\"\n"); buf.append(" ").append("contentType").append("=\""); buf.append(compiler.getPageInfo().getContentType()).append("\"\n"); buf.append("/>\n"); } /* * Appends the tag directive with the given attributes to the XML * view. * * If the given tag directive contains just a 'pageEncoding' * attributes, we ignore it, as we've already appended * a tag directive containing just this attributes. */ private void appendTagDirective(Node.TagDirective n) throws JasperException { boolean append = false; Attributes attrs = n.getAttributes(); int len = (attrs == null) ? 0 : attrs.getLength(); for (int i=0; i<len; i++) { String attrName = attrs.getQName(i); if (!"pageEncoding".equals(attrName)) { append = true; break; } } if (!append) { return; } appendTag(n); } /* * Appends a tag directive containing a single 'pageEncoding' * attribute whose value is hard-coded to UTF-8. */ private void appendTagDirective() { buf.append("<").append(JSP_TAG_DIRECTIVE_ACTION); buf.append("\n"); // append jsp:id buf.append(" ").append(jspIdPrefix).append(":id").append("=\""); buf.append(jspId++).append("\"\n"); buf.append(" ").append("pageEncoding").append("=\"UTF-8\"\n"); buf.append("/>\n"); } private void appendText(String text, boolean createJspTextElement) { if (createJspTextElement) { buf.append("<").append(JSP_TEXT_ACTION); buf.append("\n"); // append jsp:id buf.append(" ").append(jspIdPrefix).append(":id").append("=\""); buf.append(jspId++).append("\"\n"); buf.append(">\n"); appendCDATA(text); buf.append(JSP_TEXT_ACTION_END); buf.append("\n"); } else { appendCDATA(text); } } /* * Appends the given text as a CDATA section to the XML view, unless * the text has already been marked as CDATA. 
*/ private void appendCDATA(String text) { buf.append(CDATA_START_SECTION); buf.append(escapeCDATA(text)); buf.append(CDATA_END_SECTION); } /* * Escapes any occurrences of "]]>" (by replacing them with "]]&gt;") * within the given text, so it can be included in a CDATA section. */ private String escapeCDATA(String text) { if( text==null ) return ""; int len = text.length(); CharArrayWriter result = new CharArrayWriter(len); for (int i=0; i<len; i++) { if (((i+2) < len) && (text.charAt(i) == ']') && (text.charAt(i+1) == ']') && (text.charAt(i+2) == '>')) { // match found result.write(']'); result.write(']'); result.write('&'); result.write('g'); result.write('t'); result.write(';'); i += 2; } else { result.write(text.charAt(i)); } } return result.toString(); } /* * Appends the attributes of the given Node to the XML view. */ private void printAttributes(Node n, boolean addDefaultNS) { /* * Append "xmlns" attributes that represent tag libraries */ Attributes attrs = n.getTaglibAttributes(); int len = (attrs == null) ? 0 : attrs.getLength(); for (int i=0; i<len; i++) { String name = attrs.getQName(i); String value = attrs.getValue(i); buf.append(" ").append(name).append("=\"").append(value).append("\"\n"); } /* * Append "xmlns" attributes that do not represent tag libraries */ attrs = n.getNonTaglibXmlnsAttributes(); len = (attrs == null) ? 0 : attrs.getLength(); boolean defaultNSSeen = false; for (int i=0; i<len; i++) { String name = attrs.getQName(i); String value = attrs.getValue(i); buf.append(" ").append(name).append("=\"").append(value).append("\"\n"); defaultNSSeen |= "xmlns".equals(name); } if (addDefaultNS && !defaultNSSeen) { buf.append(" xmlns=\"\"\n"); } resetDefaultNS = false; /* * Append all other attributes */ attrs = n.getAttributes(); len = (attrs == null) ? 
0 : attrs.getLength(); for (int i=0; i<len; i++) { String name = attrs.getQName(i); String value = attrs.getValue(i); buf.append(" ").append(name).append("=\""); buf.append(JspUtil.getExprInXml(value)).append("\"\n"); } } /* * Appends XML prolog with encoding declaration. */ private void appendXmlProlog() { buf.append("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n"); } } }
// Copyright 2017 Archos SA
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.archos.mediaprovider.video;

import android.content.BroadcastReceiver;
import android.content.ContentProvider;
import android.content.ContentProviderOperation;
import android.content.ContentProviderResult;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.OperationApplicationException;
import android.content.SharedPreferences;
import android.content.UriMatcher;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.graphics.Bitmap;
import android.net.ConnectivityManager;
import android.net.Uri;
import android.os.Binder;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.os.ParcelFileDescriptor;
import android.os.Process;
import android.os.RemoteException;
import android.preference.PreferenceManager;
import android.provider.BaseColumns;
import android.text.TextUtils;
import android.util.Log;

import com.archos.filecorelibrary.FileEditor;
import com.archos.filecorelibrary.Utils;
import com.archos.mediacenter.filecoreextension.upnp2.FileEditorFactoryWithUpnp;
import com.archos.mediacenter.filecoreextension.upnp2.UpnpServiceManager;
import com.archos.mediacenter.utils.AppState;
import com.archos.medialib.IMediaMetadataRetriever;
import com.archos.medialib.MediaFactory;
import com.archos.mediaprovider.ArchosMediaCommon;
import com.archos.mediaprovider.DbHolder;
import com.archos.mediaprovider.IMediaThumbnailService;
import com.archos.mediaprovider.MediaRetrieverService;
import com.archos.mediaprovider.MediaThumbnailService;
import com.archos.mediaprovider.NetworkState;
import com.archos.mediaprovider.video.VideoStore.MediaColumns;
import com.archos.mediaprovider.video.VideoStore.Video;
import com.archos.mediaprovider.video.VideoStore.Files.FileColumns;
import com.archos.mediaprovider.video.VideoStore.Video.VideoColumns;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Random;

/**
 * ContentProvider for the Archos video database. Serves the video/files
 * tables, video thumbnails, SMB servers, subtitles and user lists, and
 * delegates scraper-related URIs to {@link ScraperProvider}.
 */
public class VideoProvider extends ContentProvider {
    private static final String TAG = ArchosMediaCommon.TAG_PREFIX + "VideoProvider";
    public static final String TAG_DOCTOR_WHO = "DoctorWhoDebug";
    private static final boolean LOCAL_DBG = true;
    private static final boolean DBG = false;
    // network state handling
    private static final boolean DBG_NET = false;

    private DbHolder mDbHolder;
    // Background handler processing queued thumbnail requests
    private Handler mThumbHandler;
    private VobHandler mVobHandler;
    // Delegate provider for scraper (show/movie metadata) URIs
    private ScraperProvider mScraperProvider;
    private OnSharedPreferenceChangeListener mPreferencechChangeListener;

    // Message code for mThumbHandler: process one image thumbnail request
    private static final int IMAGE_THUMB = 2;
    private static final int THUMB_TRY_MAX = 5;

    private ContentResolver mCr;

    private static final int LIGHT_INDEX_STORAGE_MIN_ID = ArchosMediaCommon.LIGHT_INDEX_MIN_STORAGE_ID;
    private static final String LIGHT_INDEX_STORAGE_QUERY =
            "SELECT " + BaseColumns._ID + " FROM files WHERE " + BaseColumns._ID + "=?"; // AND storage_id<" + LIGHT_INDEX_STORAGE_MIN_ID; -- disabled tail of the original query

    /** place for (video) image thumbs */
    private String mImageThumbFolder;
    private static final String IMAGE_THUMB_FOLDER_NAME = "image_thumbs";
    public static final String PREFERENCE_CREATE_REMOTE_THUMBS = "pref_create_remote_thumbs";

    public VideoProvider() {
    }

    /**
     * Initializes the provider: opens the database, creates the scraper
     * delegate, registers preference / foreground listeners, starts the
     * import service and spins up the background thumbnail thread.
     *
     * @return always true (provider is considered successfully loaded)
     */
    @Override
    public boolean onCreate() {
        if (DBG) Log.d(TAG, "onCreate");
        final Context context = getContext();
        mImageThumbFolder = context.getDir(IMAGE_THUMB_FOLDER_NAME, Context.MODE_PRIVATE).getPath();

        mVobHandler = new VobHandler(context);
        VobUpdateCallback vobCb = new VobUpdateCallback(mVobHandler);
        mDbHolder = new DbHolder(new VideoOpenHelper(context, vobCb));
        mCr = context.getContentResolver();

        // implementation that handles scraper requests
        mScraperProvider = new ScraperProvider(context, mDbHolder);

        mPreferencechChangeListener = new OnSharedPreferenceChangeListener() {
            @Override
            public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
                // restart remote-state service when the vpn preference changes
                if ("vpn_mobile".equals(key)) {
                    RemoteStateService.start(context);
                }
            }
        };
        PreferenceManager.getDefaultSharedPreferences(context).registerOnSharedPreferenceChangeListener(mPreferencechChangeListener);
        try {
            VideoStoreImportService.start(context);
        }catch(java.lang.IllegalStateException e){
            // NOTE(review): start failure is deliberately ignored here;
            // presumably the service cannot start while in background
        }
        // handles connectivity changes
        AppState.addOnForeGroundListener(mForeGroundListener);
        handleForeGround(AppState.isForeGround());

        HandlerThread ht = new HandlerThread("thumbs thread", Process.THREAD_PRIORITY_BACKGROUND);
        ht.start();
        mThumbHandler = new Handler(ht.getLooper()) {
            // Timeout constants kept from original code; not referenced in
            // the visible part of this handler
            private static final int HDD_MEDIAPROVIDER_TIMEOUT = 25;
            private static final int HDD_MEDIAPROVIDER_DELAY = (HDD_MEDIAPROVIDER_TIMEOUT + 2) * 1000;

            /**
             * Pops one request from mMediaThumbQueue and executes it if the
             * source file exists; always marks the request DONE and wakes
             * any waiters in the finally block.
             */
            @Override
            public void handleMessage(Message msg) {
                if (msg.what == IMAGE_THUMB) {
                    synchronized (mMediaThumbQueue) {
                        mCurrentThumbRequest = mMediaThumbQueue.poll();
                    }
                    if (mCurrentThumbRequest == null) {
                        Log.w(TAG, "Have message but no request?");
                    } else {
                        try {
                            Uri encodedUri = Utils.encodeUri(Uri.parse(mCurrentThumbRequest.mPath));
                            FileEditor editor = FileEditorFactoryWithUpnp.getFileEditorForUrl(encodedUri, null);
                            if(DBG) Log.d(TAG_DOCTOR_WHO,mCurrentThumbRequest.mPath+" does file exists ? "+ String.valueOf(editor.exists()));
                            if (editor.exists()) {
                                Log.d(TAG,"mCurrentThumbRequest");
                                mCurrentThumbRequest.execute();
                            } else {
                                // original file hasn't been stored yet
                                synchronized (mMediaThumbQueue) {
                                    Log.w(TAG, "original file hasn't been stored yet: " + mCurrentThumbRequest.mPath);
                                }
                            }
                        } catch (IOException ex) {
                            Log.w(TAG, ex);
                        } catch (UnsupportedOperationException ex) {
                            // This could happen if we unplug the sd card during insert/update/delete
                            // See getDatabaseForUri.
                            Log.w(TAG, ex);
                        } catch (OutOfMemoryError err) {
                            /*
                             * Note: Catching Errors is in most cases considered
                             * bad practice. However, in this case it is
                             * motivated by the fact that corrupt or very large
                             * images may cause a huge allocation to be
                             * requested and denied. The bitmap handling API in
                             * Android offers no other way to guard against
                             * these problems than by catching OutOfMemoryError.
                             */
                            Log.w(TAG, err);
                        } finally {
                            synchronized (mCurrentThumbRequest) {
                                mCurrentThumbRequest.mState = MediaThumbRequest.State.DONE;
                                mCurrentThumbRequest.notifyAll();
                            }
                        }
                    }
                }
            }
        };
        return true;
    }

    /**
     * Queries the video database. Scraper URIs are delegated to
     * ScraperProvider; RAWQUERY runs {@code selection} as raw SQL; RAW
     * queries an arbitrary table named by the last path segment; all other
     * matched URIs are built via SQLiteQueryBuilder. Supports "limit",
     * "group", "having" and "distinct" query parameters.
     */
    @Override
    public Cursor query(Uri uri, String[] projectionIn, String selection,
            String[] selectionArgs, String sort) {
        if (DBG) Log.d(TAG, "QUERY " + uri);

        int table = URI_MATCHER.match(uri);

        // let ScraperProvider handle that
        if (ScraperProvider.handles(table))
            return mScraperProvider.query(uri, projectionIn, selection, selectionArgs, sort);

        SQLiteDatabase db = mDbHolder.get();

        // forward raw query requests to .rawQuery using selection as sql string
        if (table == RAWQUERY) {
            Cursor c = db.rawQuery(selection, selectionArgs);
            if (c != null) {
                // notify for any change in the db
                c.setNotificationUri(mCr, VideoStore.ALL_CONTENT_URI);
            }
            return c;
        }

        String limit = uri.getQueryParameter("limit");
        String groupby = uri.getQueryParameter("group");
        String having = uri.getQueryParameter("having");

        // query our custom files tables directly
        if (table == RAW) {
            String tableName = uri.getLastPathSegment();
            return db.query(tableName, projectionIn, selection, selectionArgs, groupby, having, sort, limit);
        }

        List<String> prependArgs = new ArrayList<String>();
        SQLiteQueryBuilder qb = new SQLiteQueryBuilder();
        if (uri.getQueryParameter("distinct") != null) {
            qb.setDistinct(true);
        }
        boolean hasThumbnailId = false;

        switch (table) {
            case FILES_ID:
                qb.appendWhere("_id=?");
                prependArgs.add(uri.getLastPathSegment());
                //$FALL-THROUGH$
            case FILES:
                qb.setTables(VideoOpenHelper.FILES_TABLE_NAME);
                break;
            case VIDEO_MEDIA_ID:
                qb.appendWhere("_id=?");
                prependArgs.add(uri.getLastPathSegment());
                //$FALL-THROUGH$
            case VIDEO_MEDIA:
                qb.setTables(VideoOpenHelper.VIDEO_VIEW_NAME);
                break;
            case VIDEO_LIST: {
                qb.setTables(ListTables.VIDEO_LIST_TABLE);
                qb.appendWhere(VideoStore.List.Columns.ID+"=?");
                prependArgs.add(uri.getLastPathSegment());
                break;
            }
            case LIST:{
                qb.setTables(ListTables.LIST_TABLE);
                break;
            }
            case VIDEO_THUMBNAILS_ID:
                hasThumbnailId = true;
                //$FALL-THROUGH$
            case VIDEO_THUMBNAILS:
                if (!queryThumbnail(qb, uri, VideoOpenHelper.VIDEOTHUMBNAIL_TABLE_NAME, "video_id", hasThumbnailId)) {
                    return null;
                }
                break;
            case ARCHOS_SMB_SERVER_ID:
                qb.appendWhere("_id=?");
                prependArgs.add(uri.getPathSegments().get(2));
                //$FALL-THROUGH$
            case ARCHOS_SMB_SERVER:
                qb.setTables(VideoOpenHelper.SMB_SERVER_TABLE_NAME);
                break;
            case SUBS_MEDIA_ID:
                qb.appendWhere("_id=?");
                prependArgs.add(uri.getLastPathSegment());
                //$FALL-THROUGH$
            case SUBS_MEDIA:
                qb.setTables(VideoOpenHelper.SUBTITLES_TABLE_NAME);
                break;
            case SUBS_MEDIA_VIDEO_ID:
                qb.appendWhere("video_id=?");
                prependArgs.add(uri.getLastPathSegment());
                qb.setTables(VideoOpenHelper.SUBTITLES_TABLE_NAME);
                break;
            default:
                throw new IllegalStateException("Unknown Uri : " + uri);
        }

        Cursor c = qb.query(db, projectionIn, selection,
                combine(prependArgs, selectionArgs), groupby, having, sort, limit);
        if (c != null) {
            c.setNotificationUri(mCr, uri);
        }
        return c;
    }

    /**
     * Prepends {@code prepend} to {@code userArgs}, returning userArgs
     * unchanged when there is nothing to prepend. Used to put URI-derived
     * "?" arguments before the caller-supplied selection args.
     */
    private static String[] combine(List<String> prepend, String[] userArgs) {
        int presize = prepend.size();
        if (presize == 0) {
            return userArgs;
        }
        int usersize = (userArgs != null) ? userArgs.length : 0;
        String [] combined = new String[presize + usersize];
        for (int i = 0; i < presize; i++) {
            combined[i] = prepend.get(i);
        }
        for (int i = 0; i < usersize; i++) {
            combined[presize + i] = userArgs[i];
        }
        return combined;
    }

    // Projection used by getType() to fetch the stored mime type of a row
    private static final String[] MIME_TYPE_PROJECTION = new String[] {
            BaseColumns._ID, // 0
            MediaColumns.MIME_TYPE, // 1
    };

    /**
     * Returns the MIME type for a URI: per-row mime type for single-item
     * URIs (looked up in the db), the collection content type for
     * VIDEO_MEDIA, or delegates scraper URIs.
     *
     * @throws IllegalStateException for unmatched URIs
     */
    @Override
    public String getType(Uri url) {
        if (DBG) Log.d(TAG, "getType" + url);

        // determine match
        int match = URI_MATCHER.match(url);

        // let ScraperProvider handle what it can
        if (ScraperProvider.handles(match))
            return mScraperProvider.getType(url);

        // return what we can
        switch (match) {
            case VIDEO_MEDIA_ID:
            case FILES_ID:
                Cursor c = null;
                try {
                    c = query(url, MIME_TYPE_PROJECTION, null, null, null);
                    if (c != null && c.getCount() == 1) {
                        c.moveToFirst();
                        String mimeType = c.getString(1);
                        c.deactivate();
                        return mimeType;
                    }
                } finally {
                    if (c != null) {
                        c.close();
                    }
                }
                break;
            case VIDEO_MEDIA:
                return Video.Media.CONTENT_TYPE;
        }
        throw new IllegalStateException("Unknown URL : " + url);
    }

    /**
     * Inserts a row. Scraper URIs are delegated; RAW inserts into the table
     * named by the last path segment; otherwise thumbnails, SMB servers,
     * subtitles and lists are handled explicitly below. Change
     * notifications are suppressed while inside a transaction.
     *
     * @throws IllegalStateException for unmatched URIs
     */
    @Override
    public Uri insert(Uri uri, ContentValues values) {
        if (DBG) Log.d(TAG, "INSRT " + uri + " PID:" + Process.myPid() + " TID:" + Process.myTid());

        int match = URI_MATCHER.match(uri);

        // let ScraperProvider handle that
        if (ScraperProvider.handles(match))
            return mScraperProvider.insert(uri, values);

        SQLiteDatabase db = mDbHolder.get();

        // insert into our custom files tables.
        if (match == RAW) {
            String table = uri.getLastPathSegment();
            long rowId = db.insert(table, null, values);
            if (rowId > 0) {
                Uri result = ContentUris.withAppendedId(uri, rowId);
                // in a transaction the caller is responsible for notifying
                if (!db.inTransaction()) {
                    mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                }
                return result;
            }
            return null;
        }

        long rowId = -1;
        Uri newUri = null;
        switch (match) {
            case VIDEO_THUMBNAILS: {
                // make sure a backing .jpg file exists before inserting
                ContentValues newValues = ensureFile(values, ".jpg", mImageThumbFolder);
                rowId = db.insert(VideoOpenHelper.VIDEOTHUMBNAIL_TABLE_NAME, "_id", newValues);
                if (rowId > 0) {
                    newUri = ContentUris.withAppendedId(VideoStore.Video.Thumbnails.
                            getContentUri(uri.getPathSegments().get(0)), rowId);
                }
                break;
            }
            case ARCHOS_SMB_SERVER: {
                rowId = db.insert(VideoOpenHelper.SMB_SERVER_TABLE_NAME, BaseColumns._ID, values);
                if (rowId > 0) {
                    newUri = VideoStore.SmbServer.getContentUri(rowId);
                }
                break;
            }
            case SUBS_MEDIA: {
                rowId = db.insert(VideoOpenHelper.SUBTITLES_TABLE_NAME, BaseColumns._ID, values);
                if (rowId > 0) {
                    newUri = VideoStore.Subtitle.getContentUri(rowId);
                }
                break;
            }
            case VIDEO_LIST: {
                // list id comes from the URI; replace on conflict
                int listId = Integer.valueOf(uri.getLastPathSegment());
                values.put(VideoStore.VideoList.Columns.LIST_ID,listId);
                db.insertWithOnConflict(ListTables.VIDEO_LIST_TABLE, null, values, SQLiteDatabase.CONFLICT_REPLACE);
                newUri = uri;
                mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                break;
            }
            case LIST:{
                rowId = db.insert(ListTables.LIST_TABLE, null, values);
                if (rowId > 0) {
                    newUri = VideoStore.List.getListUri(rowId);
                }
                mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                break;
            }
            default:
                throw new IllegalStateException("Unknown Uri : " + uri);
        }
        if (newUri != null && !db.inTransaction()) {
            mCr.notifyChange(newUri, null);
        }
        return newUri;
    }

    /**
     * Opens the file backing a URI via the default openFileHelper;
     * scraper URIs are delegated to ScraperProvider.
     */
    @Override
    public ParcelFileDescriptor openFile(Uri uri, String mode) throws FileNotFoundException {
        ParcelFileDescriptor pfd = null;

        int match = URI_MATCHER.match(uri);
        // let scraper handle it's part
        if (ScraperProvider.handles(match))
            return mScraperProvider.openFile(uri, mode);

        try {
            pfd = openFileHelper(uri, mode);
        } catch (FileNotFoundException ex) {
            if (mode.contains("w")) {
                // if the file couldn't be created, we shouldn't extract album art
                throw ex;
            }
            if (pfd == null) {
                throw ex;
            }
        }
        return pfd;
    }

    /**
     * Returns ContentValues guaranteed to carry a DATA path pointing to an
     * existing file, generating a file name in directoryName when none is
     * given.
     *
     * @throws IllegalStateException when the file cannot be created
     */
    private static ContentValues ensureFile(ContentValues initialValues, String preferredExtension,
            String directoryName) {
        ContentValues values;
        String file = initialValues.getAsString(VideoStore.MediaColumns.DATA);
        if (TextUtils.isEmpty(file)) {
            file = generateFileName(preferredExtension, directoryName);
            values = new ContentValues(initialValues);
            values.put(VideoStore.MediaColumns.DATA, file);
        } else {
            values = initialValues;
        }

        if (!ensureFileExists(file)) {
            throw new IllegalStateException("Unable to create new file: " + file);
        }
        return values;
    }

    /**
     * Creates the file at path (and missing parent directories below the
     * mount point) if it does not exist; returns false when the mount
     * point itself is absent or creation fails.
     */
    private static boolean ensureFileExists(String path) {
        File file = new File(path);
        if (file.exists()) {
            return true;
        }
        // we will not attempt to create the first directory in the path
        // (for example, do not create /sdcard if the SD card is not mounted)
        int secondSlash = path.indexOf('/', 1);
        if (secondSlash < 1) return false;
        String directoryPath = path.substring(0, secondSlash);
        File directory = new File(directoryPath);
        if (!directory.exists()) return false;
        // it's possible that we cannot create the directory
        if (!file.getParentFile().exists() && !file.getParentFile().mkdirs()) {
            Log.e(TAG, "could not create " + file.getParent());
            return false;
        }
        try {
            boolean ret = file.createNewFile();
            // file needs to be world readable, enforce that here.
            if (ret) file.setReadable(true, false);
            return ret;
        } catch(IOException ioe) {
            Log.e(TAG, "File creation failed", ioe);
        }
        return false;
    }

    /** Builds a timestamp-based file name inside directoryName. */
    private static String generateFileName(String preferredExtension, String directoryName) {
        // create a random file
        String name = String.valueOf(System.currentTimeMillis());
        return directoryName + "/" + name + preferredExtension;
    }

    /**
     * Deletes rows. File/video rows must be deleted through Android's
     * MediaStore (throws); RAW and list tables are handled directly; the
     * remaining URIs go through getTableAndWhere().
     */
    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        if (DBG) Log.d(TAG, "DELTE " + uri);

        int match = URI_MATCHER.match(uri);

        // let ScraperProvider handle that
        if (ScraperProvider.handles(match))
            return mScraperProvider.delete(uri, selection, selectionArgs);

        SQLiteDatabase db = mDbHolder.get();

        switch (match) {
            case UriMatcher.NO_MATCH:
            case FILES:
            case FILES_ID:
            case VIDEO_MEDIA:
            case VIDEO_MEDIA_ID:
                // those must be deleted in Android's db and the result imported
                throw new IllegalStateException("delete not supported, has to be done via Android's MediaStore");
            case RAW:
                String tableName = uri.getLastPathSegment();
                int result = db.delete(tableName, selection, selectionArgs);
                if (result > 0 && !db.inTransaction()) {
                    mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                }
                return result;
            case VIDEO_LIST:
                // restrict to the list id carried in the URI
                selection+= " AND "+ VideoStore.VideoList.Columns.LIST_ID+" = ?";
                List<String> whereArgs = new ArrayList<String>(Arrays.asList(selectionArgs));
                whereArgs.add(uri.getLastPathSegment());
                result = db.delete(ListTables.VIDEO_LIST_TABLE, selection, whereArgs.toArray(new String[0]));
                mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                return result;
            case LIST:
                result = db.delete(ListTables.LIST_TABLE, selection, selectionArgs);
                mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                return result;
        }

        // the rest uses the usual way as in Android
        int count;
        GetTableAndWhereOutParameter tableAndWhere = sGetTableAndWhereParam.get();
        getTableAndWhere(uri, match, selection, tableAndWhere);
        count = db.delete(tableAndWhere.table, tableAndWhere.where, selectionArgs);
        if (count > 0 && !db.inTransaction())
            mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
        return count;
    }

    /**
     * Updates rows. RAW/list tables are handled directly (with scanner key
     * and immutable columns stripped for the files table); video rows have
     * imported columns removed and may trigger thumbnail requests for rows
     * whose MINI_THUMB_MAGIC is 0.
     *
     * @throws IllegalStateException for non-updatable URIs
     */
    @Override
    public int update(Uri uri, ContentValues initialValues, String userWhere, String[] whereArgs) {
        if (DBG) Log.d(TAG, "UPDTE " + uri);

        int count;
        // Log.v(TAG, "update for uri="+uri+", initValues="+initialValues);
        int match = URI_MATCHER.match(uri);

        // let ScraperProvider handle that
        if (ScraperProvider.handles(match))
            return mScraperProvider.update(uri, initialValues, userWhere, whereArgs);

        SQLiteDatabase db = mDbHolder.get();

        switch (match) {
            case RAW: {
                String tableName = uri.getLastPathSegment();
                if (VideoOpenHelper.FILES_TABLE_NAME.equals(tableName)) {
                    // if KEY_SCANNER is present that update was generated by our scanner
                    if (initialValues.containsKey(VideoStoreInternal.KEY_SCANNER)) {
                        initialValues.remove(VideoStoreInternal.KEY_SCANNER);
                    }
                    initialValues.remove(BaseColumns._ID);
                    initialValues.remove(MediaColumns.DATA);
                }
                int result = db.update(tableName, initialValues, userWhere, whereArgs);
                if (result > 0 && !db.inTransaction()) {
                    mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                }
                return result;
            }
            case VIDEO_LIST: {
                // restrict to the list id carried in the URI
                userWhere+= " AND "+ VideoStore.VideoList.Columns.LIST_ID+" = ?";
                List<String> whereArgs2 = new ArrayList<String>(Arrays.asList(whereArgs));
                whereArgs2.add(uri.getLastPathSegment());
                int result = db.update(ListTables.VIDEO_LIST_TABLE, initialValues, userWhere, whereArgs2.toArray(new String[0]));
                mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                return result;
            }
            case LIST: {
                int result = db.update(ListTables.LIST_TABLE, initialValues, userWhere, whereArgs);
                mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null);
                return result;
            }
            case VIDEO_MEDIA:
            case VIDEO_MEDIA_ID:
            case VIDEO_THUMBNAILS:
            case VIDEO_THUMBNAILS_ID:
            case ARCHOS_SMB_SERVER:
            case ARCHOS_SMB_SERVER_ID:
                break; // continue below
            default:
                throw new IllegalStateException("can't update Uri" + uri);
        }

        GetTableAndWhereOutParameter tableAndWhere = sGetTableAndWhereParam.get();
        getTableAndWhere(uri, match, userWhere, tableAndWhere);
        String table = tableAndWhere.table;
        String where = tableAndWhere.where;

        switch (match) {
            case VIDEO_MEDIA:
            case VIDEO_MEDIA_ID: {
                ContentValues values = new ContentValues(initialValues);

                // Don't allow imported stuff to be updated.
                valuesRemove(values, BaseColumns._ID);
                valuesRemove(values, MediaColumns.DATA);
                valuesRemove(values, MediaColumns.DISPLAY_NAME);
                valuesRemove(values, MediaColumns.SIZE);
                valuesRemove(values, MediaColumns.DATE_ADDED);
                valuesRemove(values, MediaColumns.DATE_MODIFIED);
                valuesRemove(values, VideoColumns.BUCKET_ID);
                valuesRemove(values, VideoColumns.BUCKET_DISPLAY_NAME);
                valuesRemove(values, VideoStore.Files.FileColumns.FORMAT);
                valuesRemove(values, VideoStore.Files.FileColumns.PARENT);
                valuesRemove(values, VideoStore.Files.FileColumns.STORAGE_ID);

                if (values.size() < 1) {
                    Log.e(TAG, "no more Values, aborting update.");
                    return 0;
                }

                count = db.update(table, values, where, whereArgs);
                // if this is a request from MediaScanner, DATA should contains file path
                // we only process update request from media scanner, otherwise the requests
                // could be duplicate.
                if (count > 0 && values.getAsString(VideoStore.MediaColumns.DATA) != null) {
                    Cursor c = db.query(table, READY_FLAG_PROJECTION, where, whereArgs, null, null, null);
                    if (c != null) {
                        try {
                            while (c.moveToNext()) {
                                long magic = c.getLong(2);
                                if (magic == 0) {
                                    // no mini thumb yet: queue a thumbnail request
                                    requestMediaThumbnail(c.getString(1), uri, MediaThumbRequest.PRIORITY_NORMAL, 0);
                                }
                            }
                        } finally {
                            c.close();
                        }
                    }
                }
            }
            break;
            default:
                count = db.update(table, initialValues, where, whereArgs);
                break;
        }
        // in a transaction, the code that began the transaction should be taking
        // care of notifications once it ends the transaction successfully
        if (count > 0 && !db.inTransaction()) {
            mCr.notifyChange(uri, null);
        }
        return count;
    }

    /**
     * PipeDataWriter that dumps a byte[] into the write end of a pipe;
     * I/O failures are logged, close failures ignored.
     */
    static class PipeByteWriter implements PipeDataWriter<byte[]> {
        @Override
        public void writeDataToPipe(ParcelFileDescriptor output, Uri uri, String mimeType, Bundle opts, byte[] args) {
            FileOutputStream fout = new FileOutputStream(output.getFileDescriptor());
            try {
                fout.write(args);
            } catch (IOException e) {
                Log.w(TAG, e);
            } finally {
                try {
                    fout.close();
                } catch (IOException e) {
                    // ignored
                }
            }
        }
    }

    // Projection used after video updates to test the thumbnail magic flag:
    // index 1 = file path, index 2 = MINI_THUMB_MAGIC
    private static final String[] READY_FLAG_PROJECTION = new String[] {
            BaseColumns._ID,
            MediaColumns.DATA,
            VideoColumns.MINI_THUMB_MAGIC
    };

    /** Removes key {@code what} from cv, logging when it was present. */
    private static void valuesRemove(ContentValues cv, String what) {
        if (cv.containsKey(what)) {
            Log.e(TAG, "Removing: " + what + " since that is not supported.");
            cv.remove(what);
        }
    }

    // Simple out-parameter holder for getTableAndWhere()
    private static final class GetTableAndWhereOutParameter {
        public GetTableAndWhereOutParameter() { /* empty */ }
        public String table;
        public String where;
    }

    // One reusable out-parameter instance per thread
    static final ThreadLocal<GetTableAndWhereOutParameter> sGetTableAndWhereParam =
            new ThreadLocal<VideoProvider.GetTableAndWhereOutParameter>() {
        @Override
        protected GetTableAndWhereOutParameter initialValue() {
            return new GetTableAndWhereOutParameter();
        }
    };

    /**
     * Resolves the target table and base WHERE clause for the given match,
     * ANDing in the user's clause when present.
     *
     * @throws UnsupportedOperationException for unknown URIs
     */
    private static void getTableAndWhere(Uri uri, int match, String userWhere,
            GetTableAndWhereOutParameter out) {
        String where = null;
        switch (match) {
            case VIDEO_MEDIA:
                out.table = VideoOpenHelper.FILES_TABLE_NAME;
                where = FileColumns.MEDIA_TYPE + "=" + FileColumns.MEDIA_TYPE_VIDEO;
                break;
            case VIDEO_MEDIA_ID:
                out.table = VideoOpenHelper.FILES_TABLE_NAME;
                where = "_id=" + uri.getLastPathSegment();
                break;
            case VIDEO_THUMBNAILS_ID:
                where = "_id=" + uri.getLastPathSegment();
                //$FALL-THROUGH$
            case VIDEO_THUMBNAILS:
                out.table = VideoOpenHelper.VIDEOTHUMBNAIL_TABLE_NAME;
                break;
            case ARCHOS_SMB_SERVER_ID:
                where = "_id=" + uri.getLastPathSegment();
                //$FALL-THROUGH$
            case ARCHOS_SMB_SERVER:
                out.table = VideoOpenHelper.SMB_SERVER_TABLE_NAME;
                break;
            case FILES_ID:
            //case MTP_OBJECTS_ID:
                where = "_id=" + uri.getPathSegments().get(2);
                //$FALL-THROUGH$
            case FILES:
            //case MTP_OBJECTS:
                out.table = VideoOpenHelper.FILES_TABLE_NAME;
                break;
            default:
                throw new UnsupportedOperationException(
                        "Unknown or unsupported URL: " + uri.toString());
        }

        // Add in the user requested WHERE clause, if needed
        if (!TextUtils.isEmpty(userWhere)) {
            if (!TextUtils.isEmpty(where)) {
                out.where = where + " AND (" + userWhere + ")";
            } else {
                out.where = userWhere;
            }
        } else {
            out.where = where;
        }
    }

    // The lock of mMediaThumbQueue protects both mMediaThumbQueue and mCurrentThumbRequest.
    protected volatile MediaThumbRequest mCurrentThumbRequest = null;
    protected final PriorityQueue<MediaThumbRequest> mMediaThumbQueue =
            new PriorityQueue<MediaThumbRequest>(MediaThumbRequest.PRIORITY_NORMAL,
                    MediaThumbRequest.getComparator());

    /**
     * Configures qb for a thumbnail query; returns false when the query
     * should be rejected (continued past this view).
     */
    private boolean queryThumbnail(SQLiteQueryBuilder qb, Uri uri, String table,
            String column, boolean hasThumbnailId) {
        qb.setTables(table);
        if (hasThumbnailId) {
            // For uri dispatched to this method, the 4th path segment is always
            // the thumbnail id.
            qb.appendWhere("_id = " + uri.getPathSegments().get(3));
            // client already knows which thumbnail it wants, bypass it.
            return true;
        }
        String origId = uri.getQueryParameter("orig_id");
        // We can't query ready_flag unless we know original id
        if (origId == null) {
            // this could be thumbnail query for other purpose, bypass it.
return true; } boolean needBlocking = "1".equals(uri.getQueryParameter("blocking")); boolean cancelRequest = "1".equals(uri.getQueryParameter("cancel")); Uri origUri = uri.buildUpon().encodedPath( uri.getPath().replaceFirst("thumbnails", "media")) .appendPath(origId).build(); if (needBlocking && !waitForThumbnailReady(origUri)) { if (DBG) Log.w(TAG, "original media doesn't exist or it's canceled."); return false; } else if (cancelRequest) { String groupId = uri.getQueryParameter("group_id"); boolean isVideo = "video".equals(uri.getPathSegments().get(1)); int pid = Binder.getCallingPid(); long id = -1; long gid = -1; try { id = Long.parseLong(origId); gid = Long.parseLong(groupId); } catch (NumberFormatException ex) { // invalid cancel request return false; } synchronized (mMediaThumbQueue) { if (mCurrentThumbRequest != null && matchThumbRequest(mCurrentThumbRequest, pid, id, gid, isVideo)) { synchronized (mCurrentThumbRequest) { mCurrentThumbRequest.mState = MediaThumbRequest.State.CANCEL; mCurrentThumbRequest.notifyAll(); } } for (MediaThumbRequest mtq : mMediaThumbQueue) { if (matchThumbRequest(mtq, pid, id, gid, isVideo)) { synchronized (mtq) { mtq.mState = MediaThumbRequest.State.CANCEL; mtq.notifyAll(); } mMediaThumbQueue.remove(mtq); } } } } if (origId != null) { qb.appendWhere(column + " = " + origId); } return true; } /** * This method blocks until thumbnail is ready. * * @param thumbUri * @return */ private boolean waitForThumbnailReady(Uri origUri) { Log.d(TAG,"waitForThumbnailReady"); String origId = origUri.getLastPathSegment(); String[] whereArgs = new String[] { origId }; Cursor c = query(origUri, new String[] { BaseColumns._ID, MediaColumns.DATA, VideoColumns.MINI_THUMB_MAGIC, VideoColumns.ARCHOS_THUMB_TRY}, LIGHT_INDEX_STORAGE_QUERY, whereArgs , null); if(DBG) Log.d(TAG_DOCTOR_WHO, "is cursor null ? 
"+String.valueOf(c==null)); if (c == null) return false; boolean result = false; if (c.moveToFirst()) { long id = c.getLong(0); String path = c.getString(1); if(DBG) Log.d(TAG_DOCTOR_WHO, "trying to create thumb for "+path); long magic = c.getLong(2); int nbTry = c.getInt(3); if (magic == 0 && nbTry >= THUMB_TRY_MAX|| !Utils.isLocal(Uri.parse(path))&&!PreferenceManager.getDefaultSharedPreferences(getContext()).getBoolean(PREFERENCE_CREATE_REMOTE_THUMBS, false)) { // thumbnail creation failed more than one time: abort. if(DBG) Log.d(TAG_DOCTOR_WHO, "thumbnail creation failed more than "+THUMB_TRY_MAX+" times: abort. "); c.close(); return false; } MediaThumbRequest req = requestMediaThumbnail(path, origUri, MediaThumbRequest.PRIORITY_HIGH, magic); if(DBG) Log.d(TAG_DOCTOR_WHO, "is MediaThumbRequest null ? "+String.valueOf(req==null)); if (req == null) { return false; } synchronized (req) { try { while (req.mState == MediaThumbRequest.State.WAIT) { req.wait(); } } catch (InterruptedException e) { Log.w(TAG, e); } if (req.mState == MediaThumbRequest.State.DONE) { result = true; if (magic == 0) { /* * previous magic = 0, thumbnail was never created, * retrieve the new magic after requestMediaThumbnail * call to check if thumbnail is valid after that call. 
*/ c.close(); c = query(origUri, new String[] { VideoColumns.MINI_THUMB_MAGIC, VideoColumns.ARCHOS_THUMB_TRY}, null, null, null); if (c == null) return result; if (c.moveToFirst()) { magic = c.getLong(0); nbTry = c.getInt(1) + 1; if(DBG) Log.d(TAG_DOCTOR_WHO, " MediaThumbRequest set try to "+String.valueOf(nbTry)); if (magic == 0) { ContentValues values = new ContentValues(); values.put(VideoColumns.ARCHOS_THUMB_TRY, nbTry); update(origUri, values, null, null); } } } } } } c.close(); return result; } private static boolean matchThumbRequest(MediaThumbRequest req, int pid, long id, long gid, boolean isVideo) { boolean cancelAllOrigId = (id == -1); boolean cancelAllGroupId = (gid == -1); return (req.mCallingPid == pid) && (cancelAllGroupId || req.mGroupId == gid) && (cancelAllOrigId || req.mOrigId == id) && (req.mIsVideo == isVideo); } private MediaThumbRequest requestMediaThumbnail(String path, Uri uri, int priority, long magic) { synchronized (mMediaThumbQueue) { MediaThumbRequest req = null; try { req = new MediaThumbRequest( getContext(), path, uri, priority, magic); mMediaThumbQueue.add(req); // Trigger the handler. 
Message msg = mThumbHandler.obtainMessage(IMAGE_THUMB); msg.sendToTarget(); } catch (Throwable t) { Log.w(TAG, t); } return req; } } private static final UriMatcher URI_MATCHER = new UriMatcher(UriMatcher.NO_MATCH); private static final int VIDEO_MEDIA = 200; private static final int VIDEO_MEDIA_ID = 201; private static final int VIDEO_THUMBNAILS = 202; private static final int VIDEO_THUMBNAILS_ID = 203; private static final int FILES = 700; private static final int FILES_ID = 701; private static final int ARCHOS_SMB_SERVER = 803; private static final int ARCHOS_SMB_SERVER_ID = 804; private static final int RAW = 900; private static final int RAWQUERY = 901; private static final int SUBS_MEDIA = 1000; private static final int SUBS_MEDIA_ID = 1001; private static final int SUBS_MEDIA_VIDEO_ID = 1002; private static final int LIST = 1100; private static final int VIDEO_LIST = 1101; static { URI_MATCHER.addURI(VideoStore.AUTHORITY, "raw/*", RAW); URI_MATCHER.addURI(VideoStore.AUTHORITY, "rawquery", RAWQUERY); URI_MATCHER.addURI(VideoStore.AUTHORITY, "list", LIST); URI_MATCHER.addURI(VideoStore.AUTHORITY, "list/#", VIDEO_LIST); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/video/media", VIDEO_MEDIA); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/video/media/#", VIDEO_MEDIA_ID); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/video/thumbnails", VIDEO_THUMBNAILS); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/video/thumbnails/#", VIDEO_THUMBNAILS_ID); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/file", FILES); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/file/#", FILES_ID); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/smb_server/#", ARCHOS_SMB_SERVER_ID); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/smb_server", ARCHOS_SMB_SERVER); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/subtitles/media", SUBS_MEDIA); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/subtitles/media/#", SUBS_MEDIA_ID); URI_MATCHER.addURI(VideoStore.AUTHORITY, "*/subtitles/media/video/#", SUBS_MEDIA_VIDEO_ID); // 
registering ScraperProvider's uris here ScraperProvider.hookUriMatcher(URI_MATCHER); } @Override public int bulkInsert(Uri uri, ContentValues[] values) { if (DBG) Log.d(TAG, "bulkInsert " + uri); int match = URI_MATCHER.match(uri); // let ScraperProvider handle that if (ScraperProvider.handles(match)) return mScraperProvider.bulkInsert(uri, values); if (match != -1) { int result = 0; mVobHandler.onBeginTransaction(); SQLiteDatabase db = mDbHolder.get(); db.beginTransactionNonExclusive(); try { int numValues = values.length; int yield = 100; for (int i = 0; i < numValues; i++) { insert(uri, values[i]); if (yield-- < 0) { yield = 100; db.yieldIfContendedSafely(); } } result = numValues; db.setTransactionSuccessful(); } finally { db.endTransaction(); mVobHandler.onEndTransaction(); } if (result > 0) mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null); return result; } return 0; } @Override public ContentProviderResult[] applyBatch(ArrayList<ContentProviderOperation> operations) throws OperationApplicationException { if (DBG) Log.d(TAG, "applyBatch"); ContentProviderResult[] result = null; SQLiteDatabase db = mDbHolder.get(); mVobHandler.onBeginTransaction(); db.beginTransactionNonExclusive(); try { final int numOperations = operations.size(); final ContentProviderResult[] results = new ContentProviderResult[numOperations]; int yield = 100; for (int i = 0; i < numOperations; i++) { results[i] = operations.get(i).apply(this, results, i); if (yield-- < 0) { yield = 100; db.yieldIfContendedSafely(); } } result = results; db.setTransactionSuccessful(); } finally { db.endTransaction(); mVobHandler.onEndTransaction(); } if (result != null) { mCr.notifyChange(VideoStore.ALL_CONTENT_URI, null); mCr.notifyChange(ScraperStore.ALL_CONTENT_URI, null); } return result; } /** * Instances of this class are created and put in a queue to be executed sequentially to see if * it needs to (re)generate the thumbnails. 
*/ static class MediaThumbRequest { private static final String TAG = ArchosMediaCommon.TAG_PREFIX + "MediaThumbRequest"; private static final boolean DBG = false; static final int PRIORITY_LOW = 20; static final int PRIORITY_NORMAL = 10; static final int PRIORITY_HIGH = 5; static final int PRIORITY_CRITICAL = 0; static enum State {WAIT, DONE, CANCEL} private static final String[] THUMB_PROJECTION = new String[] { BaseColumns._ID // 0 }; ContentResolver mCr; Context mContext; String mPath; long mRequestTime = System.currentTimeMillis(); int mCallingPid = Binder.getCallingPid(); long mGroupId; int mPriority; Uri mUri; Uri mThumbUri; String mOrigColumnName; boolean mIsVideo; long mOrigId; State mState = State.WAIT; long mMagic; private static final Random sRandom = new Random(); static Comparator<MediaThumbRequest> getComparator() { return new Comparator<MediaThumbRequest>() { public int compare(MediaThumbRequest r1, MediaThumbRequest r2) { if (r1.mPriority != r2.mPriority) { return r1.mPriority < r2.mPriority ? -1 : 1; } return r1.mRequestTime == r2.mRequestTime ? 0 : r1.mRequestTime < r2.mRequestTime ? -1 : 1; } }; } MediaThumbRequest(Context ctx, String path, Uri uri, int priority, long magic) { mContext = ctx; mCr = ctx.getContentResolver(); mPath = path; mPriority = priority; mMagic = magic; mUri = uri; mIsVideo = "video".equals(uri.getPathSegments().get(1)); mOrigId = ContentUris.parseId(uri); mThumbUri = VideoStore.Video.Thumbnails.EXTERNAL_CONTENT_URI; mOrigColumnName = VideoStore.Video.Thumbnails.VIDEO_ID; // Only requests from Thumbnail API has this group_id parameter. In other cases, // mGroupId will always be zero and can't be canceled due to pid mismatch. 
String groupIdParam = uri.getQueryParameter("group_id"); if (groupIdParam != null) { mGroupId = Long.parseLong(groupIdParam); } } Uri updateDatabase(Bitmap thumbnail) { Cursor c = mCr.query(mThumbUri, THUMB_PROJECTION, mOrigColumnName+ " = " + mOrigId, null, null); if (c == null) return null; try { if (c.moveToFirst()) { return ContentUris.withAppendedId(mThumbUri, c.getLong(0)); } } finally { if (c != null) c.close(); } ContentValues values = new ContentValues(4); values.put(Video.Thumbnails.KIND, Integer.valueOf(Video.Thumbnails.MINI_KIND)); values.put(mOrigColumnName, Long.valueOf(mOrigId)); values.put(Video.Thumbnails.WIDTH, Integer.valueOf(thumbnail.getWidth())); values.put(Video.Thumbnails.HEIGHT, Integer.valueOf(thumbnail.getHeight())); try { if (DBG) Log.d(TAG, "insert Thumbnail " + mThumbUri + " val:" + values); return mCr.insert(mThumbUri, values); } catch (Exception ex) { Log.w(TAG, ex); return null; } } /** * Check if the corresponding thumbnail and mini-thumb have been created * for the given uri. This method creates both of them if they do not * exist yet or have been changed since last check. After thumbnails are * created, MINI_KIND thumbnail is stored in JPEG file and MICRO_KIND * thumbnail is stored in a random access file (MiniThumbFile). * * @throws IOException */ void execute() throws IOException { if(DBG) Log.d(TAG_DOCTOR_WHO," executing thumb creation "); long magic = mMagic; if (magic != 0) { Cursor c = null; ParcelFileDescriptor pfd = null; try { c = mCr.query(mThumbUri, THUMB_PROJECTION, mOrigColumnName + " = " + mOrigId, null, null); if (c != null && c.moveToFirst()) { pfd = mCr.openFileDescriptor( mThumbUri.buildUpon().appendPath(c.getString(0)).build(), "r"); } } catch (IOException ex) { // MINI_THUMBNAIL not exists, ignore the exception and generate one. 
} finally { if (c != null) c.close(); if (pfd != null) { pfd.close(); if (DBG) Log.d(TAG, "ThumbRequest, already exists."); } } return; } if (DBG) Log.d(TAG, "ThumbRequest, creating."); // If we can't retrieve the thumbnail, first check if there is one // embedded in the EXIF data. If not, or it's not big enough, // decompress the full size image. Bitmap bitmap = null; if (mPath != null) { if (mIsVideo) { // ARCHOS: this uses libavos if(DBG) Log.d(TAG_DOCTOR_WHO,"is video"); bitmap = createVideoThumbnail(mContext, mPath, Video.Thumbnails.MINI_KIND); if(DBG) Log.d(TAG_DOCTOR_WHO, "test 2 for bitmap "+String.valueOf(bitmap==null)); } if (bitmap == null) { Log.w(TAG, "Can't create mini thumbnail for " + mPath); return; } Uri uri = updateDatabase(bitmap); if (uri != null) { OutputStream thumbOut = mCr.openOutputStream(uri); bitmap.compress(Bitmap.CompressFormat.JPEG, 85, thumbOut); thumbOut.close(); if (DBG) Log.d(TAG, "ThumbRequest written bitmap"); // also put some random mini_thumb_magic do { magic = sRandom.nextLong(); } while (magic == 0); ContentValues values = new ContentValues(); values.put(VideoColumns.MINI_THUMB_MAGIC, magic); mCr.update(mUri, values, null, null); } } } /** * Create a video thumbnail for a video. May return null if the video is * corrupt or the format is not supported. 
* * @param filePath the path of video file * @param kind could be MINI_KIND or MICRO_KIND */ public static Bitmap createVideoThumbnail(Context ctx, String filePath, int kind) { Bitmap res = createVideoThumbnail_(ctx, filePath, kind); if (DBG) Log.d(TAG, "createVideoThumbnail: " + res); return res; } private static class Result{ Bitmap bm; public Result(){ } public void setBitmap(Bitmap bm){ this.bm= bm; } } public static Bitmap createVideoThumbnail_(final Context ctx, final String filePath, int kind) { Bitmap bitmap = null; final Result result = new Result(); final IMediaThumbnailService service = MediaThumbnailService.bind_sync(ctx); if ( service!= null) { try { Thread t = new Thread(){ public void run(){ try { if(DBG) Log.d(TAG_DOCTOR_WHO, "get Thumb for "+filePath); result.setBitmap(service.getThumbnail(filePath, -1)); } catch (RemoteException e) { if(DBG) Log.d(TAG_DOCTOR_WHO, "get Thumb for "+filePath+ " failed (RemoteException)"); Log.e(TAG, "can't get thumbnail, service crashed?", e); } } }; t.start(); t.join(); bitmap = result.bm; if (DBG) Log.d(TAG, "MediaThumbnailService gave us: " + bitmap); } catch (InterruptedException e) { e.printStackTrace(); } MediaThumbnailService.release(ctx); } else { Log.d(TAG, "no Thumbnail service, crash?"); if(DBG) Log.d(TAG_DOCTOR_WHO, "no Thumbnail service, crash?"); IMediaMetadataRetriever retriever = MediaFactory.createMetadataRetriever(ctx); try { retriever.setDataSource(filePath); if(DBG) Log.d(TAG_DOCTOR_WHO, "getFrameAtTime -1 "); bitmap = retriever.getFrameAtTime(-1); } catch (IllegalArgumentException ex) { // Assume this is a corrupt video file if(DBG) Log.d(TAG_DOCTOR_WHO, "IllegalArgumentException "+ex.toString()); } catch (RuntimeException ex) { // Assume this is a corrupt video file. if(DBG) Log.d(TAG_DOCTOR_WHO, "RuntimeException "+ex.toString()); } finally { try { retriever.release(); } catch (RuntimeException ex) { // Ignore failures while cleaning up. 
} } } if (bitmap == null) { if(DBG) Log.d(TAG_DOCTOR_WHO, "bitmap is null "); return null; } if(DBG) Log.d(TAG_DOCTOR_WHO, "bitmap is not null "); if (kind == Video.Thumbnails.MINI_KIND) { if(DBG) Log.d(TAG_DOCTOR_WHO, "MINI_KIND ? "); // Scale down the bitmap if it's too large. int width = bitmap.getWidth(); int height = bitmap.getHeight(); int max = Math.max(width, height); if (max > 512) { float scale = 512f / max; int w = Math.round(scale * width); int h = Math.round(scale * height); bitmap = Bitmap.createScaledBitmap(bitmap, w, h, true); if(DBG) Log.d(TAG_DOCTOR_WHO, "createScaledBitmap"); } } return bitmap; } } // ---------------------------------------------------------------------- // // ------------ Network State change handling --------------------------- // // ---------------------------------------------------------------------- // protected int mNetworkState = -1; protected static final IntentFilter INTENT_FILTER = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION); private boolean mNetworkStateReceiverRegistered = false; private final BroadcastReceiver mNetworkStateReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { NetworkState networkState = NetworkState.instance(context); networkState.updateFrom(context); int newState = (networkState.isConnected() ? 1 : 0 )+(networkState.hasLocalConnection() ? 
1 : 0); if (newState != mNetworkState) { if (DBG_NET) Log.d(TAG, "NetworkState changed " + mNetworkState + " -> " + newState); mNetworkState = newState; RemoteStateService.start(context); } } }; private final AppState.OnForeGroundListener mForeGroundListener = new AppState.OnForeGroundListener() { @Override public void onForeGroundState(Context applicationContext, boolean foreground) { if(foreground) VideoStoreImportService.start(applicationContext); handleForeGround(foreground); } }; protected void handleForeGround(boolean foreground) { final Context context = getContext(); if (foreground) { if (DBG_NET) Log.d(TAG, "App now in ForeGround"); // coming back to front: register network receiver if (!mNetworkStateReceiverRegistered) { context.registerReceiver(mNetworkStateReceiver, INTENT_FILTER); mNetworkStateReceiverRegistered = true; } UpnpServiceManager.restartUpnpServiceIfWasStartedBefore(); // get current network state NetworkState networkState = NetworkState.instance(context); networkState.updateFrom(context); mNetworkState = (networkState.isConnected() ? 1 : 0 )+(networkState.hasLocalConnection() ? 1 : 0); // force check RemoteStateService.start(context); } else { if (DBG_NET) Log.d(TAG, "App now in BackGround"); // going back to background, unregister receiver and set network state // to unknown if (mNetworkStateReceiverRegistered) { context.unregisterReceiver(mNetworkStateReceiver); mNetworkStateReceiverRegistered = false; } UpnpServiceManager.stopServiceIfLaunched(); mNetworkState = -1; } } }
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.profiler.modifier.orm.ibatis;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

import java.sql.SQLException;
import java.util.List;

import javassist.CtClass;

import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import com.ibatis.sqlmap.client.SqlMapClient;
import com.ibatis.sqlmap.engine.impl.SqlMapClientImpl;
import com.ibatis.sqlmap.engine.impl.SqlMapExecutorDelegate;
import com.ibatis.sqlmap.engine.scope.SessionScope;

import com.navercorp.pinpoint.common.bo.AnnotationBo;
import com.navercorp.pinpoint.common.bo.SpanEventBo;
import com.navercorp.pinpoint.common.trace.AnnotationKey;
import com.navercorp.pinpoint.test.junit4.BasePinpointTest;

/**
 * Tests that the iBatis {@code SqlMapClientImpl} modifier records span events for
 * query operations (insert/delete/update/queryForList/queryForObject): each traced
 * call gets a non-zero API id, distinct overloads get distinct API ids, the first
 * statement-id argument is recorded as a {@link AnnotationKey#CACHE_ARGS0} annotation,
 * and exceptions thrown by the client are attached to the span event.
 *
 * @author Hyun Jeong
 */
public class SqlMapClientImplModifierTest extends BasePinpointTest {

    /**
     * Delegate whose session scope is supplied by the {@link #mockSessionScope} mock,
     * letting individual tests control (or null out) the scope handed to
     * {@link SqlMapClientImpl} and thereby force failures.
     */
    public class MockSqlMapExecutorDelegate extends SqlMapExecutorDelegate {
        @Override
        public SessionScope beginSessionScope() {
            return mockSessionScope;
        }
    }

    @Mock
    private MockSqlMapExecutorDelegate mockSqlMapExecutorDelegate;
    @Mock
    private SessionScope mockSessionScope;

    @Before
    public void setUp() throws Exception {
        MockitoAnnotations.initMocks(this);
        // Default stubbing: a valid session scope so traced operations can proceed.
        when(this.mockSqlMapExecutorDelegate.beginSessionScope()).thenReturn(this.mockSessionScope);
    }

    @Test
    public void exceptionsThrownShouldBeTraced() throws Exception {
        // Given: a null session scope makes SqlMapClientImpl fail with an NPE.
        when(this.mockSqlMapExecutorDelegate.beginSessionScope()).thenReturn(null);
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When
        try {
            sqlMapClient.insert("insertShouldThrowNPE");
            fail("sqlMapClient.insert should throw NullPointerException");
        } catch (NullPointerException e) {
            // Then: the failure is still recorded as a span event carrying the exception.
            final List<SpanEventBo> spanEvents = getCurrentSpanEvents();
            assertThat(spanEvents.size(), is(1));
            final SpanEventBo exceptionSpanEventBo = spanEvents.get(0);
            assertThat(exceptionSpanEventBo.hasException(), is(true));
            assertThat(exceptionSpanEventBo.getExceptionId(), not(0));
        }
    }

    /**
     * Same failure scenario as {@link #exceptionsThrownShouldBeTraced()}, but verifying
     * that the original exception propagates to the caller (tracing must not swallow it).
     */
    @Test(expected = NullPointerException.class)
    public void insertShouldPropagateNPEWhenSessionScopeIsNull() throws SQLException {
        // Given
        when(this.mockSqlMapExecutorDelegate.beginSessionScope()).thenReturn(null);
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When: the NPE must escape this method.
        sqlMapClient.insert("insertShouldThrowNPE");
    }

    @Test
    public void nullParametersShouldNotBeTraced() throws Exception {
        // Given
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When: null statement ids are still traced as calls ...
        sqlMapClient.insert(null);
        sqlMapClient.queryForList(null);
        // Then: ... but no parameter annotation is recorded for them.
        final List<SpanEventBo> spanEvents = getCurrentSpanEvents();
        assertThat(spanEvents.size(), is(2));
        final SpanEventBo insertSpanEventBo = spanEvents.get(0);
        final SpanEventBo queryForListSpanEventBo = spanEvents.get(1);
        assertDistinctApiIds(insertSpanEventBo, queryForListSpanEventBo);
        assertNull(insertSpanEventBo.getAnnotationBoList());
        assertNull(queryForListSpanEventBo.getAnnotationBoList());
    }

    @Test
    public void sameApiCallsShouldHaveTheSameApiId() throws Exception {
        // Given
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When: the same overload is invoked twice with different statement ids.
        sqlMapClient.insert("insertA");
        sqlMapClient.insert("insertB");
        // Then: both span events share one API id (the id keys the method, not its args).
        final List<SpanEventBo> spanEvents = getCurrentSpanEvents();
        assertThat(spanEvents.size(), is(2));
        final SpanEventBo insertASpanEventBo = spanEvents.get(0);
        final SpanEventBo insertBSpanEventBo = spanEvents.get(1);
        assertThat(insertASpanEventBo.getApiId(), not(0));
        assertThat(insertBSpanEventBo.getApiId(), not(0));
        assertThat(insertASpanEventBo.getApiId(), is(insertBSpanEventBo.getApiId()));
    }

    @Test
    public void insertShouldBeTraced() throws Exception {
        // Given
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When
        sqlMapClient.insert("insertId");
        sqlMapClient.insert("insertId", new Object());
        // Then
        assertBothOverloadsTraced();
    }

    @Test
    public void deleteShouldBeTraced() throws Exception {
        // Given
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When
        sqlMapClient.delete("deleteId");
        sqlMapClient.delete("deleteId", new Object());
        // Then
        assertBothOverloadsTraced();
    }

    @Test
    public void updateShouldBeTraced() throws Exception {
        // Given
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When
        sqlMapClient.update("updateId");
        sqlMapClient.update("updateId", new Object());
        // Then
        assertBothOverloadsTraced();
    }

    @Test
    public void queryForListShouldBeTraced() throws Exception {
        // Given
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When
        sqlMapClient.queryForList("abc");
        // Then
        assertSingleCallTraced();
    }

    @Test
    public void queryForObjectShouldBeTraced() throws Exception {
        // Given
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When
        sqlMapClient.queryForObject("abrgrgfdaghertah", new Object());
        // Then
        assertSingleCallTraced();
    }

    @Ignore // Changed to trace only query operations
    @Test
    public void transactionsShouldBeTraced() throws Exception {
        // Given
        SqlMapClient sqlMapClient = new SqlMapClientImpl(this.mockSqlMapExecutorDelegate);
        // When
        sqlMapClient.startTransaction();
        sqlMapClient.commitTransaction();
        sqlMapClient.endTransaction();
        // Then
        final List<SpanEventBo> spanEvents = getCurrentSpanEvents();
        assertThat(spanEvents.size(), is(3));
        // Check Method: three distinct transaction operations, three distinct API ids.
        final SpanEventBo startTransactionSpanEventBo = spanEvents.get(0);
        final SpanEventBo commitTransactionSpanEventBo = spanEvents.get(1);
        final SpanEventBo endTransactionSpanEventBo = spanEvents.get(2);
        assertThat(startTransactionSpanEventBo.getApiId(), not(0));
        assertThat(commitTransactionSpanEventBo.getApiId(), not(0));
        assertThat(endTransactionSpanEventBo.getApiId(), not(0));
        assertThat(startTransactionSpanEventBo.getApiId(), not(commitTransactionSpanEventBo.getApiId()));
        assertThat(commitTransactionSpanEventBo.getApiId(), not(endTransactionSpanEventBo.getApiId()));
        assertThat(endTransactionSpanEventBo.getApiId(), not(startTransactionSpanEventBo.getApiId()));
        // Check Parameter: transaction calls carry no statement-id annotation.
        assertNull(startTransactionSpanEventBo.getAnnotationBoList());
        assertNull(commitTransactionSpanEventBo.getAnnotationBoList());
        assertNull(endTransactionSpanEventBo.getAnnotationBoList());
    }

    /** Asserts a single span event was recorded with an API id and a CACHE_ARGS0 annotation. */
    private void assertSingleCallTraced() {
        final List<SpanEventBo> spanEvents = getCurrentSpanEvents();
        assertThat(spanEvents.size(), is(1));
        final SpanEventBo apiCallSpanEventBo = spanEvents.get(0);
        assertThat(apiCallSpanEventBo.getApiId(), not(0));
        assertArgs0CacheAnnotation(apiCallSpanEventBo);
    }

    /**
     * Asserts the two most recent span events come from two different overloads:
     * distinct, non-zero API ids, each with a CACHE_ARGS0 annotation for the statement id.
     */
    private void assertBothOverloadsTraced() {
        final List<SpanEventBo> spanEvents = getCurrentSpanEvents();
        assertThat(spanEvents.size(), is(2));
        final SpanEventBo oneArgSpanEvent = spanEvents.get(0);
        final SpanEventBo twoArgSpanEvent = spanEvents.get(1);
        assertDistinctApiIds(oneArgSpanEvent, twoArgSpanEvent);
        assertArgs0CacheAnnotation(oneArgSpanEvent);
        assertArgs0CacheAnnotation(twoArgSpanEvent);
    }

    /** Asserts both span events carry non-zero API ids that differ from each other. */
    private static void assertDistinctApiIds(SpanEventBo first, SpanEventBo second) {
        assertThat(first.getApiId(), not(0));
        assertThat(second.getApiId(), not(0));
        assertThat(first.getApiId(), not(second.getApiId()));
    }

    /** Asserts the span event carries exactly one annotation, keyed as CACHE_ARGS0. */
    private static void assertArgs0CacheAnnotation(SpanEventBo spanEvent) {
        final List<AnnotationBo> annotations = spanEvent.getAnnotationBoList();
        assertThat(annotations.size(), is(1));
        final AnnotationBo parameterAnnotation = annotations.get(0);
        assertThat(parameterAnnotation.getKey(), is(AnnotationKey.CACHE_ARGS0.getCode()));
    }
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.bpmn2.xml; import java.io.StringReader; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.drools.compiler.compiler.xml.XmlDumper; import org.drools.core.xml.Handler; import org.drools.core.xml.SemanticModule; import org.drools.core.xml.SemanticModules; import org.drools.mvel.java.JavaDialect; import org.jbpm.bpmn2.core.Association; import org.jbpm.bpmn2.core.DataStore; import org.jbpm.bpmn2.core.Definitions; import org.jbpm.bpmn2.core.Error; import org.jbpm.bpmn2.core.ItemDefinition; import org.jbpm.compiler.xml.XmlProcessReader; import org.jbpm.process.core.ContextContainer; import org.jbpm.process.core.Work; import org.jbpm.process.core.context.swimlane.Swimlane; import org.jbpm.process.core.context.swimlane.SwimlaneContext; import org.jbpm.process.core.context.variable.Variable; import org.jbpm.process.core.context.variable.VariableScope; import org.jbpm.process.core.datatype.impl.type.ObjectDataType; import org.jbpm.process.core.event.EventFilter; import org.jbpm.process.core.event.EventTypeFilter; import org.jbpm.process.core.impl.ProcessImpl; import org.jbpm.process.core.impl.XmlProcessDumper; import org.jbpm.ruleflow.core.RuleFlowProcess; import org.jbpm.workflow.core.Constraint; import 
org.jbpm.workflow.core.impl.ConnectionImpl;
import org.jbpm.workflow.core.impl.DroolsConsequenceAction;
import org.jbpm.workflow.core.node.ActionNode;
import org.jbpm.workflow.core.node.CompositeNode;
import org.jbpm.workflow.core.node.EndNode;
import org.jbpm.workflow.core.node.EventNode;
import org.jbpm.workflow.core.node.EventTrigger;
import org.jbpm.workflow.core.node.FaultNode;
import org.jbpm.workflow.core.node.ForEachNode;
import org.jbpm.workflow.core.node.HumanTaskNode;
import org.jbpm.workflow.core.node.Join;
import org.jbpm.workflow.core.node.Split;
import org.jbpm.workflow.core.node.StartNode;
import org.jbpm.workflow.core.node.Trigger;
import org.jbpm.workflow.core.node.WorkItemNode;
import org.kie.api.definition.process.Connection;
import org.kie.api.definition.process.Node;
import org.kie.api.definition.process.NodeContainer;
import org.kie.api.definition.process.Process;
import org.kie.api.definition.process.WorkflowProcess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Serializes a jBPM {@link WorkflowProcess} definition back into BPMN 2.0 XML.
 * Node-level serialization is delegated to the handlers registered in the
 * {@link BPMNSemanticModule}; this class emits the surrounding document
 * structure: definitions header, item definitions, interfaces, escalations,
 * errors, data stores, the process element itself and, optionally, the
 * BPMN DI diagram section.
 */
public class XmlBPMNProcessDumper implements XmlProcessDumper {

    // URIs identifying the expression language used on conditionExpression elements
    public static final String JAVA_LANGUAGE = "http://www.java.com/java";
    public static final String MVEL_LANGUAGE = "http://www.mvel.org/2.0";
    public static final String RULE_LANGUAGE = "http://www.jboss.org/drools/rule";
    public static final String XPATH_LANGUAGE = "http://www.w3.org/1999/XPath";
    public static final String JAVASCRIPT_LANGUAGE = "http://www.javascript.com/javascript";
    public static final String FEEL_LANGUAGE = "http://www.omg.org/spec/FEEL/20140401";

    // modes controlling how graphical/meta information is emitted
    public static final int NO_META_DATA = 0;
    public static final int META_DATA_AS_NODE_PROPERTY = 1;
    public static final int META_DATA_USING_DI = 2;

    public static final XmlBPMNProcessDumper INSTANCE = new XmlBPMNProcessDumper();

    private static final Logger logger = LoggerFactory.getLogger(XmlBPMNProcessDumper.class);

    private final static String EOL = System.getProperty( "line.separator" );

    private SemanticModule semanticModule;
    private int metaDataType = META_DATA_USING_DI;

    private XmlBPMNProcessDumper() {
        semanticModule = new BPMNSemanticModule();
    }

    /** Dumps the process including full BPMN DI diagram information. */
    public String dump(WorkflowProcess process) {
        return dump(process, META_DATA_USING_DI);
    }

    // NOTE(review): the includeMeta flag is ignored; this overload always emits
    // meta data as node properties — confirm callers rely on that behavior.
    public String dump(WorkflowProcess process, boolean includeMeta) {
        return dump(process, META_DATA_AS_NODE_PROPERTY);
    }

    /** Dumps the process using the given meta-data mode (see constants above). */
    public String dump(WorkflowProcess process, int metaDataType) {
        StringBuilder xmlDump = new StringBuilder();
        visitProcess(process, xmlDump, metaDataType);
        return xmlDump.toString();
    }

    public int getMetaDataType() {
        return metaDataType;
    }

    public void setMetaDataType(int metaDataType) {
        this.metaDataType = metaDataType;
    }

    // variable names already emitted during the current dump run, to avoid duplicates
    private Set<String> visitedVariables;

    /**
     * Writes the complete definitions document for the given process:
     * header/namespaces, item definitions, interfaces, escalations, errors,
     * data stores, the process element and (in DI mode) the BPMN diagram.
     */
    protected void visitProcess(WorkflowProcess process, StringBuilder xmlDump, int metaDataType) {
        String targetNamespace = (String) process.getMetaData().get("TargetNamespace");
        if (targetNamespace == null) {
            targetNamespace = "http://www.jboss.org/drools";
        }
        xmlDump.append(
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?> " + EOL +
            "<definitions id=\"Definition\"" + EOL +
            " targetNamespace=\"" + targetNamespace + "\"" + EOL +
            " typeLanguage=\"http://www.java.com/javaTypes\"" + EOL +
            " expressionLanguage=\"http://www.mvel.org/2.0\"" + EOL +
            " xmlns=\"http://www.omg.org/spec/BPMN/20100524/MODEL\"" + EOL +
            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + EOL +
            " xsi:schemaLocation=\"http://www.omg.org/spec/BPMN/20100524/MODEL BPMN20.xsd\"" + EOL +
            " xmlns:g=\"http://www.jboss.org/drools/flow/gpd\"" + EOL +
            // DI namespaces only when the diagram section will be emitted
            (metaDataType == META_DATA_USING_DI ?
                " xmlns:bpmndi=\"http://www.omg.org/spec/BPMN/20100524/DI\"" + EOL +
                " xmlns:dc=\"http://www.omg.org/spec/DD/20100524/DC\"" + EOL +
                " xmlns:di=\"http://www.omg.org/spec/DD/20100524/DI\"" + EOL : "") +
            " xmlns:tns=\"http://www.jboss.org/drools\">" + EOL + EOL);
        // item definitions
        this.visitedVariables = new HashSet<String>();
        VariableScope variableScope = (VariableScope) ((org.jbpm.process.core.Process) process).getDefaultContext(VariableScope.VARIABLE_SCOPE);
        Set<String> dumpedItemDefs = new HashSet<String>();
        Map<String, ItemDefinition> itemDefs = (Map<String, ItemDefinition>) process.getMetaData().get("ItemDefinitions");
        if (itemDefs != null) {
            for (ItemDefinition def : itemDefs.values()) {
                xmlDump.append( " <itemDefinition id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(def.getId()) + "\" ");
                if (def.getStructureRef() != null && !"java.lang.Object".equals(def.getStructureRef())) {
                    xmlDump.append("structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(def.getStructureRef()) + "\" ");
                }
                xmlDump.append("/>" + EOL);
                // interned so later duplicate checks match
                dumpedItemDefs.add(def.getId().intern());
            }
        }
        visitVariableScope(variableScope, "_", xmlDump, dumpedItemDefs);
        visitSubVariableScopes(process.getNodes(), xmlDump, dumpedItemDefs);
        visitInterfaces(process.getNodes(), xmlDump);
        visitEscalations(process.getNodes(), xmlDump, new ArrayList<String>());
        Definitions def = (Definitions) process.getMetaData().get("Definitions");
        visitErrors(def, xmlDump);
        //data stores
        if (def != null && def.getDataStores() != null) {
            for (DataStore dataStore : def.getDataStores()) {
                visitDataStore(dataStore, xmlDump);
            }
        }
        // the process itself
        xmlDump.append(" <process processType=\"Private\" isExecutable=\"true\" ");
        if (process.getId() == null || process.getId().trim().length() == 0) {
            // a process must have an id; fall back to a default one
            ((ProcessImpl) process).setId("com.sample.bpmn2");
        }
        xmlDump.append("id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(process.getId()) + "\" ");
        if (process.getName() != null) {
            xmlDump.append("name=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(process.getName()) + "\" ");
        }
        String packageName = process.getPackageName();
        if (packageName != null && !"org.drools.bpmn2".equals(packageName)) {
            xmlDump.append("tns:packageName=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(packageName) + "\" ");
        }
        if (((org.jbpm.workflow.core.WorkflowProcess) process).isDynamic()) {
            xmlDump.append("tns:adHoc=\"true\" ");
        }
        String version = process.getVersion();
        if (version != null && !"".equals(version)) {
            xmlDump.append("tns:version=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(version) + "\" ");
        }
        // TODO: package, version
        xmlDump.append(">" + EOL + EOL);
        visitHeader(process, xmlDump, metaDataType);
        List<org.jbpm.workflow.core.Node> processNodes = new ArrayList<org.jbpm.workflow.core.Node>();
        for( Node procNode : process.getNodes()) {
            processNodes.add((org.jbpm.workflow.core.Node) procNode);
        }
        visitNodes(processNodes, xmlDump, metaDataType);
        visitConnections(process.getNodes(), xmlDump, metaDataType);
        // add associations
        List<Association> associations = (List<Association>) process.getMetaData().get(ProcessHandler.ASSOCIATIONS);
        if( associations != null ) {
            for (Association association : associations ) {
                visitAssociation(association, xmlDump);
            }
        }
        xmlDump.append(" </process>" + EOL + EOL);
        if (metaDataType == META_DATA_USING_DI) {
            xmlDump.append(
                " <bpmndi:BPMNDiagram>" + EOL +
                " <bpmndi:BPMNPlane bpmnElement=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(process.getId()) + "\" >" + EOL);
            visitNodesDi(process.getNodes(), xmlDump);
            visitConnectionsDi(process.getNodes(), xmlDump);
            xmlDump.append(
                " </bpmndi:BPMNPlane>" + EOL +
                " </bpmndi:BPMNDiagram>" + EOL + EOL);
        }
        xmlDump.append("</definitions>");
    }

    /**
     * Emits an itemDefinition plus a dataStore element for the given data
     * store. The itemDefinition id is the itemSubjectRef with everything up to
     * and including ':' stripped.
     */
    private void visitDataStore(DataStore dataStore, StringBuilder xmlDump) {
        String itemSubjectRef = dataStore.getItemSubjectRef();
        String itemDefId = itemSubjectRef.substring(itemSubjectRef.indexOf(':') + 1);
        xmlDump.append(" <itemDefinition id=\"" +
itemDefId + "\" ");
        if (dataStore.getType() != null && !"java.lang.Object".equals(dataStore.getType().getStringType())) {
            xmlDump.append("structureRef=\"" + XmlDumper.replaceIllegalChars(dataStore.getType().getStringType()) + "\" ");
        }
        xmlDump.append("/>" + EOL);
        xmlDump.append(" <dataStore name=\"" + XmlDumper.replaceIllegalChars(dataStore.getName()) + "\"");
        xmlDump.append(" id=\"" + XmlDumper.replaceIllegalChars(dataStore.getId()) + "\"");
        xmlDump.append(" itemSubjectRef=\"" + XmlDumper.replaceIllegalChars(dataStore.getItemSubjectRef()) + "\"");
        xmlDump.append("/>" + EOL);
    }

    /** Emits a single association element (id, sourceRef, targetRef). */
    public void visitAssociation(Association association, StringBuilder xmlDump) {
        xmlDump.append(" <association id=\"" + association.getId() + "\" ");
        xmlDump.append(" sourceRef=\"" + association.getSourceRef() + "\" ");
        xmlDump.append(" targetRef=\"" + association.getTargetRef() + "\" ");
        xmlDump.append("/>" + EOL);
    }

    /**
     * Emits one itemDefinition per variable in the scope, skipping item ids
     * already present in dumpedItemDefs and variable names already emitted in
     * this dump run (tracked in visitedVariables).
     *
     * @param prefix prepended to the variable name when the variable carries
     *               no explicit "ItemSubjectRef" meta data
     */
    private void visitVariableScope(VariableScope variableScope, String prefix, StringBuilder xmlDump, Set<String> dumpedItemDefs) {
        if (variableScope != null && !variableScope.getVariables().isEmpty()) {
            int variablesAdded = 0;
            for (Variable variable: variableScope.getVariables()) {
                String itemDefId = (String) variable.getMetaData("ItemSubjectRef");
                if( itemDefId == null ) {
                    itemDefId = prefix + variable.getName();
                }
                // skip item definitions that were already dumped
                if( itemDefId != null && ! dumpedItemDefs.add(itemDefId.intern()) ) {
                    continue;
                }
                // skip variables that were already dumped under another scope
                if( ! visitedVariables.add(variable.getName()) ) {
                    continue;
                }
                ++variablesAdded;
                xmlDump.append( " <itemDefinition id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(itemDefId) + "\" ");
                if (variable.getType() != null && !"java.lang.Object".equals(variable.getType().getStringType())) {
                    xmlDump.append("structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(variable.getType().getStringType()) + "\" ");
                }
                xmlDump.append("/>" + EOL);
            }
            if( variablesAdded > 0 ) {
                xmlDump.append(EOL);
            }
        }
    }

    /** Recursively dumps the variable scopes of nested (composite) nodes. */
    private void visitSubVariableScopes(Node[] nodes, StringBuilder xmlDump, Set<String> dumpedItemDefs) {
        for (Node node: nodes) {
            if (node instanceof ContextContainer) {
                VariableScope variableScope = (VariableScope) ((ContextContainer) node).getDefaultContext(VariableScope.VARIABLE_SCOPE);
                if (variableScope != null) {
                    visitVariableScope(variableScope, XmlBPMNProcessDumper.getUniqueNodeId(node) + "-", xmlDump, dumpedItemDefs);
                }
            }
            if (node instanceof NodeContainer) {
                visitSubVariableScopes(((NodeContainer) node).getNodes(), xmlDump, dumpedItemDefs);
            }
        }
    }

    /** Emits the laneSet element for the process' swimlanes, if any. */
    private void visitLanes(WorkflowProcess process, StringBuilder xmlDump) {
        // lanes
        Collection<Swimlane> swimlanes = ((SwimlaneContext) ((org.jbpm.workflow.core.WorkflowProcess) process) .getDefaultContext(SwimlaneContext.SWIMLANE_SCOPE)).getSwimlanes();
        if (!swimlanes.isEmpty()) {
            xmlDump.append(" <laneSet>" + EOL);
            for (Swimlane swimlane: swimlanes) {
                xmlDump.append(" <lane name=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(swimlane.getName()) + "\" >" + EOL);
                visitLane(process, swimlane.getName(), xmlDump);
                xmlDump.append(" </lane>" + EOL);
            }
            xmlDump.append(" </laneSet>" + EOL);
        }
    }

    /**
     * Emits flowNodeRef entries for every node assigned to the given lane,
     * recursing into nested containers. Human tasks use their swimlane
     * property; all other nodes use their "Lane" meta data.
     */
    private void visitLane(NodeContainer container, String lane, StringBuilder xmlDump) {
        for (Node node: container.getNodes()) {
            if (node instanceof HumanTaskNode) {
                String swimlane = ((HumanTaskNode) node).getSwimlane();
                if (lane.equals(swimlane)) {
                    xmlDump.append(" <flowNodeRef>" + XmlBPMNProcessDumper.getUniqueNodeId(node) + "</flowNodeRef>" + EOL);
                }
            } else {
                String swimlane = (String) node.getMetaData().get("Lane");
                if (lane.equals(swimlane)) {
                    xmlDump.append(" <flowNodeRef>" + XmlBPMNProcessDumper.getUniqueNodeId(node) + "</flowNodeRef>" + EOL);
                }
            }
            if (node instanceof NodeContainer) {
                visitLane((NodeContainer) node, lane, xmlDump);
            }
        }
    }

    /**
     * Emits the extensionElements header (imports, globals, custom meta data),
     * then the process variables and the lanes.
     */
    protected void visitHeader(WorkflowProcess process, StringBuilder xmlDump, int metaDataType) {
        Map<String, Object> metaData = getMetaData(process.getMetaData());
        Set<String> imports = ((org.jbpm.process.core.Process) process).getImports();
        Map<String, String> globals = ((org.jbpm.process.core.Process) process).getGlobals();
        if ((imports != null && !imports.isEmpty()) || (globals != null && globals.size() > 0) || !metaData.isEmpty()) {
            xmlDump.append(" <extensionElements>" + EOL);
            if (imports != null) {
                for (String s: imports) {
                    xmlDump.append(" <tns:import name=\"" + s + "\" />" + EOL);
                }
            }
            if (globals != null) {
                for (Map.Entry<String, String> global: globals.entrySet()) {
                    xmlDump.append(" <tns:global identifier=\"" + global.getKey() + "\" type=\"" + global.getValue() + "\" />" + EOL);
                }
            }
            writeMetaData(getMetaData(process.getMetaData()), xmlDump);
            xmlDump.append(" </extensionElements>" + EOL);
        }
        // TODO: function imports
        // TODO: exception handlers
        VariableScope variableScope = (VariableScope) ((org.jbpm.process.core.Process) process).getDefaultContext(VariableScope.VARIABLE_SCOPE);
        if (variableScope != null) {
            visitVariables(variableScope.getVariables(), xmlDump);
        }
        visitLanes(process, xmlDump);
    }

    /**
     * Emits process variables: plain variables as property elements, variables
     * marked with "DataObject" meta data as dataObject elements.
     */
    public static void visitVariables(List<Variable> variables, StringBuilder xmlDump) {
        if (!variables.isEmpty()) {
            xmlDump.append(" <!-- process variables -->" + EOL);
            for (Variable variable: variables) {
                if (variable.getMetaData("DataObject") == null) {
                    xmlDump.append(" <property id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(variable.getName()) + "\" ");
                    if (variable.getType() != null) {
                        xmlDump.append("itemSubjectRef=\"" +
XmlBPMNProcessDumper.replaceIllegalCharsAttribute((String) variable.getMetaData("ItemSubjectRef")) + "\"" ); } // TODO: value? Map<String, Object> metaData = getMetaData(variable.getMetaData()); if (metaData.isEmpty()) { xmlDump.append("/>" + EOL); } else { xmlDump.append(">" + EOL + " <extensionElements>" + EOL); writeMetaData(metaData, xmlDump); xmlDump.append(" </extensionElements>" + EOL + " </property>" + EOL); } } } for (Variable variable: variables) { if (variable.getMetaData("DataObject") != null) { xmlDump.append(" <dataObject id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(variable.getName()) + "\" "); if (variable.getType() != null) { xmlDump.append("itemSubjectRef=\"_" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(variable.getName()) + "\"" ); } // TODO: value? Map<String, Object> metaData = getMetaData(variable.getMetaData()); if (metaData.isEmpty()) { xmlDump.append("/>" + EOL); } else { xmlDump.append(">" + EOL + " <extensionElements>" + EOL); writeMetaData(metaData, xmlDump); xmlDump.append(" </extensionElements>" + EOL + " </property>" + EOL); } } } xmlDump.append(EOL); } } public static Map<String, Object> getMetaData(Map<String, Object> input) { Map<String, Object> metaData = new HashMap<String, Object>(); for (Map.Entry<String, Object> entry: input.entrySet()) { String name = entry.getKey(); if (entry.getKey().startsWith("custom") && entry.getValue() instanceof String) { metaData.put(name, entry.getValue()); } } return metaData; } public static void writeMetaData(Map<String, Object> metaData, final StringBuilder xmlDump) { if (!metaData.isEmpty()) { for (Map.Entry<String, Object> entry: metaData.entrySet()) { xmlDump.append(" <tns:metaData name=\"" + entry.getKey() + "\">" + EOL); xmlDump.append(" <tns:metaValue>" + entry.getValue() + "</tns:metaValue>" + EOL); xmlDump.append(" </tns:metaData>" + EOL); } } } protected void visitInterfaces(Node[] nodes, StringBuilder xmlDump) { for (Node node: nodes) { if (node instanceof 
WorkItemNode) {
                Work work = ((WorkItemNode) node).getWork();
                if (work != null) {
                    if ("Service Task".equals(work.getName())) {
                        // service tasks: in-message item definition + message + interface/operation
                        String interfaceName = (String) work.getParameter("Interface");
                        if (interfaceName == null) {
                            interfaceName = "";
                        }
                        String interfaceRef = (String) work.getParameter("interfaceImplementationRef");
                        if (interfaceRef == null) {
                            interfaceRef = "";
                        }
                        String operationName = (String) work.getParameter("Operation");
                        if (operationName == null) {
                            operationName = "";
                        }
                        String operationRef = (String) work.getParameter("operationImplementationRef");
                        if (operationRef == null) {
                            operationRef = "";
                        }
                        String parameterType = (String) work.getParameter("ParameterType");
                        if (parameterType == null) {
                            parameterType = "";
                        }
                        xmlDump.append(
                            " <itemDefinition id=\"" + getUniqueNodeId(node) + "_InMessageType\" " +
                            ("".equals(parameterType) || "java.lang.Object".equals(parameterType) ? "" : "structureRef=\"" + parameterType + "\" ") + "/>" + EOL +
                            " <message id=\"" + getUniqueNodeId(node) + "_InMessage\" itemRef=\"" + getUniqueNodeId(node) + "_InMessageType\" />" + EOL +
                            " <interface id=\"" + getUniqueNodeId(node) + "_ServiceInterface\" name=\"" + interfaceName + "\" implementationRef=\""+ interfaceRef+"\" >" + EOL +
                            " <operation id=\"" + getUniqueNodeId(node) + "_ServiceOperation\" name=\"" + operationName + "\" implementationRef=\""+ operationRef+"\" >" + EOL +
                            " <inMessageRef>" + getUniqueNodeId(node) + "_InMessage</inMessageRef>" + EOL +
                            " </operation>" + EOL +
                            " </interface>" + EOL + EOL);
                    } else if ("Send Task".equals(work.getName())) {
                        // send tasks: message-type item definition + message
                        String messageType = (String) work.getParameter("MessageType");
                        if (messageType == null) {
                            messageType = "";
                        }
                        xmlDump.append(
                            " <itemDefinition id=\"" + getUniqueNodeId(node) + "_MessageType\" " +
                            ("".equals(messageType) || "java.lang.Object".equals(messageType) ? "" : "structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageType) + "\" ") + "/>" + EOL +
                            " <message id=\"" + getUniqueNodeId(node) + "_Message\" itemRef=\"" + getUniqueNodeId(node) + "_MessageType\" />" + EOL + EOL);
                    } else if ("Receive Task".equals(work.getName())) {
                        // receive tasks: the message id comes from the work parameter
                        String messageId = (String) work.getParameter("MessageId");
                        String messageType = (String) work.getParameter("MessageType");
                        if (messageType == null) {
                            messageType = "";
                        }
                        xmlDump.append(
                            " <itemDefinition id=\"" + getUniqueNodeId(node) + "_MessageType\" " +
                            ("".equals(messageType) || "java.lang.Object".equals(messageType) ? "" : "structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageType) + "\" ") + "/>" + EOL +
                            " <message id=\"" + messageId + "\" itemRef=\"" + getUniqueNodeId(node) + "_MessageType\" />" + EOL + EOL);
                    }
                }
            } else if (node instanceof EndNode) {
                // message end events
                String messageType = (String) node.getMetaData().get("MessageType");
                if (messageType != null) {
                    xmlDump.append(
                        " <itemDefinition id=\"" + getUniqueNodeId(node) + "_MessageType\" " +
                        ("".equals(messageType) || "java.lang.Object".equals(messageType) ? "" : "structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageType) + "\" ") + "/>" + EOL +
                        " <message id=\"" + getUniqueNodeId(node) + "_Message\" itemRef=\"" + getUniqueNodeId(node) + "_MessageType\" />" + EOL + EOL);
                }
            } else if (node instanceof ActionNode) {
                // message intermediate throw events
                String messageType = (String) node.getMetaData().get("MessageType");
                if (messageType != null) {
                    xmlDump.append(
                        " <itemDefinition id=\"" + getUniqueNodeId(node) + "_MessageType\" " +
                        ("".equals(messageType) || "java.lang.Object".equals(messageType) ? "" : "structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageType) + "\" ") + "/>" + EOL +
                        " <message id=\"" + getUniqueNodeId(node) + "_Message\" itemRef=\"" + getUniqueNodeId(node) + "_MessageType\" />" + EOL + EOL);
                }
            } else if (node instanceof EventNode) {
                // message intermediate catch events: strip the "Message-" prefix
                // of the event type to obtain the message ref
                List<EventFilter> filters = ((EventNode) node).getEventFilters();
                if (filters.size() > 0) {
                    String messageRef = ((EventTypeFilter) filters.get(0)).getType();
                    if (messageRef.startsWith("Message-")) {
                        messageRef = messageRef.substring(8);
                        String messageType = (String) node.getMetaData().get("MessageType");
                        xmlDump.append(
                            " <itemDefinition id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageRef) + "Type\" " +
                            ("".equals(messageType) || "java.lang.Object".equals(messageType) ? "" : "structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageType) + "\" ") + "/>" + EOL +
                            " <message id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageRef) + "\" itemRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageRef) + "Type\" />" + EOL + EOL);
                    }
                }
            } else if (node instanceof StartNode) {
                // message start events
                StartNode startNode = (StartNode) node;
                if (startNode.getTriggers() != null && !startNode.getTriggers().isEmpty()) {
                    Trigger trigger = startNode.getTriggers().get(0);
                    if (trigger instanceof EventTrigger) {
                        String eventType = ((EventTypeFilter) ((EventTrigger) trigger).getEventFilters().get(0)).getType();
                        if (eventType.startsWith("Message-")) {
                            eventType = eventType.substring(8);
                            String messageType = (String) node.getMetaData().get("MessageType");
                            xmlDump.append(
                                " <itemDefinition id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(eventType) + "Type\" " +
                                ("".equals(messageType) || "java.lang.Object".equals(messageType) ? "" : "structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(messageType) + "\" ") + "/>" + EOL +
                                " <message id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(eventType) + "\" itemRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(eventType) + "Type\" />" + EOL + EOL);
                        }
                    }
                }
            } else if (node instanceof ForEachNode) {
                // multi-instance nodes: item definition for the iteration variable type
                ForEachNode forEachNode = (ForEachNode) node;
                String type = null;
                if (forEachNode.getVariableType() instanceof ObjectDataType) {
                    type = ((ObjectDataType) forEachNode.getVariableType()).getClassName();
                }
                xmlDump.append(
                    " <itemDefinition id=\"" + XmlBPMNProcessDumper.getUniqueNodeId(forEachNode) + "_multiInstanceItemType\" " +
                    (type == null || "java.lang.Object".equals(type) ? "" : "structureRef=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(type) + "\" ") + "/>" + EOL + EOL);
            }
            if (node instanceof CompositeNode) {
                visitInterfaces(((CompositeNode) node).getNodes(), xmlDump);
            }
        }
    }

    /**
     * Collects and emits escalation declarations found on fault nodes,
     * escalation-throwing action nodes and escalation catch events; the
     * escalations list accumulates codes already emitted so each escalation is
     * declared only once.
     */
    protected void visitEscalations(Node[] nodes, StringBuilder xmlDump, List<String> escalations) {
        for (Node node: nodes) {
            if (node instanceof FaultNode) {
                FaultNode faultNode = (FaultNode) node;
                if (!faultNode.isTerminateParent()) {
                    String escalationCode = faultNode.getFaultName();
                    if (!escalations.contains(escalationCode)) {
                        escalations.add(escalationCode);
                        xmlDump.append(
                            " <escalation id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(escalationCode) + "\" escalationCode=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(escalationCode) + "\" />" + EOL);
                    }
                }
            } else if (node instanceof ActionNode) {
                ActionNode actionNode = (ActionNode) node;
                if(actionNode.getAction() instanceof DroolsConsequenceAction) {
                    DroolsConsequenceAction action = (DroolsConsequenceAction) actionNode.getAction();
                    if (action != null) {
                        String s = action.getConsequence();
                        // recognize generated escalation-throwing code and recover the
                        // escalation code from the consequence string.
                        // NOTE(review): 327 is assumed to equal the length of the literal
                        // prefix below — confirm they stay in sync if the prefix changes.
                        if (s.startsWith("org.drools.core.process.instance.context.exception.ExceptionScopeInstance scopeInstance = (org.drools.core.process.instance.context.exception.ExceptionScopeInstance) ((org.drools.workflow.instance.NodeInstance) kcontext.getNodeInstance()).resolveContextInstance(org.drools.core.process.core.context.exception.ExceptionScope.EXCEPTION_SCOPE, \"")) {
                            s = s.substring(327);
                            String type = s.substring(0, s.indexOf("\""));
                            if (!escalations.contains(type)) {
                                escalations.add(type);
                                xmlDump.append(
                                    " <escalation id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(type) + "\" escalationCode=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(type) + "\" />" + EOL);
                            }
                        }
                    }
                } else {
                    logger.warn("Cannot serialize custom implementation of the Action interface to XML");
                }
            } else if (node instanceof EventNode) {
                EventNode eventNode = (EventNode) node;
                String type = (String) eventNode.getMetaData("EscalationEvent");
                if (type != null) {
                    if (!escalations.contains(type)) {
                        escalations.add(type);
                        xmlDump.append(
                            " <escalation id=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(type) + "\" escalationCode=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(type) + "\" />" + EOL);
                    }
                }
            }
            if (node instanceof CompositeNode) {
                visitEscalations(((CompositeNode) node).getNodes(), xmlDump, escalations);
            }
        }
    }

    /** Emits error declarations (id, errorCode, structureRef) from the definitions. */
    protected void visitErrors(Definitions definitions, StringBuilder xmlDump) {
        if( definitions == null ) {
            return;
        }
        List<Error> errors = definitions.getErrors();
        if( errors == null || errors.isEmpty() ) {
            return;
        }
        for( org.jbpm.bpmn2.core.Error error : errors ) {
            String id = XmlBPMNProcessDumper.replaceIllegalCharsAttribute(error.getId());
            String code = error.getErrorCode();
            xmlDump.append(" <error id=\"" + id + "\"" );
            if (error.getErrorCode() != null) {
                code = XmlBPMNProcessDumper.replaceIllegalCharsAttribute(code);
                xmlDump.append(" errorCode=\"" + code + "\"" );
            }
            String structureRef = error.getStructureRef();
            if( structureRef != null ) {
                structureRef = XmlBPMNProcessDumper.replaceIllegalCharsAttribute(structureRef);
                xmlDump.append(" structureRef=\"" + structureRef + "\"");
            }
            xmlDump.append("/>" + EOL );
        }
    }

    public void
visitNodes(List<org.jbpm.workflow.core.Node> nodes, StringBuilder xmlDump, int metaDataType) {
        xmlDump.append(" <!-- nodes -->" + EOL);
        for (Node node: nodes) {
            visitNode(node, xmlDump, metaDataType);
        }
        xmlDump.append(EOL);
    }

    /** Serializes a single node via the handler registered for its class. */
    private void visitNode(Node node, StringBuilder xmlDump, int metaDataType) {
        Handler handler = semanticModule.getHandlerByClass(node.getClass());
        if (handler != null) {
            ((AbstractNodeHandler) handler).writeNode((org.jbpm.workflow.core.Node) node, xmlDump, metaDataType);
        } else {
            throw new IllegalArgumentException( "Unknown node type: " + node);
        }
    }

    /**
     * Emits a BPMNShape element per node. Event and gateway nodes are
     * normalized to a 48x48 bounding box centred on the stored bounds, and
     * coordinates of nested nodes are translated by their composite parents'
     * offsets.
     */
    private void visitNodesDi(Node[] nodes, StringBuilder xmlDump) {
        for (Node node: nodes) {
            Integer x = (Integer) node.getMetaData().get("x");
            Integer y = (Integer) node.getMetaData().get("y");
            Integer width = (Integer) node.getMetaData().get("width");
            Integer height = (Integer) node.getMetaData().get("height");
            if (x == null) {
                x = 0;
            }
            if (y == null) {
                y = 0;
            }
            if (width == null) {
                width = 48;
            }
            if (height == null) {
                height = 48;
            }
            if (node instanceof StartNode || node instanceof EndNode || node instanceof EventNode || node instanceof FaultNode) {
                int offsetX = (int) ((width - 48) / 2);
                width = 48;
                x = x + offsetX;
                int offsetY = (int) ((height - 48) / 2);
                y = y + offsetY;
                height = 48;
            } else if (node instanceof Join || node instanceof Split) {
                // NOTE(review): identical to the branch above; kept separate in the original
                int offsetX = (int) ((width - 48) / 2);
                width = 48;
                x = x + offsetX;
                int offsetY = (int) ((height - 48) / 2);
                y = y + offsetY;
                height = 48;
            }
            // accumulate the offsets of all enclosing composite nodes
            int parentOffsetX = 0;
            int parentOffsetY = 0;
            NodeContainer nodeContainer = node.getNodeContainer();
            while (nodeContainer instanceof CompositeNode) {
                CompositeNode parent = (CompositeNode) nodeContainer;
                Integer parentX = (Integer) parent.getMetaData().get("x");
                if (parentX != null) {
                    parentOffsetX += parentX;
                }
                Integer parentY = (Integer) parent.getMetaData().get("y");
                if (parentY != null) {
                    // NOTE(review): re-reads the meta data instead of using parentY — same value
                    parentOffsetY += (Integer) parent.getMetaData().get("y");
                }
                nodeContainer = parent.getNodeContainer();
            }
            x += parentOffsetX;
            y += parentOffsetY;
            xmlDump.append(
                " <bpmndi:BPMNShape bpmnElement=\"" + getUniqueNodeId(node) + "\" >" + EOL +
                " <dc:Bounds x=\"" + x + "\" " + "y=\"" + y + "\" " + "width=\"" + width + "\" " + "height=\"" + height + "\" />" + EOL +
                " </bpmndi:BPMNShape>" + EOL);
            if (node instanceof CompositeNode) {
                visitNodesDi(((CompositeNode) node).getNodes(), xmlDump);
            }
        }
    }

    /** Collects all incoming connections of the given nodes and dumps each one. */
    private void visitConnections(Node[] nodes, StringBuilder xmlDump, int metaDataType) {
        xmlDump.append(" <!-- connections -->" + EOL);
        List<Connection> connections = new ArrayList<Connection>();
        for (Node node: nodes) {
            for (List<Connection> connectionList: node.getIncomingConnections().values()) {
                connections.addAll(connectionList);
            }
        }
        for (Connection connection: connections) {
            visitConnection(connection, xmlDump, metaDataType);
        }
        xmlDump.append(EOL);
    }

    /** True when the connection was synthesized for a (hidden) link event. */
    private boolean isConnectionRepresentingLinkEvent(Connection connection) {
        boolean bValue = connection.getMetaData().get("linkNodeHidden") != null;
        return bValue;
    }

    /**
     * Emits a sequenceFlow element for the connection, including the condition
     * expression when the source is an XOR/OR split with a constraint on this
     * branch. Link-event and hidden (compensation) connections are skipped.
     */
    public void visitConnection(Connection connection, StringBuilder xmlDump, int metaDataType) {
        // if the connection was generated by a link event, don't dump.
        if (isConnectionRepresentingLinkEvent(connection)) {
            return;
        }
        // if the connection is a hidden one (compensations), don't dump
        Object hidden = ((ConnectionImpl) connection).getMetaData("hidden");
        if( hidden != null && ((Boolean) hidden) ) {
            return;
        }
        xmlDump.append(" <sequenceFlow id=\"" + getUniqueNodeId(connection.getFrom()) + "-" + getUniqueNodeId(connection.getTo()) + "\" sourceRef=\"" + getUniqueNodeId(connection.getFrom()) + "\" ");
        // TODO fromType, toType
        xmlDump.append("targetRef=\"" + getUniqueNodeId(connection.getTo()) + "\" ");
        if (metaDataType == META_DATA_AS_NODE_PROPERTY) {
            String bendpoints = (String) connection.getMetaData().get("bendpoints");
            if (bendpoints != null) {
                xmlDump.append("g:bendpoints=\"" + bendpoints + "\" ");
            }
        }
        if (connection.getFrom() instanceof Split) {
            Split split = (Split) connection.getFrom();
            if (split.getType() == Split.TYPE_XOR || split.getType() == Split.TYPE_OR) {
                Constraint constraint = split.getConstraint(connection);
                if (constraint == null) {
                    xmlDump.append(">" + EOL + " <conditionExpression xsi:type=\"tFormalExpression\" />");
                } else {
                    if (constraint.getName() != null && constraint.getName().trim().length() > 0) {
                        xmlDump.append("name=\"" + XmlBPMNProcessDumper.replaceIllegalCharsAttribute(constraint.getName()) + "\" ");
                    }
                    if (constraint.getPriority() != 0) {
                        xmlDump.append("tns:priority=\"" + constraint.getPriority() + "\" ");
                    }
                    xmlDump.append(">" + EOL + " <conditionExpression xsi:type=\"tFormalExpression\" ");
                    if ("code".equals(constraint.getType())) {
                        // map the constraint dialect onto the expression-language URI
                        if ( JavaDialect.ID.equals(constraint.getDialect())) {
                            xmlDump.append("language=\"" + JAVA_LANGUAGE + "\" ");
                        } else if ("XPath".equals(constraint.getDialect())) {
                            xmlDump.append("language=\"" + XPATH_LANGUAGE + "\" ");
                        } else if ("JavaScript".equals(constraint.getDialect())) {
                            xmlDump.append("language=\"" + JAVASCRIPT_LANGUAGE + "\" ");
                        } else if ("FEEL".equals(constraint.getDialect())) {
                            xmlDump.append("language=\"" + FEEL_LANGUAGE + "\" ");
                        }
                    } else {
                        xmlDump.append("language=\"" + RULE_LANGUAGE + "\" ");
                    }
                    String constraintString = constraint.getConstraint();
                    if (constraintString == null) {
                        constraintString = "";
                    }
                    xmlDump.append(">" + XmlDumper.replaceIllegalChars(constraintString) + "</conditionExpression>");
                }
                xmlDump.append(EOL + " </sequenceFlow>" + EOL);
            } else {
                xmlDump.append("/>" + EOL);
            }
        } else {
            xmlDump.append("/>" + EOL);
        }
    }

    /**
     * Emits a BPMNEdge per connection with waypoints at the source centre, any
     * stored bendpoints, and the target centre.
     */
    private void visitConnectionsDi(Node[] nodes, StringBuilder xmlDump) {
        List<Connection> connections = new ArrayList<Connection>();
        for (Node node: nodes) {
            for (List<Connection> connectionList: node.getIncomingConnections().values()) {
                connections.addAll(connectionList);
            }
            if (node instanceof CompositeNode) {
                visitConnectionsDi(((CompositeNode) node).getNodes(), xmlDump);
            }
        }
        for (Connection connection: connections) {
            String bendpoints = (String) connection.getMetaData().get("bendpoints");
            xmlDump.append(
                " <bpmndi:BPMNEdge bpmnElement=\"" + getUniqueNodeId(connection.getFrom()) + "-" + getUniqueNodeId(connection.getTo()) + "\" >" + EOL);
            Integer x = (Integer) connection.getFrom().getMetaData().get("x");
            if (x == null) {
                x = 0;
            }
            Integer y = (Integer) connection.getFrom().getMetaData().get("y");
            if (y == null) {
                y = 0;
            }
            Integer width = (Integer) connection.getFrom().getMetaData().get("width");
            if (width == null) {
                width = 40;
            }
            Integer height = (Integer) connection.getFrom().getMetaData().get("height");
            if (height == null) {
                height = 40;
            }
            // first waypoint: centre of the source node
            xmlDump.append(
                " <di:waypoint x=\"" + (x + width/2) + "\" y=\"" + (y + height/2) + "\" />" + EOL);
            if (bendpoints != null) {
                // strip the surrounding delimiter characters, then split into "x,y" pairs
                bendpoints = bendpoints.substring(1, bendpoints.length() - 1);
                String[] points = bendpoints.split(";");
                for (String point: points) {
                    String[] coords = point.split(",");
                    if (coords.length == 2) {
                        xmlDump.append(
                            " <di:waypoint x=\"" + coords[0] + "\" y=\"" + coords[1] + "\" />" + EOL);
                    }
                }
            }
            x = (Integer) connection.getTo().getMetaData().get("x");
            if (x == null) {
                x = 0;
            }
            y = (Integer)
connection.getTo().getMetaData().get("y"); if (y == null) { y = 0; } width = (Integer) connection.getTo().getMetaData().get("width"); if (width == null) { width = 40; } height = (Integer) connection.getTo().getMetaData().get("height"); if (height == null) { height = 40; } xmlDump.append( " <di:waypoint x=\"" + (x + width/2) + "\" y=\"" + (y + height/2) + "\" />" + EOL); xmlDump.append( " </bpmndi:BPMNEdge>" + EOL); } } public static String getUniqueNodeId(Node node) { String result = (String) node.getMetaData().get("UniqueId"); if (result != null) { return result; } result = node.getId() + ""; NodeContainer nodeContainer = node.getNodeContainer(); while (nodeContainer instanceof CompositeNode) { CompositeNode composite = (CompositeNode) nodeContainer; result = composite.getId() + "-" + result; nodeContainer = composite.getNodeContainer(); } return "_" + result; } public static String replaceIllegalCharsAttribute(final String code) { final StringBuilder sb = new StringBuilder(); if ( code != null ) { final int n = code.length(); for ( int i = 0; i < n; i++ ) { final char c = code.charAt( i ); switch ( c ) { case '<' : sb.append( "&lt;" ); break; case '>' : sb.append( "&gt;" ); break; case '&' : sb.append( "&amp;" ); break; case '"' : sb.append( "&quot;" ); break; default : sb.append( c ); break; } } } else { sb.append( "null" ); } return sb.toString(); } @Override public String dumpProcess(Process process) { return dump((RuleFlowProcess) process, false); } @Override public Process readProcess(String processXml) { SemanticModules semanticModules = new SemanticModules(); semanticModules.addSemanticModule(new BPMNSemanticModule()); semanticModules.addSemanticModule(new BPMNExtensionsSemanticModule()); semanticModules.addSemanticModule(new BPMNDISemanticModule()); XmlProcessReader xmlReader = new XmlProcessReader(semanticModules, Thread.currentThread().getContextClassLoader()); try { List<Process> processes = xmlReader.read(new StringReader(processXml)); return 
processes.get(0); } catch (Throwable t) { t.printStackTrace(); return null; } } }
/*
 * iVProg2 - interactive Visual Programming for the Internet
 * Java version
 *
 * LInE
 * Free Software for Better Education (FSBE)
 * http://www.matematica.br
 * http://line.ime.usp.br
 *
 * @see : usp/ime/line/ivprog/model/utils/Services.java: static {... render = new IVPRenderer(); ...}
 *
 */
package usp.ime.line.ivprog.view;

import javax.swing.JComponent;

import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.AttributionLine;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.Constant;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.DataObject;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.Expression;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.For;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.Function;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.IfElse;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.Operation;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.Print;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.ReadData;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.Reference;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.Variable;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.VariableReference;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.While;
import usp.ime.line.ivprog.model.utils.Services;
import usp.ime.line.ivprog.view.domaingui.editinplace.EditBoolean;
import usp.ime.line.ivprog.view.domaingui.editinplace.EditInPlace;
import usp.ime.line.ivprog.view.domaingui.variables.IVPVariableBasic;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.AttributionLineUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.BooleanOperationUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.ConstantUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.ExpressionHolderUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.ArithmeticOperationUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.ForUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.FunctionBodyUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.IfElseUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.OperationUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.ReadUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.StringOperationUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.VariableSelectorUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.WhileUI;
import usp.ime.line.ivprog.view.domaingui.workspace.codecomponents.PrintUI;
import usp.ime.line.ivprog.view.utils.language.ResourceBundleIVP;

/**
 * Translates iVProg model objects into their Swing view components.
 *
 * <p>Every component this renderer creates is also registered in
 * {@code Services.getViewMapping()} under the model's unique id, so other
 * parts of the GUI can later look the view up by id.
 *
 * <p>NOTE(review): the renderer is instantiated once by
 * {@code Services} (see the file header) and the various
 * {@code DomainAction} subclasses under
 * {@code usp.ime.line.ivprog.model.domainaction} are its main clients —
 * presumably via {@code Services.setDomainActionRender(...)}; verify
 * against those callers.
 */
public class IVPRenderer {

	/**
	 * Renders the model object registered under the given key.
	 *
	 * <p>The key is the model's unique id ({@code String}); the matching
	 * {@link DataObject} is fetched from {@code Services.getModelMapping()}
	 * and dispatched to the type-specific render method. The order of the
	 * {@code instanceof} tests below is significant: more specific model
	 * types must be checked before their supertypes.
	 *
	 * @param objectKey unique id of the model object (expected to be a String)
	 * @return the freshly created view component, or {@code null} when the
	 *         model type is not handled (e.g. a bare {@link Reference})
	 */
	public JComponent paint(Object objectKey) {
		DataObject model = (DataObject) Services.getModelMapping().get((String) objectKey);
		if (model instanceof Function) {
			return renderFunction((Function) model);
		}
		if (model instanceof Variable) {
			return renderVariable((Variable) model);
		}
		if (model instanceof While) {
			return renderWhile((While) model);
		}
		if (model instanceof IfElse) {
			return renderIfElse((IfElse) model);
		}
		if (model instanceof Print) {
			return renderWrite((Print) model);
		}
		if (model instanceof AttributionLine) {
			return renderAttributionLine((AttributionLine) model);
		}
		if (model instanceof Expression) {
			return renderExpresion((Expression) model);
		}
		if (model instanceof Reference) {
			return renderReference((Reference) model);
		}
		if (model instanceof ReadData) {
			return renderRead((ReadData) model);
		}
		if (model instanceof For) {
			return renderFor((For) model);
		}
		return null;
	}

	/**
	 * References have no dedicated view yet.
	 *
	 * @param referenceModel the reference model (unused)
	 * @return always {@code null}
	 */
	private JComponent renderReference(Reference referenceModel) {
		return null;
	}

	/**
	 * Renders an expression: a variable reference, a constant, or an
	 * operation (boolean, string concatenation, or arithmetic).
	 *
	 * @param expressionModel the expression to render
	 * @return the view component for the expression
	 */
	private JComponent renderExpresion(Expression expressionModel) {
		if (expressionModel instanceof VariableReference) {
			VariableSelectorUI selector = new VariableSelectorUI(expressionModel.getParentID());
			selector.setModelID(expressionModel.getUniqueID());
			selector.setScopeID(expressionModel.getScopeID());
			Services.getViewMapping().put(expressionModel.getUniqueID(), selector);
			return selector;
		}
		if (expressionModel instanceof Constant) {
			ConstantUI constantView = new ConstantUI(expressionModel.getUniqueID());
			constantView.setExpressionType(expressionModel.getExpressionType());
			constantView.setModelScope(expressionModel.getScopeID());
			Services.getViewMapping().put(expressionModel.getUniqueID(), constantView);
			return constantView;
		}
		// Anything else is an operation. Pick the view by expression type:
		// everything at or above EXPRESSION_OPERATION_AND is boolean, except
		// CONCAT (string) and INTDIV (arithmetic).
		int expressionType = expressionModel.getExpressionType();
		OperationUI operationView;
		if (expressionType >= Expression.EXPRESSION_OPERATION_AND
				&& expressionType != Expression.EXPRESSION_OPERATION_CONCAT
				&& expressionType != Expression.EXPRESSION_OPERATION_INTDIV) {
			operationView = new BooleanOperationUI(expressionModel.getParentID(),
					expressionModel.getScopeID(), expressionModel.getUniqueID());
		} else if (expressionType == Expression.EXPRESSION_OPERATION_CONCAT) {
			operationView = new StringOperationUI(expressionModel.getParentID(),
					expressionModel.getScopeID(), expressionModel.getUniqueID());
		} else {
			operationView = new ArithmeticOperationUI(expressionModel.getParentID(),
					expressionModel.getScopeID(), expressionModel.getUniqueID());
		}
		// Hook up the already-rendered left operand, if any. The id of the
		// left operand is stored on the Operation model; an empty id means
		// "no operand yet".
		Operation operationModel = (Operation) expressionModel;
		String leftOperandId = operationModel.getExpressionA();
		if (leftOperandId != null && !"".equals(leftOperandId)) {
			operationView.setExpressionBaseUI_1(
					(JComponent) Services.getViewMapping().get(leftOperandId));
		}
		operationView.setModelScope(expressionModel.getScopeID());
		Services.getViewMapping().put(expressionModel.getUniqueID(), operationView);
		return operationView;
	}

	/**
	 * Renders an attribution (assignment) line.
	 *
	 * @param attLineModel the attribution-line model
	 * @return the view component
	 */
	private JComponent renderAttributionLine(AttributionLine attLineModel) {
		AttributionLineUI attributionView = new AttributionLineUI(attLineModel.getUniqueID(),
				attLineModel.getScopeID(), attLineModel.getParentID());
		attributionView.setModelParent(attLineModel.getParentID());
		attributionView.setModelScope(attLineModel.getScopeID());
		attributionView.setLeftVarModelID(attLineModel.getLeftVariableID());
		Services.getViewMapping().put(attLineModel.getUniqueID(), attributionView);
		return attributionView;
	}

	/**
	 * Renders a while-loop block.
	 *
	 * @param object the while model
	 * @return the view component
	 */
	private JComponent renderWhile(While object) {
		WhileUI whileView = new WhileUI(object.getUniqueID());
		whileView.setModelParent(object.getParentID());
		whileView.setModelScope(object.getScopeID());
		Services.getViewMapping().put(object.getUniqueID(), whileView);
		return whileView;
	}

	/**
	 * Renders a for-loop block.
	 *
	 * @param object the for model
	 * @return the view component
	 */
	private JComponent renderFor(For object) {
		ForUI forView = new ForUI(object.getUniqueID());
		forView.setModelParent(object.getParentID());
		forView.setModelScope(object.getScopeID());
		Services.getViewMapping().put(object.getUniqueID(), forView);
		return forView;
	}

	/**
	 * Renders an if/else block.
	 *
	 * @param object the if/else model
	 * @return the view component
	 */
	private JComponent renderIfElse(IfElse object) {
		IfElseUI ifElseView = new IfElseUI(object.getUniqueID());
		ifElseView.setModelParent(object.getParentID());
		ifElseView.setModelScope(object.getScopeID());
		Services.getViewMapping().put(object.getUniqueID(), ifElseView);
		return ifElseView;
	}

	/**
	 * Renders a function body. The function whose name matches the
	 * localized {@code "mainFunctionName"} resource is rendered as the
	 * program's main function.
	 *
	 * @param f the function model
	 * @return the function body view
	 */
	public FunctionBodyUI renderFunction(Function f) {
		boolean isMainFunction =
				f.getFunctionName().equals(ResourceBundleIVP.getString("mainFunctionName"));
		FunctionBodyUI functionView = new FunctionBodyUI(f.getUniqueID(), isMainFunction);
		// parameters and variables need to be rendered
		functionView.setName(f.getFunctionName());
		functionView.setType(f.getReturnType());
		Services.getViewMapping().put(f.getUniqueID(), functionView);
		return functionView;
	}

	/**
	 * Renders a print ("write") statement.
	 *
	 * @param p the print model
	 * @return the view component
	 */
	private JComponent renderWrite(Print p) {
		PrintUI printView = new PrintUI(p.getUniqueID(), p.getParentID(), p.getScopeID());
		Services.getViewMapping().put(p.getUniqueID(), printView);
		return printView;
	}

	/**
	 * Renders a read-data statement.
	 *
	 * @param r the read model
	 * @return the view component
	 */
	private JComponent renderRead(ReadData r) {
		ReadUI readView = new ReadUI(r.getUniqueID(), r.getParentID(), r.getScopeID());
		Services.getViewMapping().put(r.getUniqueID(), readView);
		return readView;
	}

	/**
	 * Renders a variable declaration.
	 *
	 * @param object the variable model
	 * @return the view component
	 */
	private JComponent renderVariable(Variable object) {
		IVPVariableBasic variableView =
				new IVPVariableBasic(object.getUniqueID(), object.getScopeID());
		variableView.setVariableName(object.getVariableName());
		Services.getViewMapping().put(object.getUniqueID(), variableView);
		return variableView;
	}
}
package ai.h2o.automl.targetencoding;

import hex.ModelMetricsBinomial;
import hex.ScoreKeeper;
import hex.genmodel.utils.DistributionFamily;
import hex.tree.gbm.GBM;
import hex.tree.gbm.GBMModel;
import org.junit.*;
import water.Key;
import water.Scope;
import water.TestUtil;
import water.fvec.Frame;
import water.util.Log;

import java.util.Arrays;
import java.util.Map;

import static ai.h2o.automl.targetencoding.TargetEncoderFrameHelper.addKFoldColumn;

/*
 Be aware that `smalldata/airlines/target_encoding/airlines_*.csv` files are not present in the repo. Replace with your own splits.
 */
/**
 * Benchmark-style tests that compare GBM AUC on the airlines data with and
 * without target encoding. Each test trains one GBM on target-encoded frames
 * and one baseline GBM on the raw frames, and asserts the encoded model wins.
 */
public class TargetEncodingAirlinesBenchmark extends TestUtil {

  @BeforeClass
  public static void setup() {
    stall_till_cloudsize(1);
  }

  /**
   * Target encoding with the KFold leakage-handling strategy: encodings are
   * built on the training frame itself using an out-of-fold scheme, then a
   * GBM trained on the encoded frames is compared against the baseline.
   */
  @Test
  public void KFoldHoldoutTypeTest() {
    Scope.enter();
    GBMModel gbm = null;
    Map<String, Frame> encodingMap = null;
    try {
      Frame airlinesTrainWithTEH = parse_test_file(Key.make("airlines_train"),
              "smalldata/airlines/target_encoding/airlines_train_with_teh.csv");
      Frame airlinesValid = parse_test_file(Key.make("airlines_valid"),
              "smalldata/airlines/target_encoding/airlines_valid.csv");
      Frame airlinesTestFrame = parse_test_file(Key.make("airlines_test"),
              "smalldata/airlines/target_encoding/airlines_test.csv");
      Scope.track(airlinesTrainWithTEH, airlinesValid, airlinesTestFrame);

      long startTimeEncoding = System.currentTimeMillis();

      String foldColumnName = "fold";
      addKFoldColumn(airlinesTrainWithTEH, foldColumnName, 5, 1234L);

      BlendingParams params = new BlendingParams(5, 1);
      String[] teColumns = {"Origin", "Dest"};
      TargetEncoder tec = new TargetEncoder(teColumns, params);
      String targetColumnName = "IsDepDelayed";
      boolean withBlendedAvg = true;
      boolean withNoiseOnlyForTraining = true;
      boolean withImputationForNAsInOriginalColumns = true;

      // Create encoding
      encodingMap = tec.prepareEncodingMap(airlinesTrainWithTEH, targetColumnName, foldColumnName, true);

      // Apply encoding to the training set; noise is added only when
      // withNoiseOnlyForTraining is set (the overload without an explicit
      // noise argument uses the encoder's default noise level).
      Frame trainEncoded;
      int seed = 1234;
      int seedForGBM = 1234;
      if (withNoiseOnlyForTraining) {
        trainEncoded = tec.applyTargetEncoding(airlinesTrainWithTEH, targetColumnName, encodingMap,
                TargetEncoder.DataLeakageHandlingStrategy.KFold, foldColumnName, withBlendedAvg,
                withImputationForNAsInOriginalColumns, seed);
      } else {
        trainEncoded = tec.applyTargetEncoding(airlinesTrainWithTEH, targetColumnName, encodingMap,
                TargetEncoder.DataLeakageHandlingStrategy.KFold, foldColumnName, withBlendedAvg, 0,
                withImputationForNAsInOriginalColumns, seed);
      }

      // Applying encoding to the valid set (no noise: noise level 0)
      Frame validEncoded = tec.applyTargetEncoding(airlinesValid, targetColumnName, encodingMap,
              TargetEncoder.DataLeakageHandlingStrategy.None, foldColumnName, withBlendedAvg, 0,
              withImputationForNAsInOriginalColumns, seed);

      // Applying encoding to the test set (no noise: noise level 0)
      Frame testEncoded = tec.applyTargetEncoding(airlinesTestFrame, targetColumnName, encodingMap,
              TargetEncoder.DataLeakageHandlingStrategy.None, foldColumnName, withBlendedAvg, 0,
              withImputationForNAsInOriginalColumns, seed);

      printOutColumnsMetadata(testEncoded);
      testEncoded = tec.ensureTargetColumnIsBinaryCategorical(testEncoded, targetColumnName);
      Scope.track(trainEncoded, validEncoded, testEncoded);

      long finishTimeEncoding = System.currentTimeMillis();
      System.out.println("Calculation of encodings took: " + (finishTimeEncoding - startTimeEncoding));

      // With target encoded columns
      long startTime = System.currentTimeMillis();
      GBMModel.GBMParameters parms = new GBMModel.GBMParameters();
      parms._train = trainEncoded._key;
      parms._response_column = targetColumnName;
      parms._score_tree_interval = 10;
      parms._ntrees = 1000;
      parms._max_depth = 5;
      parms._distribution = DistributionFamily.AUTO;
      parms._valid = validEncoded._key;
      parms._stopping_tolerance = 0.001;
      parms._stopping_metric = ScoreKeeper.StoppingMetric.AUC;
      parms._stopping_rounds = 5;
      // Ignore the raw (pre-encoding) columns and the fold column.
      parms._ignored_columns = concat(new String[]{"IsDepDelayed_REC", foldColumnName}, teColumns);
      parms._seed = seedForGBM;
      GBM job = new GBM(parms);
      gbm = job.trainModel().get();

      Assert.assertTrue(job.isStopped());

      long finishTime = System.currentTimeMillis();
      System.out.println("Calculation took: " + (finishTime - startTime));

      Frame preds = gbm.score(testEncoded);
      Scope.track(preds);

      hex.ModelMetricsBinomial mm = ModelMetricsBinomial.make(preds.vec(2), testEncoded.vec(parms._response_column));
      double auc = mm._auc._auc;

      // Without target encoding
      double auc2 = trainDefaultGBM(targetColumnName, tec);

      System.out.println("AUC with encoding:" + auc);
      System.out.println("AUC without encoding:" + auc2);

      Assert.assertTrue(auc2 < auc);
    } finally {
      // encodingMapCleanUp tolerates a null map, so cleanup is safe even
      // when parsing fails before the encoding map is created.
      encodingMapCleanUp(encodingMap);
      if (gbm != null) {
        gbm.delete();
        gbm.deleteCrossValidationModels();
      }
      Scope.exit();
    }
  }

  /**
   * Target encoding with the None leakage-handling strategy: encodings are
   * built on a dedicated holdout frame and applied as-is to train, valid and
   * test frames, then compared against the baseline GBM.
   */
  @Test
  public void noneHoldoutTypeTest() {
    Scope.enter();
    GBMModel gbm = null;
    Map<String, Frame> encodingMap = null;
    try {
      Frame airlinesTrainWithoutTEH = parse_test_file(Key.make("airlines_train"),
              "smalldata/airlines/target_encoding/airlines_train_without_teh.csv");
      Frame airlinesTEHoldout = parse_test_file(Key.make("airlines_te_holdout"),
              "smalldata/airlines/target_encoding/airlines_te_holdout.csv");
      Frame airlinesValid = parse_test_file(Key.make("airlines_valid"),
              "smalldata/airlines/target_encoding/airlines_valid.csv");
      Frame airlinesTestFrame = parse_test_file(Key.make("airlines_test"),
              "smalldata/airlines/AirlinesTest.csv.zip");
      Scope.track(airlinesTrainWithoutTEH, airlinesTEHoldout, airlinesValid, airlinesTestFrame);

      long startTimeEncoding = System.currentTimeMillis();

      BlendingParams params = new BlendingParams(3, 1);
      String[] teColumns = {"Origin", "Dest"};
      TargetEncoder tec = new TargetEncoder(teColumns, params);
      String targetColumnName = "IsDepDelayed";
      boolean withBlendedAvg = true;
      boolean withImputationForNAsInOriginalColumns = true;

      // Create encoding on the dedicated holdout frame (no fold column).
      encodingMap = tec.prepareEncodingMap(airlinesTEHoldout, targetColumnName, null);

      // Apply encoding to the training set
      Frame trainEncoded = tec.applyTargetEncoding(airlinesTrainWithoutTEH, targetColumnName, encodingMap,
              TargetEncoder.DataLeakageHandlingStrategy.None, withBlendedAvg, 0,
              withImputationForNAsInOriginalColumns, 1234);

      // Applying encoding to the valid set
      Frame validEncoded = tec.applyTargetEncoding(airlinesValid, targetColumnName, encodingMap,
              TargetEncoder.DataLeakageHandlingStrategy.None, withBlendedAvg, 0,
              withImputationForNAsInOriginalColumns, 1234);

      // Applying encoding to the test set
      Frame testEncoded = tec.applyTargetEncoding(airlinesTestFrame, targetColumnName, encodingMap,
              TargetEncoder.DataLeakageHandlingStrategy.None, withBlendedAvg, 0,
              withImputationForNAsInOriginalColumns, 1234);

      // We do it manually just to be able to measure metrics in the end.
      // TargetEncoder should not be aware of target column for test dataset.
      testEncoded = tec.ensureTargetColumnIsBinaryCategorical(testEncoded, targetColumnName);
      Scope.track(trainEncoded, validEncoded, testEncoded);

      long finishTimeEncoding = System.currentTimeMillis();
      System.out.println("Calculation of encodings took: " + (finishTimeEncoding - startTimeEncoding));

      // With target encoded columns
      checkNumRows(airlinesTrainWithoutTEH, trainEncoded);
      checkNumRows(airlinesValid, validEncoded);
      checkNumRows(airlinesTestFrame, testEncoded);

      long startTime = System.currentTimeMillis();
      GBMModel.GBMParameters parms = new GBMModel.GBMParameters();
      parms._train = trainEncoded._key;
      parms._response_column = targetColumnName;
      parms._score_tree_interval = 10;
      parms._ntrees = 1000;
      parms._max_depth = 5;
      parms._distribution = DistributionFamily.AUTO;
      parms._valid = validEncoded._key;
      parms._stopping_tolerance = 0.001;
      parms._stopping_metric = ScoreKeeper.StoppingMetric.AUC;
      parms._stopping_rounds = 5;
      parms._ignored_columns = concat(new String[]{"IsDepDelayed_REC"}, teColumns);
      parms._seed = 1234L;
      GBM job = new GBM(parms);
      gbm = job.trainModel().get();

      Assert.assertTrue(job.isStopped());

      long finishTime = System.currentTimeMillis();
      System.out.println("Calculation took: " + (finishTime - startTime));

      Frame preds = gbm.score(testEncoded);
      Scope.track(preds);

      hex.ModelMetricsBinomial mm = ModelMetricsBinomial.make(preds.vec(2), testEncoded.vec(parms._response_column));
      double auc = mm._auc._auc;

      // Without target encoded Origin column
      double auc2 = trainDefaultGBM(targetColumnName, tec);

      System.out.println("AUC with encoding:" + auc);
      System.out.println("AUC without encoding:" + auc2);

      Assert.assertTrue(auc2 < auc);
    } finally {
      // Cleanup moved into finally (it used to run inside the try, so any
      // earlier failure leaked the encoding frames and the GBM model);
      // this also matches KFoldHoldoutTypeTest.
      encodingMapCleanUp(encodingMap);
      if (gbm != null) {
        gbm.delete();
        gbm.deleteCrossValidationModels();
      }
      Scope.exit();
    }
  }

  /**
   * Trains the baseline GBM on the raw (non-encoded) airlines frames.
   *
   * @param targetColumnName response column name
   * @param tec              encoder, used only to coerce the target column to a binary categorical
   * @return test-set AUC of the baseline model
   */
  private double trainDefaultGBM(String targetColumnName, TargetEncoder tec) {
    GBMModel gbm2 = null;
    Scope.enter();
    try {
      Frame airlinesTrainWithTEHDefault = parse_test_file(Key.make("airlines_train_d"),
              "smalldata/airlines/target_encoding/airlines_train_with_teh.csv");
      Frame airlinesValidDefault = parse_test_file(Key.make("airlines_valid_d"),
              "smalldata/airlines/target_encoding/airlines_valid.csv");
      Frame airlinesTestFrameDefault = parse_test_file(Key.make("airlines_test_d"),
              "smalldata/airlines/AirlinesTest.csv.zip");
      Scope.track(airlinesTrainWithTEHDefault, airlinesValidDefault, airlinesTestFrameDefault);

      airlinesTrainWithTEHDefault = tec.ensureTargetColumnIsBinaryCategorical(airlinesTrainWithTEHDefault, targetColumnName);
      airlinesValidDefault = tec.ensureTargetColumnIsBinaryCategorical(airlinesValidDefault, targetColumnName);
      airlinesTestFrameDefault = tec.ensureTargetColumnIsBinaryCategorical(airlinesTestFrameDefault, targetColumnName);

      GBMModel.GBMParameters parms2 = new GBMModel.GBMParameters();
      parms2._train = airlinesTrainWithTEHDefault._key;
      parms2._response_column = targetColumnName;
      parms2._score_tree_interval = 10;
      parms2._ntrees = 1000;
      parms2._max_depth = 5;
      parms2._distribution = DistributionFamily.AUTO;
      parms2._valid = airlinesValidDefault._key;
      parms2._stopping_tolerance = 0.001;
      parms2._stopping_metric = ScoreKeeper.StoppingMetric.AUC;
      parms2._stopping_rounds = 5;
      parms2._ignored_columns = new String[]{"IsDepDelayed_REC"};
      parms2._seed = 1234L;
      GBM job2 = new GBM(parms2);
      gbm2 = job2.trainModel().get();

      Assert.assertTrue(job2.isStopped());

      Frame preds2 = gbm2.score(airlinesTestFrameDefault);
      Scope.track(preds2);
      hex.ModelMetricsBinomial mm2 = ModelMetricsBinomial.make(preds2.vec(2), airlinesTestFrameDefault.vec(parms2._response_column));
      return mm2._auc._auc;
    } finally {
      if (gbm2 != null) {
        gbm2.delete();
        gbm2.deleteCrossValidationModels();
      }
      Scope.exit();
    }
  }

  /**
   * Warns (without failing) when an encoded frame lost rows compared to its
   * source frame.
   *
   * @param before frame before encoding
   * @param after  frame after encoding
   */
  public void checkNumRows(Frame before, Frame after) {
    long droppedCount = before.numRows() - after.numRows();
    if (droppedCount != 0) {
      Log.warn(String.format("Number of rows has dropped by %d after manipulations with frame ( %s , %s ).",
              droppedCount, before._key, after._key));
    }
  }

  /**
   * Deletes every frame stored in the encoding map. Null-safe so it can be
   * called from a {@code finally} block even when map creation never ran.
   *
   * @param encodingMap map of column name to encoding frame, may be null
   */
  private void encodingMapCleanUp(Map<String, Frame> encodingMap) {
    if (encodingMap == null) {
      return;
    }
    for (Map.Entry<String, Frame> map : encodingMap.entrySet()) {
      map.getValue().delete();
    }
  }

  /**
   * Concatenates two arrays into a new array of the same component type.
   *
   * @param first  leading elements
   * @param second trailing elements
   * @return a new array containing all of {@code first} followed by all of {@code second}
   */
  public static <T> T[] concat(T[] first, T[] second) {
    T[] result = Arrays.copyOf(first, first.length + second.length);
    System.arraycopy(second, 0, result, first.length, second.length);
    return result;
  }
}
/* * Copyright (c) 2009-2012 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.jme3.input; import com.jme3.app.Application; import com.jme3.cursors.plugins.JmeCursor; import com.jme3.input.controls.*; import com.jme3.input.event.*; import com.jme3.math.FastMath; import com.jme3.math.Vector2f; import com.jme3.util.IntMap; import com.jme3.util.IntMap.Entry; import java.util.ArrayList; import java.util.HashMap; import java.util.logging.Level; import java.util.logging.Logger; /** * The <code>InputManager</code> is responsible for converting input events * received from the Key, Mouse and Joy Input implementations into an * abstract, input device independent representation that user code can use. * <p> * By default an <code>InputManager</code> is included with every Application instance for use * in user code to query input, unless the Application is created as headless * or with input explicitly disabled. * <p> * The input manager has two concepts, a {@link Trigger} and a mapping. * A trigger represents a specific input trigger, such as a key button, * or a mouse axis. A mapping represents a link onto one or several triggers, * when the appropriate trigger is activated (e.g. a key is pressed), the * mapping will be invoked. Any listeners registered to receive an event * from the mapping will have an event raised. * <p> * There are two types of events that {@link InputListener input listeners} * can receive, one is {@link ActionListener#onAction(java.lang.String, boolean, float) action} * events and another is {@link AnalogListener#onAnalog(java.lang.String, float, float) analog} * events. * <p> * <code>onAction</code> events are raised when the specific input * activates or deactivates. For a digital input such as key press, the <code>onAction()</code> * event will be raised with the <code>isPressed</code> argument equal to true, * when the key is released, <code>onAction</code> is called again but this time * with the <code>isPressed</code> argument set to false. 
* For analog inputs, the <code>onAction</code> method will be called any time * the input is non-zero, however an exception to this is for joystick axis inputs, * which are only called when the input is above the {@link InputManager#setAxisDeadZone(float) dead zone}. * <p> * <code>onAnalog</code> events are raised every frame while the input is activated. * For digital inputs, every frame that the input is active will cause the * <code>onAnalog</code> method to be called, the argument <code>value</code> * argument will equal to the frame's time per frame (TPF) value but only * for digital inputs. For analog inputs however, the <code>value</code> argument * will equal the actual analog value. */ public class InputManager implements RawInputListener { private static final Logger logger = Logger.getLogger(InputManager.class.getName()); private final KeyInput keys; private final MouseInput mouse; private final JoyInput joystick; private final TouchInput touch; private float frameTPF; private long lastLastUpdateTime = 0; private long lastUpdateTime = 0; private long frameDelta = 0; private long firstTime = 0; private boolean eventsPermitted = false; private boolean mouseVisible = true; private boolean safeMode = false; private float axisDeadZone = 0.05f; private Vector2f cursorPos = new Vector2f(); private Joystick[] joysticks; private final IntMap<ArrayList<Mapping>> bindings = new IntMap<ArrayList<Mapping>>(); private final HashMap<String, Mapping> mappings = new HashMap<String, Mapping>(); private final IntMap<Long> pressedButtons = new IntMap<Long>(); private final IntMap<Float> axisValues = new IntMap<Float>(); private ArrayList<RawInputListener> rawListeners = new ArrayList<RawInputListener>(); private RawInputListener[] rawListenerArray = null; private ArrayList<InputEvent> inputQueue = new ArrayList<InputEvent>(); private static class Mapping { private final String name; private final ArrayList<Integer> triggers = new ArrayList<Integer>(); private final 
ArrayList<InputListener> listeners = new ArrayList<InputListener>(); public Mapping(String name) { this.name = name; } } /** * Initializes the InputManager. * * <p>This should only be called internally in {@link Application}. * * @param mouse * @param keys * @param joystick * @param touch * @throws IllegalArgumentException If either mouseInput or keyInput are null. */ public InputManager(MouseInput mouse, KeyInput keys, JoyInput joystick, TouchInput touch) { if (keys == null || mouse == null) { throw new IllegalArgumentException("Mouse or keyboard cannot be null"); } this.keys = keys; this.mouse = mouse; this.joystick = joystick; this.touch = touch; keys.setInputListener(this); mouse.setInputListener(this); if (joystick != null) { joystick.setInputListener(this); joysticks = joystick.loadJoysticks(this); } if (touch != null) { touch.setInputListener(this); } firstTime = keys.getInputTimeNanos(); } private void invokeActions(int hash, boolean pressed) { ArrayList<Mapping> maps = bindings.get(hash); if (maps == null) { return; } int size = maps.size(); for (int i = size - 1; i >= 0; i--) { Mapping mapping = maps.get(i); ArrayList<InputListener> listeners = mapping.listeners; int listenerSize = listeners.size(); for (int j = listenerSize - 1; j >= 0; j--) { InputListener listener = listeners.get(j); if (listener instanceof ActionListener) { ((ActionListener) listener).onAction(mapping.name, pressed, frameTPF); } } } } private float computeAnalogValue(long timeDelta) { if (safeMode || frameDelta == 0) { return 1f; } else { return FastMath.clamp((float) timeDelta / (float) frameDelta, 0, 1); } } private void invokeTimedActions(int hash, long time, boolean pressed) { if (!bindings.containsKey(hash)) { return; } if (pressed) { pressedButtons.put(hash, time); } else { Long pressTimeObj = pressedButtons.remove(hash); if (pressTimeObj == null) { return; // under certain circumstances it can be null, ignore } // the event then. 
long pressTime = pressTimeObj;
        long lastUpdate = lastLastUpdateTime;
        long releaseTime = time;
        // Only the portion of the press that happened during this frame
        // window contributes to the analog value.
        long timeDelta = releaseTime - Math.max(pressTime, lastUpdate);
        if (timeDelta > 0) {
            invokeAnalogs(hash, computeAnalogValue(timeDelta), false);
        }
        }
    }

    /**
     * Raises analog events for buttons that are still held down during this
     * update, and for joystick axes that currently have a non-zero value.
     * Called once per frame from update().
     */
    private void invokeUpdateActions() {
        for (Entry<Long> pressedButton : pressedButtons) {
            int hash = pressedButton.getKey();
            long pressTime = pressedButton.getValue();
            // Time the button was held within the current frame window.
            long timeDelta = lastUpdateTime - Math.max(lastLastUpdateTime, pressTime);
            if (timeDelta > 0) {
                invokeAnalogs(hash, computeAnalogValue(timeDelta), false);
            }
        }
        for (Entry<Float> axisValue : axisValues) {
            int hash = axisValue.getKey();
            float value = axisValue.getValue();
            invokeAnalogs(hash, value * frameTPF, true);
        }
    }

    /**
     * Dispatches onAnalog() to every AnalogListener bound to the trigger hash.
     *
     * @param hash   trigger hash code identifying the bindings to fire
     * @param value  analog value to deliver
     * @param isAxis true if the value already incorporates TPF (axis source);
     *               false for button sources, which get scaled here
     */
    private void invokeAnalogs(int hash, float value, boolean isAxis) {
        ArrayList<Mapping> maps = bindings.get(hash);
        if (maps == null) {
            return;
        }
        if (!isAxis) {
            value *= frameTPF;
        }
        int size = maps.size();
        for (int i = size - 1; i >= 0; i--) {
            Mapping mapping = maps.get(i);
            ArrayList<InputListener> listeners = mapping.listeners;
            int listenerSize = listeners.size();
            for (int j = listenerSize - 1; j >= 0; j--) {
                InputListener listener = listeners.get(j);
                if (listener instanceof AnalogListener) {
                    // NOTE: multiply by TPF for any button bindings
                    ((AnalogListener) listener).onAnalog(mapping.name, value, frameTPF);
                }
            }
        }
    }

    /**
     * Dispatches both action (edge-triggered) and analog events for an axis
     * value. Values below the dead zone are routed to invokeAnalogs() only.
     *
     * @param hash     trigger hash code identifying the bindings to fire
     * @param value    axis value (non-negative; sign handled by caller)
     * @param applyTpf true to scale the value by time-per-frame before dispatch
     */
    private void invokeAnalogsAndActions(int hash, float value, boolean applyTpf) {
        if (value < axisDeadZone) {
            invokeAnalogs(hash, value, !applyTpf);
            return;
        }
        ArrayList<Mapping> maps = bindings.get(hash);
        if (maps == null) {
            return;
        }
        // Action listeners only fire on the transition into the "active" state.
        boolean valueChanged = !axisValues.containsKey(hash);
        if (applyTpf) {
            value *= frameTPF;
        }
        int size = maps.size();
        for (int i = size - 1; i >= 0; i--) {
            Mapping mapping = maps.get(i);
            ArrayList<InputListener> listeners = mapping.listeners;
            int listenerSize = listeners.size();
            for (int j = listenerSize - 1; j >= 0; j--) {
                InputListener listener = listeners.get(j);
                if (listener instanceof ActionListener && valueChanged) {
                    ((ActionListener) listener).onAction(mapping.name, true, frameTPF);
                }
                if (listener instanceof AnalogListener) {
                    ((AnalogListener) listener).onAnalog(mapping.name, value, frameTPF);
                }
            }
        }
    }

    /**
     * Callback from RawInputListener. Do not use.
     */
    public void beginInput() {
    }

    /**
     * Callback from RawInputListener. Do not use.
     */
    public void endInput() {
    }

    /**
     * Handles a queued joystick axis event: applies the dead zone, cancels any
     * pending action on the opposite axis direction, and fires the
     * analog/action events for the active direction.
     */
    private void onJoyAxisEventQueued(JoyAxisEvent evt) {
//        for (int i = 0; i < rawListeners.size(); i++){
//            rawListeners.get(i).onJoyAxisEvent(evt);
//        }
        int joyId = evt.getJoyIndex();
        int axis = evt.getAxisIndex();
        float value = evt.getValue();
        if (value < axisDeadZone && value > -axisDeadZone) {
            // Axis returned to center: release both directions.
            int hash1 = JoyAxisTrigger.joyAxisHash(joyId, axis, true);
            int hash2 = JoyAxisTrigger.joyAxisHash(joyId, axis, false);

            Float val1 = axisValues.get(hash1);
            Float val2 = axisValues.get(hash2);

            if (val1 != null && val1.floatValue() > axisDeadZone) {
                invokeActions(hash1, false);
            }
            if (val2 != null && val2.floatValue() > axisDeadZone) {
                invokeActions(hash2, false);
            }
            axisValues.remove(hash1);
            axisValues.remove(hash2);
        } else if (value < 0) {
            int hash = JoyAxisTrigger.joyAxisHash(joyId, axis, true);
            int otherHash = JoyAxisTrigger.joyAxisHash(joyId, axis, false);

            // Clear the reverse direction's actions in case we
            // crossed center too quickly
            Float otherVal = axisValues.get(otherHash);
            if (otherVal != null && otherVal.floatValue() > axisDeadZone) {
                invokeActions(otherHash, false);
            }

            invokeAnalogsAndActions(hash, -value, true);
            axisValues.put(hash, -value);
            axisValues.remove(otherHash);
        } else {
            int hash = JoyAxisTrigger.joyAxisHash(joyId, axis, false);
            int otherHash = JoyAxisTrigger.joyAxisHash(joyId, axis, true);

            // Clear the reverse direction's actions in case we
            // crossed center too quickly
            Float otherVal = axisValues.get(otherHash);
            if (otherVal != null && otherVal.floatValue() > axisDeadZone) {
                invokeActions(otherHash, false);
            }

            invokeAnalogsAndActions(hash, value, true);
            axisValues.put(hash, value);
            axisValues.remove(otherHash);
        }
    }

    /**
     * Callback from RawInputListener. Do not use.
     */
    public void onJoyAxisEvent(JoyAxisEvent evt) {
        if (!eventsPermitted) {
            throw new UnsupportedOperationException("JoyInput has raised an event at an illegal time.");
        }
        inputQueue.add(evt);
    }

    /**
     * Handles a queued joystick button event by firing the bound action and
     * timed-action events.
     */
    private void onJoyButtonEventQueued(JoyButtonEvent evt) {
//        for (int i = 0; i < rawListeners.size(); i++){
//            rawListeners.get(i).onJoyButtonEvent(evt);
//        }
        int hash = JoyButtonTrigger.joyButtonHash(evt.getJoyIndex(), evt.getButtonIndex());
        invokeActions(hash, evt.isPressed());
        invokeTimedActions(hash, evt.getTime(), evt.isPressed());
    }

    /**
     * Callback from RawInputListener. Do not use.
     */
    public void onJoyButtonEvent(JoyButtonEvent evt) {
        if (!eventsPermitted) {
            throw new UnsupportedOperationException("JoyInput has raised an event at an illegal time.");
        }
        inputQueue.add(evt);
    }

    /**
     * Handles a queued mouse motion event, converting each non-zero delta
     * (X, Y, wheel) into an analog/action dispatch on the matching axis
     * trigger. Deltas are normalized by 1024 (movement) or 100 (wheel).
     */
    private void onMouseMotionEventQueued(MouseMotionEvent evt) {
//        for (int i = 0; i < rawListeners.size(); i++){
//            rawListeners.get(i).onMouseMotionEvent(evt);
//        }
        if (evt.getDX() != 0) {
            float val = Math.abs(evt.getDX()) / 1024f;
            invokeAnalogsAndActions(MouseAxisTrigger.mouseAxisHash(MouseInput.AXIS_X, evt.getDX() < 0), val, false);
        }
        if (evt.getDY() != 0) {
            float val = Math.abs(evt.getDY()) / 1024f;
            invokeAnalogsAndActions(MouseAxisTrigger.mouseAxisHash(MouseInput.AXIS_Y, evt.getDY() < 0), val, false);
        }
        if (evt.getDeltaWheel() != 0) {
            float val = Math.abs(evt.getDeltaWheel()) / 100f;
            invokeAnalogsAndActions(MouseAxisTrigger.mouseAxisHash(MouseInput.AXIS_WHEEL, evt.getDeltaWheel() < 0), val, false);
        }
    }

    /**
     * Sets the mouse cursor image or animation.
     * Set cursor to null to show default system cursor.
     * To hide the cursor completely, use {@link #setCursorVisible(boolean) }.
     *
     * @param jmeCursor The cursor to set, or null to reset to system cursor.
     *
     * @see JmeCursor
     */
    public void setMouseCursor(JmeCursor jmeCursor) {
        mouse.setNativeCursor(jmeCursor);
    }

    /**
     * Callback from RawInputListener. Do not use.
     */
    public void onMouseMotionEvent(MouseMotionEvent evt) {
        if (!eventsPermitted) {
            throw new UnsupportedOperationException("MouseInput has raised an event at an illegal time.");
        }
        cursorPos.set(evt.getX(), evt.getY());
        inputQueue.add(evt);
    }

    /**
     * Handles a queued mouse button event by firing the bound action and
     * timed-action events.
     */
    private void onMouseButtonEventQueued(MouseButtonEvent evt) {
        int hash = MouseButtonTrigger.mouseButtonHash(evt.getButtonIndex());
        invokeActions(hash, evt.isPressed());
        invokeTimedActions(hash, evt.getTime(), evt.isPressed());
    }

    /**
     * Callback from RawInputListener. Do not use.
     */
    public void onMouseButtonEvent(MouseButtonEvent evt) {
        if (!eventsPermitted) {
            throw new UnsupportedOperationException("MouseInput has raised an event at an illegal time.");
        }
        //updating cursor pos on click, so that non android touch events can properly update cursor position.
        cursorPos.set(evt.getX(), evt.getY());
        inputQueue.add(evt);
    }

    /**
     * Handles a queued keyboard event by firing the bound action and
     * timed-action events. Key-repeat events are ignored.
     */
    private void onKeyEventQueued(KeyInputEvent evt) {
        if (evt.isRepeating()) {
            return; // repeat events not used for bindings
        }
        int hash = KeyTrigger.keyHash(evt.getKeyCode());
        invokeActions(hash, evt.isPressed());
        invokeTimedActions(hash, evt.getTime(), evt.isPressed());
    }

    /**
     * Callback from RawInputListener. Do not use.
     */
    public void onKeyEvent(KeyInputEvent evt) {
        if (!eventsPermitted) {
            throw new UnsupportedOperationException("KeyInput has raised an event at an illegal time.");
        }
        inputQueue.add(evt);
    }

    /**
     * Set the deadzone for joystick axes.
     *
     * <p>{@link ActionListener#onAction(java.lang.String, boolean, float) }
     * events will only be raised if the joystick axis value is greater than
     * the <code>deadZone</code>.
     *
     * @param deadZone the deadzone for joystick axes.
     */
    public void setAxisDeadZone(float deadZone) {
        this.axisDeadZone = deadZone;
    }

    /**
     * Returns the deadzone for joystick axes.
     *
     * @return the deadzone for joystick axes.
     */
    public float getAxisDeadZone() {
        return axisDeadZone;
    }

    /**
     * Adds a new listener to receive events on the given mappings.
     *
     * <p>The given InputListener will be registered to receive events on the
     * specified mapping names. When a mapping raises an event, the listener
     * will have its appropriate method invoked, either
     * {@link ActionListener#onAction(java.lang.String, boolean, float) } or
     * {@link AnalogListener#onAnalog(java.lang.String, float, float) }
     * depending on which interface the <code>listener</code> implements.
     * If the listener implements both interfaces, it receives both events.
     * Registering to a mapping that does not yet exist creates it.
     *
     * @param listener The listener to register to receive input events.
     * @param mappingNames The mapping names which the listener will receive
     * events from.
     *
     * @see InputManager#removeListener(com.jme3.input.controls.InputListener)
     */
    public void addListener(InputListener listener, String... mappingNames) {
        for (String mappingName : mappingNames) {
            Mapping mapping = mappings.get(mappingName);
            if (mapping == null) {
                mapping = new Mapping(mappingName);
                mappings.put(mappingName, mapping);
            }
            // Guard against double-registration on the same mapping.
            if (!mapping.listeners.contains(listener)) {
                mapping.listeners.add(listener);
            }
        }
    }

    /**
     * Removes a listener from receiving events.
     *
     * <p>This will unregister the listener from any mappings that it was
     * previously registered with via
     * {@link InputManager#addListener(com.jme3.input.controls.InputListener, java.lang.String[]) }.
     *
     * @param listener The listener to unregister.
     */
    public void removeListener(InputListener listener) {
        for (Mapping mapping : mappings.values()) {
            mapping.listeners.remove(listener);
        }
    }

    /**
     * Create a new mapping to the given triggers.
     *
     * <p>The given mapping will be assigned to the given triggers; when any
     * of the triggers raise an event, the listeners registered to the mapping
     * will receive appropriate events. Adding the same mapping to a trigger
     * twice logs a warning and is otherwise ignored.
     *
     * @param mappingName The mapping name to assign.
     * @param triggers The triggers to which the mapping is to be registered.
     *
     * @see InputManager#deleteMapping(java.lang.String)
     */
    public void addMapping(String mappingName, Trigger... triggers) {
        Mapping mapping = mappings.get(mappingName);
        if (mapping == null) {
            mapping = new Mapping(mappingName);
            mappings.put(mappingName, mapping);
        }

        for (Trigger trigger : triggers) {
            int hash = trigger.triggerHashCode();
            ArrayList<Mapping> names = bindings.get(hash);
            if (names == null) {
                names = new ArrayList<Mapping>();
                bindings.put(hash, names);
            }
            if (!names.contains(mapping)) {
                names.add(mapping);
                mapping.triggers.add(hash);
            } else {
                logger.log(Level.WARNING, "Attempted to add mapping \"{0}\" twice to trigger.", mappingName);
            }
        }
    }

    /**
     * Returns true if this InputManager has a mapping registered for the
     * given mappingName.
     *
     * @param mappingName The mapping name to check.
     *
     * @see InputManager#addMapping(java.lang.String, com.jme3.input.controls.Trigger[])
     * @see InputManager#deleteMapping(java.lang.String)
     */
    public boolean hasMapping(String mappingName) {
        return mappings.containsKey(mappingName);
    }

    /**
     * Deletes a mapping from receiving trigger events.
     *
     * <p>The given mapping will no longer be assigned to receive trigger
     * events. A missing mapping is logged and skipped (not an error).
     *
     * @param mappingName The mapping name to unregister.
     *
     * @see InputManager#addMapping(java.lang.String, com.jme3.input.controls.Trigger[])
     */
    public void deleteMapping(String mappingName) {
        Mapping mapping = mappings.remove(mappingName);
        if (mapping == null) {
            //throw new IllegalArgumentException("Cannot find mapping: " + mappingName);
            logger.log(Level.WARNING, "Cannot find mapping to be removed, skipping: {0}", mappingName);
            return;
        }

        // Unhook the mapping from every trigger binding it was attached to.
        ArrayList<Integer> triggers = mapping.triggers;
        for (int i = triggers.size() - 1; i >= 0; i--) {
            int hash = triggers.get(i);
            ArrayList<Mapping> maps = bindings.get(hash);
            maps.remove(mapping);
        }
    }

    /**
     * Deletes a specific trigger registered to a mapping.
     *
     * <p>The given mapping will no longer receive events raised by the
     * trigger.
     *
     * @param mappingName The mapping name to cease receiving events from the
     * trigger.
     * @param trigger The trigger to no longer invoke events on the mapping.
     * @throws IllegalArgumentException if the mapping does not exist.
     */
    public void deleteTrigger(String mappingName, Trigger trigger) {
        Mapping mapping = mappings.get(mappingName);
        if (mapping == null) {
            throw new IllegalArgumentException("Cannot find mapping: " + mappingName);
        }

        ArrayList<Mapping> maps = bindings.get(trigger.triggerHashCode());
        maps.remove(mapping);
    }

    /**
     * Clears all the input mappings from this InputManager.
     * Consequently, also clears all of the InputListeners as well.
     */
    public void clearMappings() {
        mappings.clear();
        bindings.clear();
        reset();
    }

    /**
     * Do not use.
     * Called to reset pressed keys or buttons when focus is restored.
     */
    public void reset() {
        pressedButtons.clear();
        axisValues.clear();
    }

    /**
     * Returns whether the mouse cursor is visible or not.
     *
     * <p>By default the cursor is visible.
     *
     * @return whether the mouse cursor is visible or not.
     *
     * @see InputManager#setCursorVisible(boolean)
     */
    public boolean isCursorVisible() {
        return mouseVisible;
    }

    /**
     * Set whether the mouse cursor should be visible or not.
     *
     * @param visible whether the mouse cursor should be visible or not.
     */
    public void setCursorVisible(boolean visible) {
        if (mouseVisible != visible) {
            mouseVisible = visible;
            mouse.setCursorVisible(mouseVisible);
        }
    }

    /**
     * Returns the current cursor position. The position is relative to the
     * bottom-left of the screen and is in pixels.
     *
     * @return the current cursor position
     */
    public Vector2f getCursorPosition() {
        return cursorPos;
    }

    /**
     * Returns an array of all joysticks installed on the system.
     *
     * @return an array of all joysticks installed on the system.
     */
    public Joystick[] getJoysticks() {
        return joysticks;
    }

    /**
     * Adds a {@link RawInputListener} to receive raw input events.
     *
     * <p>Any raw input listeners registered to this <code>InputManager</code>
     * will receive raw input events first, before they get handled by the
     * <code>InputManager</code> itself. The listeners are each processed in
     * the order they were added, e.g. FIFO.
     * <p>
     * If a raw input listener has handled the event and does not wish other
     * listeners down the list to process the event, it may set the
     * {@link InputEvent#setConsumed() consumed flag} to indicate the event
     * was consumed and shouldn't be processed any further. The listener may
     * do this either at each of the event callbacks or at the
     * {@link RawInputListener#endInput() } method.
     *
     * @param listener A listener to receive raw input events.
     *
     * @see RawInputListener
     */
    public void addRawInputListener(RawInputListener listener) {
        rawListeners.add(listener);
        // Invalidate the cached snapshot array.
        rawListenerArray = null;
    }

    /**
     * Removes a {@link RawInputListener} so that it no longer receives raw
     * input events.
     *
     * @param listener The listener to cease receiving raw input events.
     *
     * @see InputManager#addRawInputListener(com.jme3.input.RawInputListener)
     */
    public void removeRawInputListener(RawInputListener listener) {
        rawListeners.remove(listener);
        rawListenerArray = null;
    }

    /**
     * Clears all {@link RawInputListener}s.
     *
     * @see InputManager#addRawInputListener(com.jme3.input.RawInputListener)
     */
    public void clearRawInputListeners() {
        rawListeners.clear();
        rawListenerArray = null;
    }

    // Lazily (re)builds the array snapshot of the raw listener list; the
    // snapshot is invalidated whenever the list changes.
    private RawInputListener[] getRawListenerArray() {
        if (rawListenerArray == null)
            rawListenerArray = rawListeners.toArray(new RawInputListener[rawListeners.size()]);
        return rawListenerArray;
    }

    /**
     * Enable simulation of mouse events. Used for touchscreen input only.
     *
     * @param value True to enable simulation of mouse events
     */
    public void setSimulateMouse(boolean value) {
        if (touch != null) {
            touch.setSimulateMouse(value);
        }
    }

    /**
     * Returns state of simulation of mouse events. Used for touchscreen input only.
     *
     * @deprecated Use isSimulateMouse
     */
    public boolean getSimulateMouse() {
        if (touch != null) {
            return touch.isSimulateMouse();
        } else {
            return false;
        }
    }

    /**
     * Returns state of simulation of mouse events. Used for touchscreen input only.
     */
    public boolean isSimulateMouse() {
        if (touch != null) {
            return touch.isSimulateMouse();
        } else {
            return false;
        }
    }

    /**
     * Enable simulation of keyboard events. Used for touchscreen input only.
     *
     * @param value True to enable simulation of keyboard events
     */
    public void setSimulateKeyboard(boolean value) {
        if (touch != null) {
            touch.setSimulateKeyboard(value);
        }
    }

    /**
     * Returns state of simulation of key events. Used for touchscreen input only.
     */
    public boolean isSimulateKeyboard() {
        if (touch != null) {
            return touch.isSimulateKeyboard();
        } else {
            return false;
        }
    }

    /**
     * Drains the input event queue: first offers every event to the raw
     * listeners (bracketed by beginInput()/endInput()), then dispatches
     * unconsumed events to the mapping-based handlers, marking each event
     * consumed afterwards so backing buffers can recycle them.
     */
    private void processQueue() {
        int queueSize = inputQueue.size();
        RawInputListener[] array = getRawListenerArray();

        for (RawInputListener listener : array) {
            listener.beginInput();

            for (int j = 0; j < queueSize; j++) {
                InputEvent event = inputQueue.get(j);
                if (event.isConsumed()) {
                    continue;
                }

                if (event instanceof MouseMotionEvent) {
                    listener.onMouseMotionEvent((MouseMotionEvent) event);
                } else if (event instanceof KeyInputEvent) {
                    listener.onKeyEvent((KeyInputEvent) event);
                } else if (event instanceof MouseButtonEvent) {
                    listener.onMouseButtonEvent((MouseButtonEvent) event);
                } else if (event instanceof JoyAxisEvent) {
                    listener.onJoyAxisEvent((JoyAxisEvent) event);
                } else if (event instanceof JoyButtonEvent) {
                    listener.onJoyButtonEvent((JoyButtonEvent) event);
                } else if (event instanceof TouchEvent) {
                    listener.onTouchEvent((TouchEvent) event);
                } else {
                    assert false;
                }
            }

            listener.endInput();
        }

        for (int i = 0; i < queueSize; i++) {
            InputEvent event = inputQueue.get(i);
            if (event.isConsumed()) {
                continue;
            }

            if (event instanceof MouseMotionEvent) {
                onMouseMotionEventQueued((MouseMotionEvent) event);
            } else if (event instanceof KeyInputEvent) {
                onKeyEventQueued((KeyInputEvent) event);
            } else if (event instanceof MouseButtonEvent) {
                onMouseButtonEventQueued((MouseButtonEvent) event);
            } else if (event instanceof JoyAxisEvent) {
                onJoyAxisEventQueued((JoyAxisEvent) event);
            } else if (event instanceof JoyButtonEvent) {
                onJoyButtonEventQueued((JoyButtonEvent) event);
            } else if (event instanceof TouchEvent) {
                onTouchEventQueued((TouchEvent) event);
            } else {
                assert false;
            }

            // larynx, 2011.06.10 - flag event as reusable because
            // the android input uses a non-allocating ringbuffer which
            // needs to know when the event is not anymore in inputQueue
            // and therefore can be reused.
            event.setConsumed();
        }

        inputQueue.clear();
    }

    /**
     * Updates the <code>InputManager</code>.
     * This will query current input devices and send
     * appropriate events to registered listeners.
     *
     * @param tpf Time per frame value.
     */
    public void update(float tpf) {
        frameTPF = tpf;

        // Activate safemode if the TPF value is so small
        // that rounding errors are inevitable
        safeMode = tpf < 0.015f;

        long currentTime = keys.getInputTimeNanos();
        frameDelta = currentTime - lastUpdateTime;

        // Device updates may only enqueue events while this flag is set.
        eventsPermitted = true;

        keys.update();
        mouse.update();
        if (joystick != null) {
            joystick.update();
        }
        if (touch != null) {
            touch.update();
        }

        eventsPermitted = false;

        processQueue();
        invokeUpdateActions();

        lastLastUpdateTime = lastUpdateTime;
        lastUpdateTime = currentTime;
    }

    /**
     * Dispatches touch events to touch listeners
     * @param evt The touch event to be dispatched to all onTouch listeners
     */
    public void onTouchEventQueued(TouchEvent evt) {
        ArrayList<Mapping> maps = bindings.get(TouchTrigger.touchHash(evt.getKeyCode()));
        if (maps == null) {
            return;
        }

        int size = maps.size();
        for (int i = size - 1; i >= 0; i--) {
            Mapping mapping = maps.get(i);
            ArrayList<InputListener> listeners = mapping.listeners;
            int listenerSize = listeners.size();
            for (int j = listenerSize - 1; j >= 0; j--) {
                InputListener listener = listeners.get(j);
                if (listener instanceof TouchListener) {
                    ((TouchListener) listener).onTouch(mapping.name, evt, frameTPF);
                }
            }
        }
    }

    /**
     * Callback from RawInputListener. Do not use.
     */
    @Override
    public void onTouchEvent(TouchEvent evt) {
        if (!eventsPermitted) {
            throw new UnsupportedOperationException("TouchInput has raised an event at an illegal time.");
        }
        cursorPos.set(evt.getX(), evt.getY());
        inputQueue.add(evt);
    }
}
/* * Copyright 2015-2016 Red Hat, Inc, and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.hal.client.configuration.subsystem.elytron; import java.util.List; import elemental2.dom.HTMLElement; import org.jboss.gwt.elemento.core.IsElement; import org.jboss.hal.ballroom.Attachable; import org.jboss.hal.ballroom.Pages; import org.jboss.hal.ballroom.form.Form; import org.jboss.hal.ballroom.table.InlineAction; import org.jboss.hal.ballroom.table.Table; import org.jboss.hal.core.mbui.form.ModelNodeForm; import org.jboss.hal.core.mbui.table.ModelNodeTable; import org.jboss.hal.core.mbui.table.TableButtonFactory; import org.jboss.hal.core.mvp.HasPresenter; import org.jboss.hal.dmr.ModelNode; import org.jboss.hal.dmr.NamedNode; import org.jboss.hal.meta.Metadata; import org.jboss.hal.resources.Ids; import org.jboss.hal.resources.Names; import static org.jboss.gwt.elemento.core.Elements.h; import static org.jboss.gwt.elemento.core.Elements.p; import static org.jboss.gwt.elemento.core.Elements.section; import static org.jboss.hal.dmr.ModelDescriptionConstants.*; import static org.jboss.hal.dmr.ModelNodeHelper.failSafeList; import static org.jboss.hal.dmr.ModelNodeHelper.storeIndex; import static org.jboss.hal.resources.Ids.FORM; import static org.jboss.hal.resources.Ids.PAGE; import static org.jboss.hal.resources.Ids.PAGES; class HttpAuthenticationFactoryElement implements IsElement<HTMLElement>, Attachable, HasPresenter<FactoriesPresenter> { 
private final Table<NamedNode> factoryTable;
    private final Form<NamedNode> factoryForm;
    private final Table<ModelNode> mcTable; // mc = mechanism-configuration
    private final Form<ModelNode> mcForm;
    private final Table<ModelNode> mrcTable; // mrc = mechanism-realm-configurations
    private final Form<ModelNode> mrcForm;
    private final Pages pages;
    private FactoriesPresenter presenter;
    // Name of the http-authentication-factory the user drilled into.
    private String selectedFactory;
    // mechanism-name of the mechanism configuration the user drilled into.
    private String selectedMc;
    // List indices (HAL_INDEX) of the selected mc / mrc rows; used to address
    // the complex attribute entries in write operations.
    private int mcIndex;
    private int mrcIndex;

    // Builds the three-level master/detail UI:
    // factory -> mechanism-configurations -> mechanism-realm-configurations,
    // wired together as breadcrumb pages.
    HttpAuthenticationFactoryElement(Metadata metadata, TableButtonFactory tableButtonFactory) {
        // HTTP authentication factory
        factoryTable = new ModelNodeTable.Builder<NamedNode>(id(Ids.TABLE), metadata)
                .button(tableButtonFactory.add(id(Ids.ADD), Names.HTTP_AUTHENTICATION_FACTORY,
                        metadata.getTemplate(), (n, a) -> presenter.reloadHttpAuthenticationFactories()))
                .button(tableButtonFactory.remove(Names.HTTP_AUTHENTICATION_FACTORY, metadata.getTemplate(),
                        (table) -> table.selectedRow().getName(),
                        () -> presenter.reloadHttpAuthenticationFactories()))
                .column(NAME, (cell, type, row, meta) -> row.getName())
                .column(new InlineAction<>(Names.MECHANISM_CONFIGURATIONS, this::showMechanismConfiguration), "15em")
                .build();
        factoryForm = new ModelNodeForm.Builder<NamedNode>(id(FORM), metadata)
                .onSave((form, changedValues) -> presenter.saveHttpAuthenticationFactory(form, changedValues))
                .build();
        HTMLElement factorySection = section()
                .add(h(1).textContent(Names.HTTP_AUTHENTICATION_FACTORY))
                .add(p().textContent(metadata.getDescription().getDescription()))
                .addAll(factoryTable, factoryForm).element();

        // mechanism configurations
        Metadata mcMetadata = metadata.forComplexAttribute(MECHANISM_CONFIGURATIONS);
        mcTable = new ModelNodeTable.Builder<>(id(MECHANISM_CONFIGURATIONS, TABLE), mcMetadata)
                .button(tableButtonFactory.add(mcMetadata.getTemplate(),
                        table -> presenter.addHttpMechanismConfiguration(selectedFactory)))
                .button(tableButtonFactory.remove(mcMetadata.getTemplate(),
                        table -> presenter.removeHttpMechanismConfiguration(selectedFactory,
                                table.selectedRow().get(HAL_INDEX).asInt())))
                .column(MECHANISM_NAME)
                .column(new InlineAction<>(Names.MECHANISM_REALM_CONFIGURATIONS,
                        this::showMechanismRealmConfiguration), "20em")
                .build();
        mcForm = new ModelNodeForm.Builder<>(id(MECHANISM_CONFIGURATIONS, FORM), mcMetadata)
                .onSave(((form, changedValues) -> presenter.saveHttpMechanismConfiguration(selectedFactory,
                        form.getModel().get(HAL_INDEX).asInt(), changedValues)))
                .build();
        HTMLElement mcSection = section()
                .add(h(1).textContent(Names.MECHANISM_CONFIGURATIONS))
                .add(p().textContent(mcMetadata.getDescription().getDescription()))
                .addAll(mcTable, mcForm).element();

        // mechanism realm configurations
        Metadata mrcMetadata = mcMetadata.forComplexAttribute(MECHANISM_REALM_CONFIGURATIONS);
        mrcTable = new ModelNodeTable.Builder<>(id(MECHANISM_REALM_CONFIGURATIONS, Ids.TABLE), mrcMetadata)
                .button(tableButtonFactory.add(mrcMetadata.getTemplate(),
                        table -> presenter.addHttpMechanismRealmConfiguration(selectedFactory, mcIndex)))
                .button(tableButtonFactory.remove(mrcMetadata.getTemplate(),
                        table -> presenter.removeHttpMechanismRealmConfiguration(selectedFactory, mcIndex,
                                table.selectedRow().get(HAL_INDEX).asInt())))
                .column(REALM_NAME)
                .build();
        mrcForm = new ModelNodeForm.Builder<>(id(MECHANISM_REALM_CONFIGURATIONS, FORM), mrcMetadata)
                .onSave(((form, changedValues) -> presenter.saveHttpMechanismRealmConfiguration(selectedFactory,
                        mcIndex, mrcIndex, changedValues)))
                .build();
        HTMLElement mrcSection = section()
                .add(h(1).textContent(Names.MECHANISM_REALM_CONFIGURATIONS))
                .add(p().textContent(mrcMetadata.getDescription().getDescription()))
                .addAll(mrcTable, mrcForm).element();

        // Breadcrumb navigation: factory page is the root.
        pages = new Pages(id(PAGES), id(PAGE), factorySection);
        pages.addPage(id(PAGE), id(MECHANISM_CONFIGURATIONS, PAGE),
                () -> Names.HTTP_AUTHENTICATION_FACTORY + ": " + selectedFactory,
                () -> Names.MECHANISM_CONFIGURATIONS,
                mcSection);
        pages.addPage(id(MECHANISM_CONFIGURATIONS, PAGE), id(MECHANISM_REALM_CONFIGURATIONS, PAGE),
                () -> Names.MECHANISM_CONFIGURATIONS + ": " + selectedMc,
                () -> Names.MECHANISM_REALM_CONFIGURATIONS,
                mrcSection);
    }

    // Builds element ids scoped under the elytron http-authentication-factory prefix.
    private String id(String... ids) {
        return Ids.build(Ids.ELYTRON_HTTP_AUTHENTICATION_FACTORY, ids);
    }

    @Override
    public HTMLElement element() {
        return pages.element();
    }

    @Override
    public void attach() {
        factoryTable.attach();
        factoryForm.attach();
        factoryTable.bindForm(factoryForm);

        mcTable.attach();
        mcForm.attach();
        mcTable.bindForm(mcForm);
        // Button index 1 is the remove button — presumably matches the order
        // the buttons were registered in the constructor; verify if reordered.
        mcTable.onSelectionChange(table -> mcTable.enableButton(1, mcTable.hasSelection()));

        mrcTable.attach();
        mrcForm.attach();
        mrcTable.bindForm(mrcForm);
        mrcTable.onSelectionChange(table -> {
            mrcTable.enableButton(1, mrcTable.hasSelection());
            if (table.hasSelection()) {
                // Remember the selected row's index for subsequent saves.
                mrcIndex = table.selectedRow().get(HAL_INDEX).asInt();
            }
        });
    }

    @Override
    public void setPresenter(FactoriesPresenter presenter) {
        this.presenter = presenter;
    }

    // Refreshes the factory table and, when a sub-page is currently shown,
    // re-resolves the drilled-into factory / mechanism so the nested tables
    // reflect the new model.
    void update(List<NamedNode> nodes) {
        factoryForm.clear();
        factoryTable.update(nodes);

        if (id(MECHANISM_CONFIGURATIONS, PAGE).equals(pages.getCurrentId())) {
            nodes.stream()
                    .filter(factory -> selectedFactory.equals(factory.getName()))
                    .findFirst()
                    .ifPresent(this::showMechanismConfiguration);
        } else if (id(MECHANISM_REALM_CONFIGURATIONS, PAGE).equals(pages.getCurrentId())) {
            nodes.stream()
                    .filter(factory -> selectedFactory.equals(factory.getName()))
                    .findFirst()
                    .ifPresent(factory -> {
                        List<ModelNode> mcNodes = failSafeList(factory, MECHANISM_CONFIGURATIONS);
                        storeIndex(mcNodes);
                        mcForm.clear();
                        mcTable.update(mcNodes, modelNode -> modelNode.get(MECHANISM_NAME).asString());
                        mcNodes.stream()
                                .filter(mc -> selectedMc.equals(mc.get(MECHANISM_NAME).asString()))
                                .findFirst()
                                .ifPresent(this::showMechanismRealmConfiguration);
                    });
        }
    }

    // Drill-down: show the mechanism-configurations page of one factory.
    private void showMechanismConfiguration(NamedNode httpAuthenticationFactory) {
        selectedFactory = httpAuthenticationFactory.getName();
        List<ModelNode> mcNodes = failSafeList(httpAuthenticationFactory, MECHANISM_CONFIGURATIONS);
        storeIndex(mcNodes);
        mcForm.clear();
        mcTable.update(mcNodes, modelNode -> modelNode.get(MECHANISM_NAME).asString());
        mcTable.enableButton(1, mcTable.hasSelection());
        pages.showPage(id(MECHANISM_CONFIGURATIONS, PAGE));
    }

    // Drill-down: show the mechanism-realm-configurations page of one
    // mechanism configuration.
    private void showMechanismRealmConfiguration(ModelNode mechanismConfiguration) {
        selectedMc = mechanismConfiguration.get(MECHANISM_NAME).asString();
        mcIndex = mechanismConfiguration.get(HAL_INDEX).asInt();
        List<ModelNode> mrcNodes = failSafeList(mechanismConfiguration, MECHANISM_REALM_CONFIGURATIONS);
        storeIndex(mrcNodes);
        mrcForm.clear();
        mrcTable.update(mrcNodes, modelNode -> modelNode.get(REALM_NAME).asString());
        mrcTable.enableButton(1, mrcTable.hasSelection());
        pages.showPage(id(MECHANISM_REALM_CONFIGURATIONS, PAGE));
    }
}
package example.passwordmanager; import android.app.Activity; import android.app.FragmentTransaction; import android.content.Context; import android.content.Intent; import android.graphics.Typeface; import android.os.Bundle; import android.app.Fragment; import android.support.annotation.NonNull; import android.support.design.widget.Snackbar; import android.support.v4.content.ContextCompat; import android.text.InputType; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.EditText; import android.widget.ProgressBar; import android.widget.TextView; import android.widget.Toast; import com.bumptech.glide.Glide; import com.google.android.gms.auth.api.Auth; import com.google.android.gms.auth.api.signin.GoogleSignInAccount; import com.google.android.gms.auth.api.signin.GoogleSignInOptions; import com.google.android.gms.auth.api.signin.GoogleSignInResult; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.SignInButton; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.ResultCallback; import com.google.android.gms.common.api.Status; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.Task; import com.google.firebase.auth.AuthCredential; import com.google.firebase.auth.AuthResult; import com.google.firebase.auth.FirebaseAuth; import com.google.firebase.auth.FirebaseUser; import com.google.firebase.auth.GoogleAuthProvider; import java.util.concurrent.Executor; import static android.content.ContentValues.TAG; /** * A simple {@link Fragment} subclass. 
*/
// Login screen offering both email/password login and Google Sign-In,
// backed by Firebase Authentication.
public class LoginFragment extends Fragment implements View.OnClickListener,
        GoogleApiClient.OnConnectionFailedListener {

    private Button btn_login;
    private Button btnLinkToRegister;
    private EditText inputEmail;
    private EditText inputPassword;
    private ProgressBar progress;
    // Hosting activity; cast in onAttach().
    private LoginActivity loginActivity;

    // Arbitrary request code identifying the Google sign-in intent result.
    private static final int RC_SIGN_IN = 9001;
    SignInButton signInButton;
    private GoogleApiClient mGoogleApiClient;
    private FirebaseAuth mAuth;
    private FirebaseAuth.AuthStateListener mAuthListener;

    // Google profile data forwarded to MainActivity2 via intent extras.
    String prof_name;
    String prof_email;
    String prof_img_url;

    public LoginFragment() {
        // Required empty public constructor
    }

    public void onAttach(Context activity) {
        super.onAttach(activity);
        loginActivity = (LoginActivity) activity;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        loginActivity.setActivityBackgroundColor(ContextCompat.getColor(loginActivity, R.color.bg_login));
        // Inflate the layout for this fragment
        View view = inflater.inflate(R.layout.fragment_login, container, false);
        initViews(view);
        // Configure sign-in to request the user's ID, email address, and basic
        // profile. ID and basic profile are included in DEFAULT_SIGN_IN.
GoogleSignInOptions gso = new GoogleSignInOptions.Builder(GoogleSignInOptions.DEFAULT_SIGN_IN)
                .requestIdToken(getString(R.string.default_web_client_id))
                .requestEmail()
                .build();
        mGoogleApiClient = new GoogleApiClient.Builder(this.loginActivity)
                .enableAutoManage(this.loginActivity /* FragmentActivity */, this /* OnConnectionFailedListener */)
                .addApi(Auth.GOOGLE_SIGN_IN_API, gso)
                .build();

        mAuth = FirebaseAuth.getInstance();
        // Logs auth state transitions; registered in onStart(), removed in onStop().
        mAuthListener = new FirebaseAuth.AuthStateListener() {
            @Override
            public void onAuthStateChanged(@NonNull FirebaseAuth firebaseAuth) {
                FirebaseUser user = firebaseAuth.getCurrentUser();
                if (user != null)
                    // User is signed in
                    Log.d(TAG, "onAuthStateChanged:signed_in:" + user.getUid());
                else
                    // User is signed out
                    Log.d(TAG, "onAuthStateChanged:signed_out");
                // updateUI(user);
            }
        };
        return view;
    }

    // Looks up all views and wires the click listeners.
    private void initViews(View view) {
        // pref = getActivity().getPreferences(0);
        progress = (ProgressBar) view.findViewById(R.id.progress);
        inputEmail = (EditText) view.findViewById(R.id.emailInput);
        inputPassword = (EditText) view.findViewById(R.id.passwordInput);
        btn_login = (Button) view.findViewById(R.id.btnLogin);
        btnLinkToRegister = (Button) view.findViewById(R.id.btnLinkToRegisterScreen);
        signInButton = (SignInButton) view.findViewById(R.id.sign_in_button);
        setGooglePlusButtonText(signInButton, getString(R.string.common_signin_button_text_long));
        btn_login.setOnClickListener(this);
        btnLinkToRegister.setOnClickListener(this);
        signInButton.setOnClickListener(this);
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.btnLogin :
                String email = inputEmail.getText().toString();
                String password = inputPassword.getText().toString();
                if(!email.isEmpty() && !password.isEmpty()) {
                    progress.setVisibility(View.VISIBLE);
                    loginProcess(email,password);
                } else
                    Snackbar.make(getView(), "Fields are empty !", Snackbar.LENGTH_LONG).show();
                break;
            case R.id.sign_in_button:
                signIn();
                break;
            case R.id.btnLinkToRegisterScreen :
                goToRegister();
                break;
        }
} private void loginProcess(String email, String password) { goToMain(); } private void goToMain() { Intent i = new Intent(getContext(), MainActivity2.class); i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); i.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK); i.addFlags(Intent.FLAG_ACTIVITY_NO_ANIMATION); i.putExtra("prof_name", prof_name); i.putExtra("prof_email", prof_email); i.putExtra("prof_img", prof_img_url); startActivity(i); loginActivity.finish(); } private void goToRegister() { Fragment register = new RegisterFragment(); FragmentTransaction ft = getFragmentManager().beginTransaction(); ft.replace(R.id.fragment_frame,register); ft.commit(); } @Override public void onConnectionFailed(@NonNull ConnectionResult connectionResult) { } private void signIn() { Intent signInIntent = Auth.GoogleSignInApi.getSignInIntent(mGoogleApiClient); startActivityForResult(signInIntent, RC_SIGN_IN); } private void signOut() { Auth.GoogleSignInApi.signOut(mGoogleApiClient).setResultCallback(new ResultCallback<Status>() { @Override public void onResult(@NonNull Status status) { updateUI(false); } }); } private void handleResult(GoogleSignInResult result) { if (result.isSuccess()) { GoogleSignInAccount account = result.getSignInAccount(); // String name = account.getDisplayName(); // String email = account.getEmail(); // String img_url = account.getPhotoUrl().toString(); // Name.setText(name); // Email.setText(email); // Glide.with(this).load(img_url).into(Prof_Pic); prof_name = account.getDisplayName(); prof_email = account.getEmail(); prof_img_url = account.getPhotoUrl().toString(); firebaseAuthWithGoogle(account); updateUI(true); } else updateUI(false); } private void firebaseAuthWithGoogle(GoogleSignInAccount acct) { Log.d(TAG, "firebaseAuthWithGoogle:" + acct.getId()); AuthCredential credential = GoogleAuthProvider.getCredential(acct.getIdToken(), null); mAuth.signInWithCredential(credential) .addOnCompleteListener(getActivity(), new OnCompleteListener<AuthResult>() { @Override public 
void onComplete(@NonNull Task<AuthResult> task) { Log.d(TAG, "signInWithCredential:onComplete:" + task.isSuccessful()); // If sign in fails, display a message to the user. If sign in succeeds // the auth state listener will be notified and logic to handle the // signed in user can be handled in the listener. if (!task.isSuccessful()) { Log.w(TAG, "signInWithCredential", task.getException()); Toast.makeText(getActivity(), "Authentication failed.", Toast.LENGTH_SHORT).show(); } // ... } }); } private void updateUI(boolean isLoggedIn) { if (isLoggedIn) { goToMain(); } else { } } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == RC_SIGN_IN) { GoogleSignInResult result = Auth.GoogleSignInApi.getSignInResultFromIntent(data); handleResult(result); } } @Override public void onStart() { super.onStart(); mAuth.addAuthStateListener(mAuthListener); } @Override public void onStop() { super.onStop(); if (mAuthListener != null) { mAuth.removeAuthStateListener(mAuthListener); } } protected void setGooglePlusButtonText(SignInButton signInButton, String buttonText) { for (int i = 0; i < signInButton.getChildCount(); i++) { View v = signInButton.getChildAt(i); if (v instanceof TextView) { TextView tv = (TextView) v; tv.setTextSize(15); tv.setTypeface(null, Typeface.NORMAL); tv.setInputType(InputType.TYPE_TEXT_FLAG_CAP_WORDS); tv.setText(buttonText); return; } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.telegram.model; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; /** * Represents a link to an animated GIF file. * * @see <a href="https://core.telegram.org/bots/api#inlinequeryresultgif"> * https://core.telegram.org/bots/api#inlinequeryresultgif</a> */ @JsonInclude(JsonInclude.Include.NON_NULL) public class InlineQueryResultGif extends InlineQueryResult { private static final String TYPE = "gif"; @JsonProperty("gif_url") private String gifUrl; @JsonProperty("gif_width") private String gifWidth; @JsonProperty("gif_height") private Integer gifHeight; @JsonProperty("gif_duration") private Integer duration; @JsonProperty("thumb_url") private String thumbUrl; private String title; private String caption; @JsonProperty("parse_mode") private String parseMode; @JsonProperty("input_message_content") private InputMessageContent inputMessageContext; public InlineQueryResultGif() { super(TYPE); } public static Builder builder() { return new Builder(); } public static final class Builder { private String id; private InlineKeyboardMarkup replyMarkup; private String gifUrl; private String gifWidth; private Integer 
gifHeight; private Integer duration; private String thumbUrl; private String title; private String caption; private String parseMode; private InputMessageContent inputMessageContext; private Builder() { } public Builder id(String id) { this.id = id; return this; } public Builder replyMarkup(InlineKeyboardMarkup replyMarkup) { this.replyMarkup = replyMarkup; return this; } public Builder gifUrl(String url) { this.gifUrl = url; return this; } public Builder gifWidth(String width) { this.gifWidth = width; return this; } public Builder gifHeight(Integer height) { this.gifHeight = height; return this; } public Builder duration(Integer duration) { this.duration = duration; return this; } public Builder thumbUrl(String thumbUrl) { this.thumbUrl = thumbUrl; return this; } public Builder title(String title) { this.title = title; return this; } public Builder caption(String caption) { this.caption = caption; return this; } public Builder parseMode(String parseMode) { this.parseMode = parseMode; return this; } public Builder inputMessageContext(InputMessageContent inputMessageContext) { this.inputMessageContext = inputMessageContext; return this; } public InlineQueryResultGif build() { InlineQueryResultGif inlineQueryResultGif = new InlineQueryResultGif(); inlineQueryResultGif.setType(TYPE); inlineQueryResultGif.setId(id); inlineQueryResultGif.setReplyMarkup(replyMarkup); inlineQueryResultGif.gifWidth = this.gifWidth; inlineQueryResultGif.gifUrl = this.gifUrl; inlineQueryResultGif.gifHeight = this.gifHeight; inlineQueryResultGif.duration = this.duration; inlineQueryResultGif.caption = this.caption; inlineQueryResultGif.parseMode = this.parseMode; inlineQueryResultGif.thumbUrl = this.thumbUrl; inlineQueryResultGif.title = this.title; inlineQueryResultGif.inputMessageContext = this.inputMessageContext; return inlineQueryResultGif; } } public String getGifUrl() { return gifUrl; } public String getGifWidth() { return gifWidth; } public Integer getGifHeight() { return gifHeight; } 
public Integer getDuration() { return duration; } public String getThumbUrl() { return thumbUrl; } public String getTitle() { return title; } public String getCaption() { return caption; } public String getParseMode() { return parseMode; } public InputMessageContent getInputMessageContext() { return inputMessageContext; } public void setGifUrl(String gifUrl) { this.gifUrl = gifUrl; } public void setGifWidth(String gifWidth) { this.gifWidth = gifWidth; } public void setGifHeight(Integer gifHeight) { this.gifHeight = gifHeight; } public void setDuration(Integer duration) { this.duration = duration; } public void setThumbUrl(String thumbUrl) { this.thumbUrl = thumbUrl; } public void setTitle(String title) { this.title = title; } public void setCaption(String caption) { this.caption = caption; } public void setParseMode(String parseMode) { this.parseMode = parseMode; } public void setInputMessageContext(InputMessageContent inputMessageContext) { this.inputMessageContext = inputMessageContext; } }
package br.ufrj.g2matricula.web.rest;

import br.ufrj.g2matricula.G2MatriculaApp;

import br.ufrj.g2matricula.domain.Aluno;
import br.ufrj.g2matricula.repository.AlunoRepository;
import br.ufrj.g2matricula.service.AlunoService;
import br.ufrj.g2matricula.repository.search.AlunoSearchRepository;
import br.ufrj.g2matricula.service.dto.AlunoDTO;
import br.ufrj.g2matricula.service.mapper.AlunoMapper;
import br.ufrj.g2matricula.web.rest.errors.ExceptionTranslator;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.web.PageableHandlerMethodArgumentResolver;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.transaction.annotation.Transactional;

import javax.persistence.EntityManager;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.time.ZoneOffset;
import java.time.ZoneId;
import java.util.List;

import static br.ufrj.g2matricula.web.rest.TestUtil.sameInstant;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;

import br.ufrj.g2matricula.domain.enumeration.TurnoStatus;
import br.ufrj.g2matricula.domain.enumeration.InscricaoStatus;

/**
 * Test class for the AlunoResource REST controller.
 *
 * Exercises the full CRUD + search surface of /api/alunos through a standalone
 * MockMvc setup (controller + exception translator only; no full web context).
 * Each mutating test runs inside a rolled-back transaction.
 *
 * @see AlunoResource
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = G2MatriculaApp.class)
public class AlunoResourceIntTest {

    // DEFAULT_* values are used when creating entities; UPDATED_* when verifying PUTs.
    private static final String DEFAULT_DRE = "AAAAAAAAAA";
    private static final String UPDATED_DRE = "BBBBBBBBBB";

    private static final String DEFAULT_NOME = "AAAAAAAAAA";
    private static final String UPDATED_NOME = "BBBBBBBBBB";

    // Epoch instant vs. "now" (truncated to seconds) to make before/after distinguishable.
    private static final ZonedDateTime DEFAULT_DATA_MATRICULA = ZonedDateTime.ofInstant(Instant.ofEpochMilli(0L), ZoneOffset.UTC);
    private static final ZonedDateTime UPDATED_DATA_MATRICULA = ZonedDateTime.now(ZoneId.systemDefault()).withNano(0);

    private static final TurnoStatus DEFAULT_TURNO = TurnoStatus.PARCIAL;
    private static final TurnoStatus UPDATED_TURNO = TurnoStatus.INTEGRAL;

    private static final InscricaoStatus DEFAULT_STATUS_INSCRICAO = InscricaoStatus.CANDIDATO;
    private static final InscricaoStatus UPDATED_STATUS_INSCRICAO = InscricaoStatus.INSCRITO;

    private static final Long DEFAULT_CPF = 1L;
    private static final Long UPDATED_CPF = 2L;

    private static final String DEFAULT_NOME_MAE = "AAAAAAAAAA";
    private static final String UPDATED_NOME_MAE = "BBBBBBBBBB";

    private static final String DEFAULT_NOME_PAI = "AAAAAAAAAA";
    private static final String UPDATED_NOME_PAI = "BBBBBBBBBB";

    private static final String DEFAULT_ENDERECO = "AAAAAAAAAA";
    private static final String UPDATED_ENDERECO = "BBBBBBBBBB";

    @Autowired
    private AlunoRepository alunoRepository;

    @Autowired
    private AlunoMapper alunoMapper;

    @Autowired
    private AlunoService alunoService;

    @Autowired
    private AlunoSearchRepository alunoSearchRepository;

    @Autowired
    private MappingJackson2HttpMessageConverter jacksonMessageConverter;

    @Autowired
    private PageableHandlerMethodArgumentResolver pageableArgumentResolver;

    @Autowired
    private ExceptionTranslator exceptionTranslator;

    @Autowired
    private EntityManager em;

    // Standalone MockMvc instance built around a real AlunoResource in setup().
    private MockMvc restAlunoMockMvc;

    // Fresh entity re-created before every test by initTest().
    private Aluno aluno;

    /** Builds the standalone MockMvc around a real AlunoResource (no mocked service). */
    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        final AlunoResource alunoResource = new AlunoResource(alunoService);
        this.restAlunoMockMvc = MockMvcBuilders.standaloneSetup(alunoResource)
            .setCustomArgumentResolvers(pageableArgumentResolver)
            .setControllerAdvice(exceptionTranslator)
            .setMessageConverters(jacksonMessageConverter).build();
    }

    /**
     * Create an entity for this test.
     *
     * This is a static method, as tests for other entities might also need it,
     * if they test an entity which requires the current entity.
     */
    public static Aluno createEntity(EntityManager em) {
        Aluno aluno = new Aluno()
            .dre(DEFAULT_DRE)
            .nome(DEFAULT_NOME)
            .dataMatricula(DEFAULT_DATA_MATRICULA)
            .turno(DEFAULT_TURNO)
            .statusInscricao(DEFAULT_STATUS_INSCRICAO)
            .cpf(DEFAULT_CPF)
            .nomeMae(DEFAULT_NOME_MAE)
            .nomePai(DEFAULT_NOME_PAI)
            .endereco(DEFAULT_ENDERECO);
        return aluno;
    }

    /** Clears the Elasticsearch index and rebuilds the unsaved test entity. */
    @Before
    public void initTest() {
        alunoSearchRepository.deleteAll();
        aluno = createEntity(em);
    }

    /** POST /api/alunos persists the entity to both the database and Elasticsearch. */
    @Test
    @Transactional
    public void createAluno() throws Exception {
        int databaseSizeBeforeCreate = alunoRepository.findAll().size();

        // Create the Aluno
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);
        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isCreated());

        // Validate the Aluno in the database
        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeCreate + 1);
        Aluno testAluno = alunoList.get(alunoList.size() - 1);
        assertThat(testAluno.getDre()).isEqualTo(DEFAULT_DRE);
        assertThat(testAluno.getNome()).isEqualTo(DEFAULT_NOME);
        assertThat(testAluno.getDataMatricula()).isEqualTo(DEFAULT_DATA_MATRICULA);
        assertThat(testAluno.getTurno()).isEqualTo(DEFAULT_TURNO);
        assertThat(testAluno.getStatusInscricao()).isEqualTo(DEFAULT_STATUS_INSCRICAO);
        assertThat(testAluno.getCpf()).isEqualTo(DEFAULT_CPF);
        assertThat(testAluno.getNomeMae()).isEqualTo(DEFAULT_NOME_MAE);
        assertThat(testAluno.getNomePai()).isEqualTo(DEFAULT_NOME_PAI);
        assertThat(testAluno.getEndereco()).isEqualTo(DEFAULT_ENDERECO);

        // Validate the Aluno in Elasticsearch
        Aluno alunoEs = alunoSearchRepository.findOne(testAluno.getId());
        assertThat(alunoEs).isEqualToComparingFieldByField(testAluno);
    }

    /** POSTing with a client-supplied id must be rejected (400) and leave the DB untouched. */
    @Test
    @Transactional
    public void createAlunoWithExistingId() throws Exception {
        int databaseSizeBeforeCreate = alunoRepository.findAll().size();

        // Create the Aluno with an existing ID
        aluno.setId(1L);
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        // An entity with an existing ID cannot be created, so this API call must fail
        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        // Validate the Aluno in the database
        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeCreate);
    }

    // --- Bean-validation tests: each nulls one @NotNull field and expects 400. ---
    // NOTE(review): there is no checkNomePaiIsRequired — presumably nomePai is optional
    // on the entity; confirm against the Aluno domain class.

    @Test
    @Transactional
    public void checkDreIsRequired() throws Exception {
        int databaseSizeBeforeTest = alunoRepository.findAll().size();
        // set the field null
        aluno.setDre(null);

        // Create the Aluno, which fails.
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeTest);
    }

    @Test
    @Transactional
    public void checkNomeIsRequired() throws Exception {
        int databaseSizeBeforeTest = alunoRepository.findAll().size();
        // set the field null
        aluno.setNome(null);

        // Create the Aluno, which fails.
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeTest);
    }

    @Test
    @Transactional
    public void checkDataMatriculaIsRequired() throws Exception {
        int databaseSizeBeforeTest = alunoRepository.findAll().size();
        // set the field null
        aluno.setDataMatricula(null);

        // Create the Aluno, which fails.
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeTest);
    }

    @Test
    @Transactional
    public void checkTurnoIsRequired() throws Exception {
        int databaseSizeBeforeTest = alunoRepository.findAll().size();
        // set the field null
        aluno.setTurno(null);

        // Create the Aluno, which fails.
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeTest);
    }

    @Test
    @Transactional
    public void checkStatusInscricaoIsRequired() throws Exception {
        int databaseSizeBeforeTest = alunoRepository.findAll().size();
        // set the field null
        aluno.setStatusInscricao(null);

        // Create the Aluno, which fails.
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeTest);
    }

    @Test
    @Transactional
    public void checkCpfIsRequired() throws Exception {
        int databaseSizeBeforeTest = alunoRepository.findAll().size();
        // set the field null
        aluno.setCpf(null);

        // Create the Aluno, which fails.
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeTest);
    }

    @Test
    @Transactional
    public void checkNomeMaeIsRequired() throws Exception {
        int databaseSizeBeforeTest = alunoRepository.findAll().size();
        // set the field null
        aluno.setNomeMae(null);

        // Create the Aluno, which fails.
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeTest);
    }

    @Test
    @Transactional
    public void checkEnderecoIsRequired() throws Exception {
        int databaseSizeBeforeTest = alunoRepository.findAll().size();
        // set the field null
        aluno.setEndereco(null);

        // Create the Aluno, which fails.
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        restAlunoMockMvc.perform(post("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isBadRequest());

        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeTest);
    }

    /** GET /api/alunos returns the persisted entity with every field serialized. */
    @Test
    @Transactional
    public void getAllAlunos() throws Exception {
        // Initialize the database
        alunoRepository.saveAndFlush(aluno);

        // Get all the alunoList
        restAlunoMockMvc.perform(get("/api/alunos?sort=id,desc"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$.[*].id").value(hasItem(aluno.getId().intValue())))
            .andExpect(jsonPath("$.[*].dre").value(hasItem(DEFAULT_DRE.toString())))
            .andExpect(jsonPath("$.[*].nome").value(hasItem(DEFAULT_NOME.toString())))
            .andExpect(jsonPath("$.[*].dataMatricula").value(hasItem(sameInstant(DEFAULT_DATA_MATRICULA))))
            .andExpect(jsonPath("$.[*].turno").value(hasItem(DEFAULT_TURNO.toString())))
            .andExpect(jsonPath("$.[*].statusInscricao").value(hasItem(DEFAULT_STATUS_INSCRICAO.toString())))
            .andExpect(jsonPath("$.[*].cpf").value(hasItem(DEFAULT_CPF.intValue())))
            .andExpect(jsonPath("$.[*].nomeMae").value(hasItem(DEFAULT_NOME_MAE.toString())))
            .andExpect(jsonPath("$.[*].nomePai").value(hasItem(DEFAULT_NOME_PAI.toString())))
            .andExpect(jsonPath("$.[*].endereco").value(hasItem(DEFAULT_ENDERECO.toString())));
    }

    /** GET /api/alunos/{id} returns the single entity. */
    @Test
    @Transactional
    public void getAluno() throws Exception {
        // Initialize the database
        alunoRepository.saveAndFlush(aluno);

        // Get the aluno
        restAlunoMockMvc.perform(get("/api/alunos/{id}", aluno.getId()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$.id").value(aluno.getId().intValue()))
            .andExpect(jsonPath("$.dre").value(DEFAULT_DRE.toString()))
            .andExpect(jsonPath("$.nome").value(DEFAULT_NOME.toString()))
            .andExpect(jsonPath("$.dataMatricula").value(sameInstant(DEFAULT_DATA_MATRICULA)))
            .andExpect(jsonPath("$.turno").value(DEFAULT_TURNO.toString()))
            .andExpect(jsonPath("$.statusInscricao").value(DEFAULT_STATUS_INSCRICAO.toString()))
            .andExpect(jsonPath("$.cpf").value(DEFAULT_CPF.intValue()))
            .andExpect(jsonPath("$.nomeMae").value(DEFAULT_NOME_MAE.toString()))
            .andExpect(jsonPath("$.nomePai").value(DEFAULT_NOME_PAI.toString()))
            .andExpect(jsonPath("$.endereco").value(DEFAULT_ENDERECO.toString()));
    }

    /** An unknown id yields 404. */
    @Test
    @Transactional
    public void getNonExistingAluno() throws Exception {
        // Get the aluno
        restAlunoMockMvc.perform(get("/api/alunos/{id}", Long.MAX_VALUE))
            .andExpect(status().isNotFound());
    }

    /** PUT with an existing id updates every field in both DB and Elasticsearch. */
    @Test
    @Transactional
    public void updateAluno() throws Exception {
        // Initialize the database
        alunoRepository.saveAndFlush(aluno);
        alunoSearchRepository.save(aluno);
        int databaseSizeBeforeUpdate = alunoRepository.findAll().size();

        // Update the aluno
        Aluno updatedAluno = alunoRepository.findOne(aluno.getId());
        updatedAluno
            .dre(UPDATED_DRE)
            .nome(UPDATED_NOME)
            .dataMatricula(UPDATED_DATA_MATRICULA)
            .turno(UPDATED_TURNO)
            .statusInscricao(UPDATED_STATUS_INSCRICAO)
            .cpf(UPDATED_CPF)
            .nomeMae(UPDATED_NOME_MAE)
            .nomePai(UPDATED_NOME_PAI)
            .endereco(UPDATED_ENDERECO);
        AlunoDTO alunoDTO = alunoMapper.toDto(updatedAluno);

        restAlunoMockMvc.perform(put("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isOk());

        // Validate the Aluno in the database
        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeUpdate);
        Aluno testAluno = alunoList.get(alunoList.size() - 1);
        assertThat(testAluno.getDre()).isEqualTo(UPDATED_DRE);
        assertThat(testAluno.getNome()).isEqualTo(UPDATED_NOME);
        assertThat(testAluno.getDataMatricula()).isEqualTo(UPDATED_DATA_MATRICULA);
        assertThat(testAluno.getTurno()).isEqualTo(UPDATED_TURNO);
        assertThat(testAluno.getStatusInscricao()).isEqualTo(UPDATED_STATUS_INSCRICAO);
        assertThat(testAluno.getCpf()).isEqualTo(UPDATED_CPF);
        assertThat(testAluno.getNomeMae()).isEqualTo(UPDATED_NOME_MAE);
        assertThat(testAluno.getNomePai()).isEqualTo(UPDATED_NOME_PAI);
        assertThat(testAluno.getEndereco()).isEqualTo(UPDATED_ENDERECO);

        // Validate the Aluno in Elasticsearch
        Aluno alunoEs = alunoSearchRepository.findOne(testAluno.getId());
        assertThat(alunoEs).isEqualToComparingFieldByField(testAluno);
    }

    /** PUT without an id falls back to create (201) — JHipster's upsert convention. */
    @Test
    @Transactional
    public void updateNonExistingAluno() throws Exception {
        int databaseSizeBeforeUpdate = alunoRepository.findAll().size();

        // Create the Aluno
        AlunoDTO alunoDTO = alunoMapper.toDto(aluno);

        // If the entity doesn't have an ID, it will be created instead of just being updated
        restAlunoMockMvc.perform(put("/api/alunos")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(alunoDTO)))
            .andExpect(status().isCreated());

        // Validate the Aluno in the database
        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeUpdate + 1);
    }

    /** DELETE removes the entity from both the database and the search index. */
    @Test
    @Transactional
    public void deleteAluno() throws Exception {
        // Initialize the database
        alunoRepository.saveAndFlush(aluno);
        alunoSearchRepository.save(aluno);
        int databaseSizeBeforeDelete = alunoRepository.findAll().size();

        // Get the aluno
        restAlunoMockMvc.perform(delete("/api/alunos/{id}", aluno.getId())
            .accept(TestUtil.APPLICATION_JSON_UTF8))
            .andExpect(status().isOk());

        // Validate Elasticsearch is empty
        boolean alunoExistsInEs = alunoSearchRepository.exists(aluno.getId());
        assertThat(alunoExistsInEs).isFalse();

        // Validate the database is empty
        List<Aluno> alunoList = alunoRepository.findAll();
        assertThat(alunoList).hasSize(databaseSizeBeforeDelete - 1);
    }

    /** GET /api/_search/alunos finds the indexed entity by id query. */
    @Test
    @Transactional
    public void searchAluno() throws Exception {
        // Initialize the database
        alunoRepository.saveAndFlush(aluno);
        alunoSearchRepository.save(aluno);

        // Search the aluno
        restAlunoMockMvc.perform(get("/api/_search/alunos?query=id:" + aluno.getId()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$.[*].id").value(hasItem(aluno.getId().intValue())))
            .andExpect(jsonPath("$.[*].dre").value(hasItem(DEFAULT_DRE.toString())))
            .andExpect(jsonPath("$.[*].nome").value(hasItem(DEFAULT_NOME.toString())))
            .andExpect(jsonPath("$.[*].dataMatricula").value(hasItem(sameInstant(DEFAULT_DATA_MATRICULA))))
            .andExpect(jsonPath("$.[*].turno").value(hasItem(DEFAULT_TURNO.toString())))
            .andExpect(jsonPath("$.[*].statusInscricao").value(hasItem(DEFAULT_STATUS_INSCRICAO.toString())))
            .andExpect(jsonPath("$.[*].cpf").value(hasItem(DEFAULT_CPF.intValue())))
            .andExpect(jsonPath("$.[*].nomeMae").value(hasItem(DEFAULT_NOME_MAE.toString())))
            .andExpect(jsonPath("$.[*].nomePai").value(hasItem(DEFAULT_NOME_PAI.toString())))
            .andExpect(jsonPath("$.[*].endereco").value(hasItem(DEFAULT_ENDERECO.toString())));
    }

    /** Entity equals/hashCode contract: identity is the id field only. */
    @Test
    @Transactional
    public void equalsVerifier() throws Exception {
        TestUtil.equalsVerifier(Aluno.class);
        Aluno aluno1 = new Aluno();
        aluno1.setId(1L);
        Aluno aluno2 = new Aluno();
        aluno2.setId(aluno1.getId());
        assertThat(aluno1).isEqualTo(aluno2);
        aluno2.setId(2L);
        assertThat(aluno1).isNotEqualTo(aluno2);
        aluno1.setId(null);
        assertThat(aluno1).isNotEqualTo(aluno2);
    }

    /** DTO equals/hashCode contract mirrors the entity's id-based identity. */
    @Test
    @Transactional
    public void dtoEqualsVerifier() throws Exception {
        TestUtil.equalsVerifier(AlunoDTO.class);
        AlunoDTO alunoDTO1 = new AlunoDTO();
        alunoDTO1.setId(1L);
        AlunoDTO alunoDTO2 = new AlunoDTO();
        assertThat(alunoDTO1).isNotEqualTo(alunoDTO2);
        alunoDTO2.setId(alunoDTO1.getId());
        assertThat(alunoDTO1).isEqualTo(alunoDTO2);
        alunoDTO2.setId(2L);
        assertThat(alunoDTO1).isNotEqualTo(alunoDTO2);
        alunoDTO1.setId(null);
        assertThat(alunoDTO1).isNotEqualTo(alunoDTO2);
    }

    /** Mapper fromId: wraps a raw id into an entity reference; null maps to null. */
    @Test
    @Transactional
    public void testEntityFromId() {
        assertThat(alunoMapper.fromId(42L).getId()).isEqualTo(42);
        assertThat(alunoMapper.fromId(null)).isNull();
    }
}
/** * Copyright (c) 2010 Yahoo! Inc. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. See accompanying LICENSE file. */ package org.apache.oozie.command.wf; import java.util.Date; import javax.servlet.jsp.el.ELException; import org.apache.hadoop.conf.Configuration; import org.apache.oozie.ErrorCode; import org.apache.oozie.FaultInjection; import org.apache.oozie.WorkflowActionBean; import org.apache.oozie.WorkflowJobBean; import org.apache.oozie.XException; import org.apache.oozie.action.ActionExecutor; import org.apache.oozie.action.ActionExecutorException; import org.apache.oozie.client.OozieClient; import org.apache.oozie.client.WorkflowAction; import org.apache.oozie.client.WorkflowJob; import org.apache.oozie.client.SLAEvent.SlaAppType; import org.apache.oozie.client.SLAEvent.Status; import org.apache.oozie.command.CommandException; import org.apache.oozie.command.coord.CoordActionUpdateCommand; import org.apache.oozie.service.ActionService; import org.apache.oozie.service.Services; import org.apache.oozie.service.UUIDService; import org.apache.oozie.store.StoreException; import org.apache.oozie.store.WorkflowStore; import org.apache.oozie.util.ELEvaluationException; import org.apache.oozie.util.Instrumentation; import org.apache.oozie.util.XLog; import org.apache.oozie.util.XmlUtils; import org.apache.oozie.util.db.SLADbOperations; public class ActionStartCommand extends ActionCommand<Void> { public static final String EL_ERROR = "EL_ERROR"; public static 
final String EL_EVAL_ERROR = "EL_EVAL_ERROR"; public static final String COULD_NOT_START = "COULD_NOT_START"; public static final String START_DATA_MISSING = "START_DATA_MISSING"; public static final String EXEC_DATA_MISSING = "EXEC_DATA_MISSING"; private String id; private String jobId; public ActionStartCommand(String id, String type) { super("action.start", type, 0); this.id = id; } @Override protected Void call(WorkflowStore store) throws StoreException, CommandException { WorkflowJobBean workflow = store.getWorkflow(jobId, false); setLogInfo(workflow); WorkflowActionBean action = store.getAction(id, false); XLog.getLog(getClass()).warn(XLog.STD, "[***" + action.getId() + "***]" + "In call()....status=" + action.getStatusStr()); setLogInfo(action); if (action.isPending() && (action.getStatus() == WorkflowActionBean.Status.PREP || action.getStatus() == WorkflowActionBean.Status.START_RETRY || action.getStatus() == WorkflowActionBean.Status.START_MANUAL)) { if (workflow.getStatus() == WorkflowJob.Status.RUNNING) { ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(action.getType()); Configuration conf = workflow.getWorkflowInstance().getConf(); int maxRetries = conf.getInt(OozieClient.ACTION_MAX_RETRIES, executor.getMaxRetries()); long retryInterval = conf.getLong(OozieClient.ACTION_RETRY_INTERVAL, executor.getRetryInterval()); executor.setMaxRetries(maxRetries); executor.setRetryInterval(retryInterval); if (executor != null) { ActionExecutorContext context = null; try { boolean isRetry = false; if (action.getStatus() == WorkflowActionBean.Status.START_RETRY || action.getStatus() == WorkflowActionBean.Status.START_MANUAL) { isRetry = true; } context = new ActionCommand.ActionExecutorContext(workflow, action, isRetry); try { String tmpActionConf = XmlUtils.removeComments(action.getConf()); String actionConf = context.getELEvaluator().evaluate(tmpActionConf, String.class); action.setConf(actionConf); XLog.getLog(getClass()).debug("Start, 
name [{0}] type [{1}] configuration{E}{E}{2}{E}", action.getName(), action.getType(), actionConf); } catch (ELEvaluationException ex) { throw new ActionExecutorException(ActionExecutorException.ErrorType.TRANSIENT, EL_EVAL_ERROR, ex.getMessage(), ex); } catch (ELException ex) { context.setErrorInfo(EL_ERROR, ex.getMessage()); XLog.getLog(getClass()).warn("ELException in ActionStartCommand ", ex.getMessage(), ex); handleError(context, store, workflow, action); return null; } catch (org.jdom.JDOMException je) { context.setErrorInfo("ParsingError", je.getMessage()); XLog.getLog(getClass()).warn("JDOMException in ActionStartCommand ", je.getMessage(), je); handleError(context, store, workflow, action); return null; } catch (Exception ex) { context.setErrorInfo(EL_ERROR, ex.getMessage()); XLog.getLog(getClass()).warn("Exception in ActionStartCommand ", ex.getMessage(), ex); handleError(context, store, workflow, action); return null; } action.setErrorInfo(null, null); incrActionCounter(action.getType(), 1); Instrumentation.Cron cron = new Instrumentation.Cron(); cron.start(); executor.start(context, action); cron.stop(); FaultInjection.activate("org.apache.oozie.command.SkipCommitFaultInjection"); addActionCron(action.getType(), cron); action.setRetries(0); if (action.isExecutionComplete()) { if (!context.isExecuted()) { XLog.getLog(getClass()).warn(XLog.OPS, "Action Completed, ActionExecutor [{0}] must call setExecutionData()", executor.getType()); action.setErrorInfo(EXEC_DATA_MISSING, "Execution Complete, but Execution Data Missing from Action"); failJob(context); store.updateAction(action); store.updateWorkflow(workflow); return null; } action.setPending(); queueCallable(new ActionEndCommand(action.getId(), action.getType())); } else { if (!context.isStarted()) { XLog.getLog(getClass()).warn(XLog.OPS, "Action Started, ActionExecutor [{0}] must call setStartData()", executor.getType()); action.setErrorInfo(START_DATA_MISSING, "Execution Started, but Start Data Missing 
from Action"); failJob(context); store.updateAction(action); store.updateWorkflow(workflow); return null; } queueCallable(new NotificationCommand(workflow, action)); } XLog.getLog(getClass()).warn(XLog.STD, "[***" + action.getId() + "***]" + "Action status=" + action.getStatusStr()); store.updateAction(action); store.updateWorkflow(workflow); // Add SLA status event (STARTED) for WF_ACTION // SLADbOperations.writeSlaStatusEvent(eSla, // action.getId(), Status.STARTED, store); SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, Status.STARTED, SlaAppType.WORKFLOW_ACTION); XLog.getLog(getClass()).warn(XLog.STD, "[***" + action.getId() + "***]" + "Action updated in DB!"); } catch (ActionExecutorException ex) { XLog.getLog(getClass()).warn( "Error starting action [{0}]. ErrorType [{1}], ErrorCode [{2}], Message [{3}]", action.getName(), ex.getErrorType(), ex.getErrorCode(), ex.getMessage(), ex); action.setErrorInfo(ex.getErrorCode(), ex.getMessage()); switch (ex.getErrorType()) { case TRANSIENT: if (!handleTransient(context, executor, WorkflowAction.Status.START_RETRY)) { handleNonTransient(store, context, executor, WorkflowAction.Status.START_MANUAL); action.setPendingAge(new Date()); action.setRetries(0); action.setStartTime(null); } break; case NON_TRANSIENT: handleNonTransient(store, context, executor, WorkflowAction.Status.START_MANUAL); break; case ERROR: handleError(context, executor, WorkflowAction.Status.ERROR.toString(), true, WorkflowAction.Status.DONE); break; case FAILED: try { failJob(context); queueCallable(new CoordActionUpdateCommand(workflow)); SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, Status.FAILED, SlaAppType.WORKFLOW_ACTION); SLADbOperations.writeStausEvent(workflow.getSlaXml(), workflow.getId(), store, Status.FAILED, SlaAppType.WORKFLOW_JOB); } catch (XException x) { XLog.getLog(getClass()).warn("ActionStartCommand - case:FAILED ", x.getMessage()); } break; } store.updateAction(action); 
store.updateWorkflow(workflow); } } else { throw new CommandException(ErrorCode.E0802, action.getType()); } } else { XLog.getLog(getClass()).warn("Job state is not {0}. Skipping Action Execution", WorkflowJob.Status.RUNNING.toString()); } } return null; } private void handleError(ActionExecutorContext context, WorkflowStore store, WorkflowJobBean workflow, WorkflowActionBean action) throws CommandException, StoreException { failJob(context); store.updateAction(action); store.updateWorkflow(workflow); SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, Status.FAILED, SlaAppType.WORKFLOW_ACTION); SLADbOperations.writeStausEvent(workflow.getSlaXml(), workflow.getId(), store, Status.FAILED, SlaAppType.WORKFLOW_JOB); queueCallable(new CoordActionUpdateCommand(workflow)); return; } @Override protected Void execute(WorkflowStore store) throws CommandException, StoreException { try { XLog.getLog(getClass()).debug("STARTED ActionStartCommand for wf actionId=" + id); jobId = Services.get().get(UUIDService.class).getId(id); if (lock(jobId)) { call(store); } else { queueCallable(new ActionStartCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL); XLog.getLog(getClass()).warn("ActionStartCommand lock was not acquired - failed {0}", id); } } catch (InterruptedException e) { queueCallable(new ActionStartCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL); XLog.getLog(getClass()).warn("ActionStartCommand lock was not acquired - interrupted exception failed {0}", id); } XLog.getLog(getClass()).debug("ENDED ActionStartCommand for wf actionId=" + id + ", jobId=" + jobId); return null; } }
package io.advantageous.qbit.meta.swagger;

import io.advantageous.boon.core.TypeType;
import io.advantageous.boon.core.reflection.MethodAccess;
import io.advantageous.qbit.annotation.RequestMethod;
import io.advantageous.qbit.meta.*;
import io.advantageous.qbit.meta.params.*;
import io.advantageous.qbit.meta.swagger.builders.*;
import io.advantageous.qbit.reactive.Callback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * Transforms QBit runtime service metadata ({@link ContextMeta}) into a
 * Swagger {@link ServiceEndpointInfo} document: the API "info" section,
 * one path entry per request URI / HTTP method, and schema definitions
 * for every class referenced by request parameters and return types.
 */
public class MetaTransformerFromQbitMetaToSwagger {

    /** Accumulates classes seen in parameters/returns and renders them as Swagger definitions. */
    private final DefinitionClassCollector definitionClassCollector = new DefinitionClassCollector();

    private final Logger logger = LoggerFactory.getLogger(MetaTransformerFromQbitMetaToSwagger.class);

    /**
     * Builds the complete Swagger endpoint description for a context.
     *
     * @param contextMeta root metadata for the service bundle (host, root URI, services)
     * @return the built swagger model, ready for serialization
     */
    public ServiceEndpointInfo serviceEndpointInfo(final ContextMeta contextMeta) {

        final ServiceEndpointInfoBuilder builder = new ServiceEndpointInfoBuilder();

        populateAPIInfo(contextMeta, builder);

        builder.setBasePath(contextMeta.getRootURI());
        builder.setHost(contextMeta.getHostAddress());

        final List<ServiceMeta> services = contextMeta.getServices();
        final Map<String, PathBuilder> pathBuilderMap = new HashMap<>();

        // Definitions are collected first so parameter/return schemas can refer to them.
        buildDefinitions(builder, services);
        buildPaths(builder, services, pathBuilderMap);
        return builder.build();
    }

    /**
     * Builds one PathBuilder per request URI across all services, then adds
     * the built paths to the endpoint builder.
     */
    private void buildPaths(final ServiceEndpointInfoBuilder builder,
                            final List<ServiceMeta> services,
                            final Map<String, PathBuilder> pathBuilderMap) {
        for (ServiceMeta serviceMeta : services) {
            final List<ServiceMethodMeta> methodMetas = serviceMeta.getMethods();
            final List<String> serviceMetaRequestPaths = serviceMeta.getRequestPaths();
            for (final String servicePath : serviceMetaRequestPaths) {
                extractPathsFromRequestMetaList(servicePath, pathBuilderMap, methodMetas);
            }
        }
        final Set<Map.Entry<String, PathBuilder>> pathEntries = pathBuilderMap.entrySet();
        for (Map.Entry<String, PathBuilder> entry : pathEntries) {
            builder.addPath(entry.getKey(), entry.getValue().build());
        }
    }

    /**
     * Copies title, description, version, contact and license information
     * from the context into the swagger "info" section.
     * Fix: the original issued the title, contact email/name, and license
     * name/url setters twice each; every setter is now called exactly once.
     */
    private void populateAPIInfo(ContextMeta contextMeta, ServiceEndpointInfoBuilder builder) {
        builder.getApiInfoBuilder().setTitle(contextMeta.getTitle());
        builder.getApiInfoBuilder().setDescription(contextMeta.getDescription());
        builder.getApiInfoBuilder().setVersion(contextMeta.getVersion());
        builder.getApiInfoBuilder().getContactBuilder().setEmail(contextMeta.getContactEmail());
        builder.getApiInfoBuilder().getContactBuilder().setName(contextMeta.getContactName());
        builder.getApiInfoBuilder().getContactBuilder().setUrl(contextMeta.getContactURL());
        builder.getApiInfoBuilder().getLicenseBuilder().setName(contextMeta.getLicenseName());
        builder.getApiInfoBuilder().getLicenseBuilder().setUrl(contextMeta.getLicenseURL());
    }

    /**
     * For each service method and each of its request endpoints, registers
     * an operation under the combined (servicePath + requestURI) key.
     */
    private void extractPathsFromRequestMetaList(final String servicePath,
                                                 final Map<String, PathBuilder> pathBuilderMap,
                                                 final List<ServiceMethodMeta> methodMetas) {
        for (ServiceMethodMeta methodMeta : methodMetas) {
            final List<RequestMeta> requestEndpoints = methodMeta.getRequestEndpoints();
            final MethodAccess methodAccess = methodMeta.getMethodAccess();

            for (RequestMeta requestMeta : requestEndpoints) {
                // Collapse accidental double slashes from path concatenation.
                final String requestURI = (servicePath + requestMeta.getRequestURI()).replaceAll("//", "/");
                final PathBuilder pathBuilder = createPathBuilderIfAbsent(pathBuilderMap, requestURI);
                final List<RequestMethod> requestMethods = requestMeta.getRequestMethods();

                for (RequestMethod requestMethod : requestMethods) {
                    /* If one fails, we want to continue processing. */
                    try {
                        extractPathFromRequestMeta(methodMeta, methodAccess, requestMeta,
                                pathBuilder, requestMethod);
                    } catch (Exception ex) {
                        logger.warn("Problem processing path {} {}", requestURI, methodAccess.name());
                        logger.warn("Problem processing path", ex);
                    }
                }
            }
        }
    }

    /**
     * Builds a single swagger operation (parameters, responses, produces)
     * for one HTTP method of one endpoint and attaches it to the path.
     */
    private void extractPathFromRequestMeta(final ServiceMethodMeta methodMeta,
                                            final MethodAccess methodAccess,
                                            final RequestMeta requestMeta,
                                            final PathBuilder pathBuilder,
                                            final RequestMethod requestMethod) {
        final OperationBuilder operationBuilder = new OperationBuilder();
        operationBuilder.setDescription(methodMeta.getDescription());
        operationBuilder.setSummary(methodMeta.getSummary());

        addParameters(operationBuilder, requestMeta.getParameters());
        operationBuilder.setOperationId(methodAccess.name());

        if (methodMeta.hasReturn()) {
            final ResponseBuilder responseBuilder = new ResponseBuilder();
            if (methodMeta.isReturnMap()) {
                //TODO map return types are not yet mapped to a swagger schema
            } else if (methodMeta.isReturnCollection() || methodMeta.isReturnArray()) {
                responseBuilder.setSchema(definitionClassCollector.getSchema(methodMeta.getReturnType(),
                        methodMeta.getReturnTypeComponent()));
                responseBuilder.setDescription(methodMeta.getReturnDescription());
                operationBuilder.getResponses().put(200, responseBuilder.build());
                operationBuilder.getProduces().add("application/json");
            } else {
                responseBuilder.setSchema(definitionClassCollector.getSchema(methodMeta.getReturnType()));
                responseBuilder.setDescription(methodMeta.getReturnDescription());
                operationBuilder.getResponses().put(200, responseBuilder.build());
                operationBuilder.getProduces().add("application/json");
            }
        } else {
            // void methods report 202 Accepted with a plain string body
            final ResponseBuilder responseBuilder = new ResponseBuilder();
            final SchemaBuilder schemaBuilder = new SchemaBuilder();
            schemaBuilder.setType("string");
            responseBuilder.setSchema(schemaBuilder.build());
            responseBuilder.setDescription("returns success");
            operationBuilder.getResponses().put(202, responseBuilder.build());
        }

        switch (requestMethod) {
            case GET:
                pathBuilder.setGet(operationBuilder.build());
                break;
            case POST:
                pathBuilder.setPost(operationBuilder.build());
                break;
            case PUT:
                pathBuilder.setPut(operationBuilder.build());
                break;
            case OPTIONS:
                pathBuilder.setOptions(operationBuilder.build());
                break;
            case DELETE:
                pathBuilder.setDelete(operationBuilder.build());
                break;
            case HEAD:
                pathBuilder.setHead(operationBuilder.build());
                break;
        }
    }

    /** Returns the PathBuilder for this URI, creating and caching one if absent. */
    private PathBuilder createPathBuilderIfAbsent(final Map<String, PathBuilder> pathBuilderMap,
                                                  final String requestURI) {
        PathBuilder pathBuilder = pathBuilderMap.get(requestURI);
        if (pathBuilder == null) {
            pathBuilder = new PathBuilder();
            pathBuilderMap.put(requestURI, pathBuilder);
        }
        return pathBuilder;
    }

    /**
     * Translates QBit parameter metadata into swagger parameter objects.
     * Callback parameters are implementation plumbing and are skipped.
     * Fix: the body-param INSTANCE branch previously used {@code return},
     * which aborted processing of all remaining parameters; it now uses
     * {@code continue} so the rest of the list is still handled.
     */
    private void addParameters(final OperationBuilder operationBuilder,
                               final List<ParameterMeta> parameterMetaList) {
        for (final ParameterMeta parameterMeta : parameterMetaList) {
            if (parameterMeta.getClassType() == Callback.class) {
                continue;
            }
            final ParameterBuilder parameterBuilder = new ParameterBuilder();
            if (parameterMeta.getParam() instanceof NamedParam) {
                parameterBuilder.setName(((NamedParam) parameterMeta.getParam()).getName());
            }
            if (parameterMeta.getParam() instanceof RequestParam) {
                parameterBuilder.setIn("query");
            }
            if (parameterMeta.getParam() instanceof URINamedParam) {
                parameterBuilder.setIn("path");
            }
            if (parameterMeta.getParam() instanceof HeaderParam) {
                parameterBuilder.setIn("header");
            }
            if (parameterMeta.getParam() instanceof URIPositionalParam) {
                parameterBuilder.setIn("THIS QBIT FEATURE URI POSITIONAL PARAM IS NOT SUPPORTED BY SWAGGER");
            }
            if (parameterMeta.getParam() instanceof BodyArrayParam) {
                parameterBuilder.setIn("THIS QBIT FEATURE BodyArrayParam IS NOT SUPPORTED BY SWAGGER");
            }
            if (parameterMeta.getParam() instanceof BodyParam) {
                parameterBuilder.setIn("body");
                parameterBuilder.setName("body");
                /** TODO handle generic types */
                if (parameterMeta.getType() == TypeType.INSTANCE) {
                    parameterBuilder.setSchema(Schema.definitionRef(parameterMeta.getClassType().getSimpleName()));
                    parameterBuilder.setRequired(parameterMeta.getParam().isRequired());
                    operationBuilder.addParameter(parameterBuilder.build());
                    continue; // was return: do not abort the remaining parameters
                }
            }

            Schema schema = definitionClassCollector.getSchema(parameterMeta.getClassType());
            parameterBuilder.setType(schema.getType());
            if ("array".equals(schema.getType())) {
                parameterBuilder.setItems(schema.getItems());
                parameterBuilder.setCollectionFormat("csv");
            }
            parameterBuilder.setDescription(parameterMeta.getParam().getDescription());
            parameterBuilder.setRequired(parameterMeta.getParam().isRequired());
            operationBuilder.addParameter(parameterBuilder.build());
        }
    }

    /**
     * Collects definition classes for all services, then publishes the
     * resulting definition map into the endpoint builder. A failure on one
     * service is logged and does not stop the others.
     */
    private void buildDefinitions(ServiceEndpointInfoBuilder builder, final List<ServiceMeta> services) {
        services.forEach(serviceMeta -> {
            try {
                populateDefinitionMapByService(serviceMeta);
            } catch (Exception ex) {
                logger.warn("Unable to create definitions from service {}", serviceMeta.getName());
                logger.warn("Unable to create definitions from service", ex);
            }
        });

        final Map<String, Definition> definitionMap = definitionClassCollector.getDefinitionMap();
        definitionMap.entrySet().forEach(entry -> {
            builder.addDefinition(entry.getKey(), entry.getValue());
        });
    }

    /** Registers definition classes for every method of one service. */
    private void populateDefinitionMapByService(final ServiceMeta serviceMeta) {
        serviceMeta.getMethods().forEach(serviceMethodMeta ->
                populateDefinitionMapByServiceMethod(serviceMeta, serviceMethodMeta));
    }

    /**
     * Registers the return type (component types for maps/collections) and
     * every non-callback parameter type of one service method with the
     * definition collector.
     * Fix: the failure log previously had one "{}" placeholder but two
     * arguments, silently dropping the method name.
     */
    private void populateDefinitionMapByServiceMethod(final ServiceMeta serviceMeta,
                                                      final ServiceMethodMeta serviceMethodMeta) {
        try {
            if (serviceMethodMeta.isReturnMap()) {
                definitionClassCollector.addClass(serviceMethodMeta.getReturnTypeComponentKey());
                definitionClassCollector.addClass(serviceMethodMeta.getReturnTypeComponentValue());
            } else if (serviceMethodMeta.isReturnCollection()) {
                definitionClassCollector.addClass(serviceMethodMeta.getReturnTypeComponent());
            } else {
                definitionClassCollector.addClass(serviceMethodMeta.getReturnType());
            }

            serviceMethodMeta.getRequestEndpoints().forEach(requestMeta ->
                    requestMeta.getParameters()
                            .forEach(parameterMeta -> {
                                if (parameterMeta.isMap()) {
                                    definitionClassCollector.addClass(parameterMeta.getComponentClassKey());
                                    definitionClassCollector.addClass(parameterMeta.getComponentClassValue());
                                } else if (parameterMeta.isCollection() || parameterMeta.isArray()) {
                                    definitionClassCollector.addClass(parameterMeta.getComponentClass());
                                } else {
                                    if (parameterMeta.getClassType() != Callback.class) {
                                        definitionClassCollector.addClass(parameterMeta.getClassType());
                                    }
                                }
                            }));
        } catch (Exception ex) {
            logger.warn("Unable to process service method " + serviceMethodMeta.getName(), ex);
            logger.warn("Unable to process service method for service {} method name {}",
                    serviceMeta.getName(), serviceMethodMeta.getName());
        }
    }
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.storage.image.db;

import com.cloud.storage.DataStoreRole;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.SearchCriteria.Op;
import com.cloud.utils.db.TransactionLegacy;
import com.cloud.utils.db.UpdateBuilder;
import org.apache.cloudstack.engine.subsystem.api.storage.DataObjectInStore;
import org.apache.cloudstack.engine.subsystem.api.storage.ObjectInDataStoreStateMachine;
import org.apache.cloudstack.engine.subsystem.api.storage.ObjectInDataStoreStateMachine.Event;
import org.apache.cloudstack.engine.subsystem.api.storage.ObjectInDataStoreStateMachine.State;
import org.apache.cloudstack.storage.datastore.db.SnapshotDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.SnapshotDataStoreVO;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;

import javax.naming.ConfigurationException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * DAO over the {@code snapshot_store_ref} table, which tracks snapshot copies on
 * both primary and secondary/image(-cache) stores. Because one snapshot can have
 * rows on multiple stores, most lookups qualify on (store_id, store_role) or
 * (store_role, state), not just on snapshot id.
 */
@Component
public class SnapshotDataStoreDaoImpl extends GenericDaoBase<SnapshotDataStoreVO, Long> implements SnapshotDataStoreDao {
    private static final Logger s_logger = Logger.getLogger(SnapshotDataStoreDaoImpl.class);

    // SearchBuilders are prepared once in configure() and reused per call via create().
    private SearchBuilder<SnapshotDataStoreVO> updateStateSearch;
    private SearchBuilder<SnapshotDataStoreVO> storeSearch;
    private SearchBuilder<SnapshotDataStoreVO> destroyedSearch;
    private SearchBuilder<SnapshotDataStoreVO> cacheSearch;
    private SearchBuilder<SnapshotDataStoreVO> snapshotSearch;
    private SearchBuilder<SnapshotDataStoreVO> storeSnapshotSearch;
    private SearchBuilder<SnapshotDataStoreVO> snapshotIdSearch;
    private SearchBuilder<SnapshotDataStoreVO> volumeIdSearch;
    private SearchBuilder<SnapshotDataStoreVO> volumeSearch;

    // Raw SQL used where the SearchBuilder API cannot express ORDER BY ... LIMIT 1.
    // Each returns (store_id, store_role, snapshot_id); only columns 1 and 3 are read.
    private final String parentSearch = "select store_id, store_role, snapshot_id from cloud.snapshot_store_ref where store_id = ? " + " and store_role = ? and volume_id = ? and state = 'Ready'" + " order by created DESC " + " limit 1";
    private final String findLatestSnapshot = "select store_id, store_role, snapshot_id from cloud.snapshot_store_ref where " + " store_role = ? and volume_id = ? and state = 'Ready'" + " order by created DESC " + " limit 1";
    private final String findOldestSnapshot = "select store_id, store_role, snapshot_id from cloud.snapshot_store_ref where " + " store_role = ? and volume_id = ? and state = 'Ready'" + " order by created ASC " + " limit 1";

    /**
     * Prepares all reusable SearchBuilders. Called once by the framework.
     */
    @Override
    public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
        super.configure(name, params);

        // Note that snapshot_store_ref stores snapshots on primary as well as
        // those on secondary, so we need to
        // use (store_id, store_role) to search
        // store_id + store_role, with state as a NOT-EQUAL filter.
        storeSearch = createSearchBuilder();
        storeSearch.and("store_id", storeSearch.entity().getDataStoreId(), SearchCriteria.Op.EQ);
        storeSearch.and("store_role", storeSearch.entity().getRole(), SearchCriteria.Op.EQ);
        storeSearch.and("state", storeSearch.entity().getState(), SearchCriteria.Op.NEQ);
        storeSearch.done();

        // Same key columns but state is EQUAL — used to list destroyed entries.
        destroyedSearch = createSearchBuilder();
        destroyedSearch.and("store_id", destroyedSearch.entity().getDataStoreId(), SearchCriteria.Op.EQ);
        destroyedSearch.and("store_role", destroyedSearch.entity().getRole(), SearchCriteria.Op.EQ);
        destroyedSearch.and("state", destroyedSearch.entity().getState(), SearchCriteria.Op.EQ);
        destroyedSearch.done();

        // Image-cache entries still referenced (state != X and ref_cnt != Y).
        cacheSearch = createSearchBuilder();
        cacheSearch.and("store_id", cacheSearch.entity().getDataStoreId(), SearchCriteria.Op.EQ);
        cacheSearch.and("store_role", cacheSearch.entity().getRole(), SearchCriteria.Op.EQ);
        cacheSearch.and("state", cacheSearch.entity().getState(), SearchCriteria.Op.NEQ);
        cacheSearch.and("ref_cnt", cacheSearch.entity().getRefCnt(), SearchCriteria.Op.NEQ);
        cacheSearch.done();

        // Optimistic state transition: match id + current state + updatedCount.
        updateStateSearch = this.createSearchBuilder();
        updateStateSearch.and("id", updateStateSearch.entity().getId(), Op.EQ);
        updateStateSearch.and("state", updateStateSearch.entity().getState(), Op.EQ);
        updateStateSearch.and("updatedCount", updateStateSearch.entity().getUpdatedCount(), Op.EQ);
        updateStateSearch.done();

        snapshotSearch = createSearchBuilder();
        snapshotSearch.and("snapshot_id", snapshotSearch.entity().getSnapshotId(), SearchCriteria.Op.EQ);
        snapshotSearch.and("store_role", snapshotSearch.entity().getRole(), SearchCriteria.Op.EQ);
        snapshotSearch.and("state", snapshotSearch.entity().getState(), SearchCriteria.Op.EQ);
        snapshotSearch.done();

        storeSnapshotSearch = createSearchBuilder();
        storeSnapshotSearch.and("snapshot_id", storeSnapshotSearch.entity().getSnapshotId(), SearchCriteria.Op.EQ);
        storeSnapshotSearch.and("store_id", storeSnapshotSearch.entity().getDataStoreId(), SearchCriteria.Op.EQ);
        storeSnapshotSearch.and("store_role", storeSnapshotSearch.entity().getRole(), SearchCriteria.Op.EQ);
        storeSnapshotSearch.and("state", storeSnapshotSearch.entity().getState(), SearchCriteria.Op.EQ);
        storeSnapshotSearch.done();

        snapshotIdSearch = createSearchBuilder();
        snapshotIdSearch.and("snapshot_id", snapshotIdSearch.entity().getSnapshotId(), SearchCriteria.Op.EQ);
        snapshotIdSearch.done();

        volumeIdSearch = createSearchBuilder();
        volumeIdSearch.and("volume_id", volumeIdSearch.entity().getVolumeId(), SearchCriteria.Op.EQ);
        volumeIdSearch.done();

        volumeSearch = createSearchBuilder();
        volumeSearch.and("volume_id", volumeSearch.entity().getVolumeId(), SearchCriteria.Op.EQ);
        volumeSearch.and("store_role", volumeSearch.entity().getRole(), SearchCriteria.Op.EQ);
        volumeSearch.done();

        return true;
    }

    /**
     * Optimistic-locking state transition: the UPDATE matches id, the expected
     * current state and the expected updatedCount, so a concurrent change makes
     * the row count 0 and the transition is reported as failed.
     *
     * @return true when exactly the expected row was transitioned
     */
    @Override
    public boolean updateState(State currentState, Event event, State nextState, DataObjectInStore vo, Object data) {
        SnapshotDataStoreVO dataObj = (SnapshotDataStoreVO)vo;
        Long oldUpdated = dataObj.getUpdatedCount();
        Date oldUpdatedTime = dataObj.getUpdated();
        SearchCriteria<SnapshotDataStoreVO> sc = updateStateSearch.create();
        sc.setParameters("id", dataObj.getId());
        sc.setParameters("state", currentState);
        sc.setParameters("updatedCount", dataObj.getUpdatedCount());

        dataObj.incrUpdatedCount();

        UpdateBuilder builder = getUpdateBuilder(dataObj);
        builder.set(dataObj, "state", nextState);
        builder.set(dataObj, "updated", new Date());

        int rows = update(dataObj, sc);
        if (rows == 0 && s_logger.isDebugEnabled()) {
            SnapshotDataStoreVO dbVol = findByIdIncludingRemoved(dataObj.getId());
            if (dbVol != null) {
                // NOTE(review): this diagnostic StringBuilder is built but never
                // passed to s_logger.debug(...) — the message is silently dropped.
                StringBuilder str = new StringBuilder("Unable to update ").append(dataObj.toString());
                str.append(": DB Data={id=")
                   .append(dbVol.getId())
                   .append("; state=")
                   .append(dbVol.getState())
                   .append("; updatecount=")
                   .append(dbVol.getUpdatedCount())
                   .append(";updatedTime=")
                   .append(dbVol.getUpdated());
                str.append(": New Data={id=")
                   .append(dataObj.getId())
                   .append("; state=")
                   .append(nextState)
                   .append("; event=")
                   .append(event)
                   .append("; updatecount=")
                   .append(dataObj.getUpdatedCount())
                   .append("; updatedTime=")
                   .append(dataObj.getUpdated());
                str.append(": stale Data={id=")
                   .append(dataObj.getId())
                   .append("; state=")
                   .append(currentState)
                   .append("; event=")
                   .append(event)
                   .append("; updatecount=")
                   .append(oldUpdated)
                   .append("; updatedTime=")
                   .append(oldUpdatedTime);
            } else {
                s_logger.debug("Unable to update objectIndatastore: id=" + dataObj.getId() + ", as there is no such object exists in the database anymore");
            }
        }
        return rows > 0;
    }

    /** Lists non-destroyed snapshot refs on one store (state != Destroyed). */
    @Override
    public List<SnapshotDataStoreVO> listByStoreId(long id, DataStoreRole role) {
        SearchCriteria<SnapshotDataStoreVO> sc = storeSearch.create();
        sc.setParameters("store_id", id);
        sc.setParameters("store_role", role);
        sc.setParameters("state", ObjectInDataStoreStateMachine.State.Destroyed);
        return listBy(sc);
    }

    /** Removes every ref row for the given store, regardless of state. */
    @Override
    public void deletePrimaryRecordsForStore(long id, DataStoreRole role) {
        SearchCriteria<SnapshotDataStoreVO> sc = storeSearch.create();
        sc.setParameters("store_id", id);
        sc.setParameters("store_role", role);
        TransactionLegacy txn = TransactionLegacy.currentTxn();
        txn.start();
        remove(sc);
        txn.commit();
    }

    /**
     * Removes all refs whose role is Primary.
     * NOTE(review): storeSearch also declares store_id and a state condition;
     * only store_role is set here — confirm GenericDaoBase tolerates unset
     * conditions for this builder before relying on this method.
     */
    @Override
    public void deleteSnapshotRecordsOnPrimary() {
        SearchCriteria<SnapshotDataStoreVO> sc = storeSearch.create();
        sc.setParameters("store_role", DataStoreRole.Primary);
        TransactionLegacy txn = TransactionLegacy.currentTxn();
        txn.start();
        remove(sc);
        txn.commit();
    }

    /** Finds the ref for one snapshot on one specific store (role + id). */
    @Override
    public SnapshotDataStoreVO findByStoreSnapshot(DataStoreRole role, long storeId, long snapshotId) {
        SearchCriteria<SnapshotDataStoreVO> sc = storeSnapshotSearch.create();
        sc.setParameters("store_id", storeId);
        sc.setParameters("snapshot_id", snapshotId);
        sc.setParameters("store_role", role);
        return findOneBy(sc);
    }

    /** Most recent Ready snapshot of a volume for the given role, or null. */
    @Override
    public SnapshotDataStoreVO findLatestSnapshotForVolume(Long volumeId, DataStoreRole role) {
        TransactionLegacy txn = TransactionLegacy.currentTxn();
        try ( PreparedStatement pstmt = txn.prepareStatement(findLatestSnapshot); ){
            pstmt.setString(1, role.toString());
            pstmt.setLong(2, volumeId);
            try (ResultSet rs = pstmt.executeQuery();) {
                while (rs.next()) {
                    long sid = rs.getLong(1);    // store_id
                    long snid = rs.getLong(3);   // snapshot_id
                    return findByStoreSnapshot(role, sid, snid);
                }
            }
        } catch (SQLException e) {
            s_logger.debug("Failed to find latest snapshot for volume: " + volumeId + " due to: "  + e.toString());
        }
        return null;
    }

    /** Oldest Ready snapshot of a volume for the given role, or null. */
    @Override
    public SnapshotDataStoreVO findOldestSnapshotForVolume(Long volumeId, DataStoreRole role) {
        TransactionLegacy txn = TransactionLegacy.currentTxn();
        try ( PreparedStatement pstmt = txn.prepareStatement(findOldestSnapshot); ){
            pstmt.setString(1, role.toString());
            pstmt.setLong(2, volumeId);
            try (ResultSet rs = pstmt.executeQuery();) {
                while (rs.next()) {
                    long sid = rs.getLong(1);    // store_id
                    long snid = rs.getLong(3);   // snapshot_id
                    return findByStoreSnapshot(role, sid, snid);
                }
            }
        } catch (SQLException e) {
            s_logger.debug("Failed to find oldest snapshot for volume: " + volumeId + " due to: "  + e.toString());
        }
        return null;
    }

    /**
     * Most recent Ready snapshot of a volume on a specific store — used as the
     * parent for incremental snapshots. Returns null when none exists.
     */
    @Override
    @DB
    public SnapshotDataStoreVO findParent(DataStoreRole role, Long storeId, Long volumeId) {
        TransactionLegacy txn = TransactionLegacy.currentTxn();
        try ( PreparedStatement pstmt = txn.prepareStatement(parentSearch); ){
            pstmt.setLong(1, storeId);
            pstmt.setString(2, role.toString());
            pstmt.setLong(3, volumeId);
            try (ResultSet rs = pstmt.executeQuery();) {
                while (rs.next()) {
                    long sid = rs.getLong(1);    // store_id
                    long snid = rs.getLong(3);   // snapshot_id
                    return findByStoreSnapshot(role, sid, snid);
                }
            }
        } catch (SQLException e) {
            s_logger.debug("Failed to find parent snapshot: " + e.toString());
        }
        return null;
    }

    /** The Ready ref of a snapshot for one role; findOneBy assumes at most one match. */
    @Override
    public SnapshotDataStoreVO findBySnapshot(long snapshotId, DataStoreRole role) {
        SearchCriteria<SnapshotDataStoreVO> sc = snapshotSearch.create();
        sc.setParameters("snapshot_id", snapshotId);
        sc.setParameters("store_role", role);
        sc.setParameters("state", State.Ready);
        return findOneBy(sc);
    }

    /** A ref for a volume and role, any state; findOneBy picks a single row. */
    @Override
    public SnapshotDataStoreVO findByVolume(long volumeId, DataStoreRole role) {
        SearchCriteria<SnapshotDataStoreVO> sc = volumeSearch.create();
        sc.setParameters("volume_id", volumeId);
        sc.setParameters("store_role", role);
        return findOneBy(sc);
    }

    /** All refs for one snapshot across every store. */
    @Override
    public List<SnapshotDataStoreVO> findBySnapshotId(long snapshotId) {
        SearchCriteria<SnapshotDataStoreVO> sc = snapshotIdSearch.create();
        sc.setParameters("snapshot_id", snapshotId);
        return listBy(sc);
    }

    /** Destroyed refs on one image store, pending cleanup. */
    @Override
    public List<SnapshotDataStoreVO> listDestroyed(long id) {
        SearchCriteria<SnapshotDataStoreVO> sc = destroyedSearch.create();
        sc.setParameters("store_id", id);
        sc.setParameters("store_role", DataStoreRole.Image);
        sc.setParameters("state", ObjectInDataStoreStateMachine.State.Destroyed);
        return listBy(sc);
    }

    /** Cache-store refs that are neither destroyed nor unreferenced (NEQ filters). */
    @Override
    public List<SnapshotDataStoreVO> listActiveOnCache(long id) {
        SearchCriteria<SnapshotDataStoreVO> sc = cacheSearch.create();
        sc.setParameters("store_id", id);
        sc.setParameters("store_role", DataStoreRole.ImageCache);
        sc.setParameters("state", ObjectInDataStoreStateMachine.State.Destroyed);
        sc.setParameters("ref_cnt", 0);
        return listBy(sc);
    }

    /**
     * Copies every image-cache ref onto a region-wide image store (empty install
     * path; content follows later) and bumps ref_cnt so the cache entry is not
     * recycled before the push completes.
     * NOTE(review): "destroyed" is NOT a declared condition on
     * storeSnapshotSearch (it declares snapshot_id/store_id/store_role/state),
     * so setParameters("destroyed", ...) looks like a latent bug — verify
     * against GenericDaoBase's behavior for undeclared parameters.
     */
    @Override
    public void duplicateCacheRecordsOnRegionStore(long storeId) {
        // find all records on image cache
        SearchCriteria<SnapshotDataStoreVO> sc = storeSnapshotSearch.create();
        sc.setParameters("store_role", DataStoreRole.ImageCache);
        sc.setParameters("destroyed", false);
        List<SnapshotDataStoreVO> snapshots = listBy(sc);
        // create an entry for each record, but with empty install path since the content is not yet on region-wide store yet
        if (snapshots != null) {
            s_logger.info("Duplicate " + snapshots.size() + " snapshot cache store records to region store");
            for (SnapshotDataStoreVO snap : snapshots) {
                SnapshotDataStoreVO snapStore = findByStoreSnapshot(DataStoreRole.Image, storeId, snap.getSnapshotId());
                if (snapStore != null) {
                    s_logger.info("There is already entry for snapshot " + snap.getSnapshotId() + " on region store " + storeId);
                    continue;
                }
                s_logger.info("Persisting an entry for snapshot " + snap.getSnapshotId() + " on region store " + storeId);
                SnapshotDataStoreVO ss = new SnapshotDataStoreVO();
                ss.setSnapshotId(snap.getSnapshotId());
                ss.setDataStoreId(storeId);
                ss.setRole(DataStoreRole.Image);
                ss.setVolumeId(snap.getVolumeId());
                ss.setParentSnapshotId(snap.getParentSnapshotId());
                ss.setState(snap.getState());
                ss.setSize(snap.getSize());
                ss.setPhysicalSize(snap.getPhysicalSize());
                ss.setRefCnt(snap.getRefCnt());
                persist(ss);
                // increase ref_cnt so that this will not be recycled before the content is pushed to region-wide store
                snap.incrRefCnt();
                update(snap.getId(), snap);
            }
        }
    }

    /** The Ready cache-store ref for a snapshot, including removed rows. */
    @Override
    public SnapshotDataStoreVO findReadyOnCache(long snapshotId) {
        SearchCriteria<SnapshotDataStoreVO> sc = storeSnapshotSearch.create();
        sc.setParameters("snapshot_id", snapshotId);
        sc.setParameters("store_role", DataStoreRole.ImageCache);
        sc.setParameters("state", ObjectInDataStoreStateMachine.State.Ready);
        return findOneIncludingRemovedBy(sc);
    }

    /** All cache-store refs for a snapshot, any state. */
    @Override
    public List<SnapshotDataStoreVO> listOnCache(long snapshotId) {
        SearchCriteria<SnapshotDataStoreVO> sc = storeSnapshotSearch.create();
        sc.setParameters("snapshot_id", snapshotId);
        sc.setParameters("store_role", DataStoreRole.ImageCache);
        return search(sc, null);
    }

    /**
     * Demotes every ref on a store to the ImageCache role.
     * NOTE(review): "destroyed" is NOT a declared condition on storeSearch
     * (it declares store_id/store_role/state) — same suspected bug as in
     * duplicateCacheRecordsOnRegionStore; confirm before reuse.
     */
    @Override
    public void updateStoreRoleToCache(long storeId) {
        SearchCriteria<SnapshotDataStoreVO> sc = storeSearch.create();
        sc.setParameters("store_id", storeId);
        sc.setParameters("destroyed", false);
        List<SnapshotDataStoreVO> snaps = listBy(sc);
        if (snaps != null) {
            s_logger.info("Update to cache store role for " + snaps.size() + " entries in snapshot_store_ref");
            for (SnapshotDataStoreVO snap : snaps) {
                snap.setRole(DataStoreRole.ImageCache);
                update(snap.getId(), snap);
            }
        }
    }

    /** Re-points all refs of oldVolId to newVolId (bulk update via UpdateBuilder). */
    @Override
    public void updateVolumeIds(long oldVolId, long newVolId) {
        SearchCriteria<SnapshotDataStoreVO> sc = volumeIdSearch.create();
        sc.setParameters("volume_id", oldVolId);
        SnapshotDataStoreVO snapshot = createForUpdate();
        snapshot.setVolumeId(newVolId);
        UpdateBuilder ub = getUpdateBuilder(snapshot);
        update(ub, sc, null);
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.casemgmt.impl.event;

import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.drools.core.event.AbstractEventSupport;
import org.jbpm.casemgmt.api.event.CaseCancelEvent;
import org.jbpm.casemgmt.api.event.CaseCommentEvent;
import org.jbpm.casemgmt.api.event.CaseDataEvent;
import org.jbpm.casemgmt.api.event.CaseDestroyEvent;
import org.jbpm.casemgmt.api.event.CaseDynamicSubprocessEvent;
import org.jbpm.casemgmt.api.event.CaseDynamicTaskEvent;
import org.jbpm.casemgmt.api.event.CaseEventListener;
import org.jbpm.casemgmt.api.event.CaseReopenEvent;
import org.jbpm.casemgmt.api.event.CaseRoleAssignmentEvent;
import org.jbpm.casemgmt.api.event.CaseStartEvent;
import org.jbpm.casemgmt.api.model.instance.CaseFileInstance;
import org.jbpm.casemgmt.api.model.instance.CommentInstance;
import org.kie.api.task.model.OrganizationalEntity;

/**
 * Responsible for firing case related events to notify registered CaseEventListeners
 *
 * All fire* methods share one pattern: the event object is only allocated if at
 * least one listener is registered (hasNext check), then dispatched to every
 * listener via a do/while over the iterator.
 */
public class CaseEventSupport extends AbstractEventSupport<CaseEventListener> {

    // Registers each provided listener; a null list means no listeners.
    public CaseEventSupport(List<CaseEventListener> caseEventListeners) {
        if (caseEventListeners != null) {
            caseEventListeners.forEach( cvl -> addEventListener(cvl));
        }
    }

    /*
     * fire*CaseStarted
     */
    public void fireBeforeCaseStarted(String caseId, String deploymentId, String caseDefinitionId, CaseFileInstance caseFile) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseStartEvent event = new CaseStartEvent(caseId, deploymentId, caseDefinitionId, caseFile);
            do{
                iter.next().beforeCaseStarted(event);
            } while (iter.hasNext());
        }
    }

    // After-variant additionally carries the started process instance id.
    public void fireAfterCaseStarted(String caseId, String deploymentId, String caseDefinitionId, CaseFileInstance caseFile, long processInstanceId) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseStartEvent event = new CaseStartEvent(caseId, deploymentId, caseDefinitionId, caseFile, processInstanceId);
            do {
                iter.next().afterCaseStarted(event);
            } while (iter.hasNext());
        }
    }

    /*
     * fire*CaseCancelled
     */
    public void fireBeforeCaseCancelled(String caseId, List<Long> processInstanceIds) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseCancelEvent event = new CaseCancelEvent(caseId, processInstanceIds);
            do{
                iter.next().beforeCaseCancelled(event);
            } while (iter.hasNext());
        }
    }

    public void fireAfterCaseCancelled(String caseId, List<Long> processInstanceIds) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseCancelEvent event = new CaseCancelEvent(caseId, processInstanceIds);
            do {
                iter.next().afterCaseCancelled(event);
            } while (iter.hasNext());
        }
    }

    /*
     * fire*CaseDestroyed
     */
    public void fireBeforeCaseDestroyed(String caseId, List<Long> processInstanceIds) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseDestroyEvent event = new CaseDestroyEvent(caseId, processInstanceIds);
            do{
                iter.next().beforeCaseDestroyed(event);
            } while (iter.hasNext());
        }
    }

    public void fireAfterCaseDestroyed(String caseId, List<Long> processInstanceIds) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseDestroyEvent event = new CaseDestroyEvent(caseId, processInstanceIds);
            do {
                iter.next().afterCaseDestroyed(event);
            } while (iter.hasNext());
        }
    }

    /*
     * fire*CaseReopened
     */
    public void fireBeforeCaseReopened(String caseId, String deploymentId, String caseDefinitionId, Map<String, Object> data) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseReopenEvent event = new CaseReopenEvent(caseId, deploymentId, caseDefinitionId, data);
            do{
                iter.next().beforeCaseReopen(event);
            } while (iter.hasNext());
        }
    }

    public void fireAfterCaseReopened(String caseId, String deploymentId, String caseDefinitionId, Map<String, Object> data, long processInstanceId) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseReopenEvent event = new CaseReopenEvent(caseId, deploymentId, caseDefinitionId, data, processInstanceId);
            do {
                iter.next().afterCaseReopen(event);
            } while (iter.hasNext());
        }
    }

    /*
     * fire*CaseCommentAdded
     */
    public void fireBeforeCaseCommentAdded(String caseId, CommentInstance commentInstance) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseCommentEvent event = new CaseCommentEvent(caseId, commentInstance);
            do {
                iter.next().beforeCaseCommentAdded(event);
            } while (iter.hasNext());
        }
    }

    public void fireAfterCaseCommentAdded(String caseId, CommentInstance commentInstance) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseCommentEvent event = new CaseCommentEvent(caseId, commentInstance);
            do {
                // NOTE(review): BUG — this "after" notifier invokes the
                // before* callback; should be afterCaseCommentAdded(event).
                iter.next().beforeCaseCommentAdded(event);
            } while (iter.hasNext());
        }
    }

    /*
     * fire*CaseCommentUpdated
     */
    public void fireBeforeCaseCommentUpdated(String caseId, CommentInstance commentInstance) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseCommentEvent event = new CaseCommentEvent(caseId, commentInstance);
            do {
                iter.next().beforeCaseCommentUpdated(event);
            } while (iter.hasNext());
        }
    }

    public void fireAfterCaseCommentUpdated(String caseId, CommentInstance commentInstance) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseCommentEvent event = new CaseCommentEvent(caseId, commentInstance);
            do {
                // NOTE(review): BUG — this "after" notifier invokes the
                // before* callback; should be afterCaseCommentUpdated(event).
                iter.next().beforeCaseCommentUpdated(event);
            } while (iter.hasNext());
        }
    }

    /*
     * fire*CaseCommentRemoved
     */
    public void fireBeforeCaseCommentRemoved(String caseId, CommentInstance commentInstance) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseCommentEvent event = new CaseCommentEvent(caseId, commentInstance);
            do {
                iter.next().beforeCaseCommentRemoved(event);
            } while (iter.hasNext());
        }
    }

    public void fireAfterCaseCommentRemoved(String caseId, CommentInstance commentInstance) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseCommentEvent event = new CaseCommentEvent(caseId, commentInstance);
            do {
                // NOTE(review): BUG — this "after" notifier invokes the
                // before* callback; should be afterCaseCommentRemoved(event).
                iter.next().beforeCaseCommentRemoved(event);
            } while (iter.hasNext());
        }
    }

    /*
     * fire*CaseRoleAssignmentAdded
     */
    public void fireBeforeCaseRoleAssignmentAdded(String caseId, String role, OrganizationalEntity entity) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseRoleAssignmentEvent event = new CaseRoleAssignmentEvent(caseId, role, entity);
            do {
                iter.next().beforeCaseRoleAssignmentAdded(event);
            } while (iter.hasNext());
        }
    }

    public void fireAfterCaseRoleAssignmentAdded(String caseId, String role, OrganizationalEntity entity) {
        final Iterator<CaseEventListener> iter = getEventListenersIterator();
        if (iter.hasNext()) {
            final CaseRoleAssignmentEvent event = new CaseRoleAssignmentEvent(caseId, role, entity);
            do {
                iter.next().afterCaseRoleAssignmentAdded(event);
            } while (iter.hasNext());
        }
    }

    /*
     * fire*CaseRoleAssignmentRemoved
     */
    public void fireBeforeCaseRoleAssignmentRemoved(String caseId, String role, OrganizationalEntity entity) {
        final Iterator<CaseEventListener>
iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseRoleAssignmentEvent event = new CaseRoleAssignmentEvent(caseId, role, entity); do { iter.next().beforeCaseRoleAssignmentRemoved(event); } while (iter.hasNext()); } } public void fireAfterCaseRoleAssignmentRemoved(String caseId, String role, OrganizationalEntity entity) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseRoleAssignmentEvent event = new CaseRoleAssignmentEvent(caseId, role, entity); do { iter.next().afterCaseRoleAssignmentRemoved(event); } while (iter.hasNext()); } } /* * fire*CaseDataAdded */ public void fireBeforeCaseDataAdded(String caseId, Map<String, Object> data) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseDataEvent event = new CaseDataEvent(caseId, data); do { iter.next().beforeCaseDataAdded(event); } while (iter.hasNext()); } } public void fireAfterCaseDataAdded(String caseId, Map<String, Object> data) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseDataEvent event = new CaseDataEvent(caseId, data); do { iter.next().afterCaseDataAdded(event); } while (iter.hasNext()); } } /* * fire*CaseDataRemoved */ public void fireBeforeCaseDataRemoved(String caseId, Map<String, Object> data) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseDataEvent event = new CaseDataEvent(caseId, data); do { iter.next().beforeCaseDataRemoved(event); } while (iter.hasNext()); } } public void fireAfterCaseDataRemoved(String caseId, Map<String, Object> data) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseDataEvent event = new CaseDataEvent(caseId, data); do { iter.next().afterCaseDataRemoved(event); } while (iter.hasNext()); } } /* * fire*CaseDynamicTaskAdded */ public void fireBeforeDynamicTaskAdded(String caseId, long 
processInstanceId, String nodeType, Map<String, Object> parameters) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseDynamicTaskEvent event = new CaseDynamicTaskEvent(caseId, nodeType, parameters, processInstanceId); do { iter.next().beforeDynamicTaskAdded(event); } while (iter.hasNext()); } } public void fireAfterDynamicTaskAdded(String caseId, long processInstanceId, String nodeType, Map<String, Object> parameters) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseDynamicTaskEvent event = new CaseDynamicTaskEvent(caseId, nodeType, parameters, processInstanceId); do { iter.next().afterDynamicTaskAdded(event); } while (iter.hasNext()); } } /* * fire*CaseDynamicProcessAdded */ public void fireBeforeDynamicProcessAdded(String caseId, long processInstanceId, String processId, Map<String, Object> parameters) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseDynamicSubprocessEvent event = new CaseDynamicSubprocessEvent(caseId, processId, parameters, processInstanceId); do { iter.next().beforeDynamicProcessAdded(event); } while (iter.hasNext()); } } public void fireAfterDynamicProcessAdded(String caseId, long processInstanceId, String processId, Map<String, Object> parameters, long subProcessInstanceId) { final Iterator<CaseEventListener> iter = getEventListenersIterator(); if (iter.hasNext()) { final CaseDynamicSubprocessEvent event = new CaseDynamicSubprocessEvent(caseId, processId, parameters, processInstanceId, subProcessInstanceId); do { iter.next().afterDynamicProcessAdded(event); } while (iter.hasNext()); } } public void reset() { this.clear(); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.master.RegionState.State; import org.apache.hadoop.hbase.client.TableState; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import 
org.apache.hadoop.hbase.util.Pair;

/**
 * Region state accountant. It holds the states of all regions in the memory.
 * In normal scenario, it should match the meta table and the true region states.
 *
 * This map is used by AssignmentManager to track region states.
 */
@InterfaceAudience.Private
public class RegionStates {
  private static final Log LOG = LogFactory.getLog(RegionStates.class);

  /**
   * Regions currently in transition.
   */
  final HashMap<String, RegionState> regionsInTransition =
    new HashMap<String, RegionState>();

  /**
   * Region encoded name to state map.
   * All the regions should be in this map.
   */
  private final Map<String, RegionState> regionStates =
    new HashMap<String, RegionState>();

  /**
   * Server to regions assignment map.
   * Contains the set of regions currently assigned to a given server.
   */
  private final Map<ServerName, Set<HRegionInfo>> serverHoldings =
    new HashMap<ServerName, Set<HRegionInfo>>();

  /**
   * Maintains the mapping from the default region to the replica regions.
   */
  private final Map<HRegionInfo, Set<HRegionInfo>> defaultReplicaToOtherReplicas =
    new HashMap<HRegionInfo, Set<HRegionInfo>>();

  /**
   * Region to server assignment map.
   * Contains the server a given region is currently assigned to.
   */
  private final TreeMap<HRegionInfo, ServerName> regionAssignments =
    new TreeMap<HRegionInfo, ServerName>();

  /**
   * Encoded region name to server assignment map for re-assignment
   * purpose. Contains the server a given region is last known assigned
   * to, which has not completed log splitting, so not assignable.
   * If a region is currently assigned, this server info in this
   * map should be the same as that in regionAssignments.
   * However the info in regionAssignments is cleared when the region
   * is offline while the info in lastAssignments is cleared when
   * the region is closed or the server is dead and processed.
   */
  private final HashMap<String, ServerName> lastAssignments =
    new HashMap<String, ServerName>();

  /**
   * Encoded region name to server assignment map for the
   * purpose to clean up serverHoldings when a region is online
   * on a new server. When the region is offline from the previous
   * server, we cleaned up regionAssignments so that it has the
   * latest assignment map. But we didn't clean up serverHoldings
   * to match the meta. We need this map to find out the old server
   * whose serverHoldings needs cleanup, given a moved region.
   */
  private final HashMap<String, ServerName> oldAssignments =
    new HashMap<String, ServerName>();

  /**
   * Map a host port pair string to the latest start code
   * of a region server which is known to be dead. It is dead
   * to us, but server manager may not know it yet.
   */
  private final HashMap<String, Long> deadServers =
    new HashMap<String, Long>();

  /**
   * Map a dead servers to the time when log split is done.
   * Since log splitting is not ordered, we have to remember
   * all processed instances. The map is cleaned up based
   * on a configured time. By default, we assume a dead
   * server should be done with log splitting in two hours.
   */
  private final HashMap<ServerName, Long> processedServers =
    new HashMap<ServerName, Long>();
  // Timestamp of the last sweep of processedServers; used to throttle cleanup in logSplit().
  private long lastProcessedServerCleanTime;

  private final TableStateManager tableStateManager;
  private final RegionStateStore regionStateStore;
  private final ServerManager serverManager;
  private final Server server;

  // The maximum time to keep a log split info in region states map
  static final String LOG_SPLIT_TIME = "hbase.master.maximum.logsplit.keeptime";
  static final long DEFAULT_LOG_SPLIT_TIME = 7200000L; // 2 hours

  RegionStates(final Server master, final TableStateManager tableStateManager,
      final ServerManager serverManager, final RegionStateStore regionStateStore) {
    this.tableStateManager = tableStateManager;
    this.regionStateStore = regionStateStore;
    this.serverManager = serverManager;
    this.server = master;
  }

  /**
   * @return an unmodifiable view of the region assignment map
   */
  public synchronized Map<HRegionInfo, ServerName> getRegionAssignments() {
    return Collections.unmodifiableMap(regionAssignments);
  }

  /**
   * Return the replicas (including default) for the regions grouped by ServerName
   * @param regions regions whose replica assignments should be looked up
   * @return a pair containing the groupings as a map
   */
  synchronized Map<ServerName, List<HRegionInfo>> getRegionAssignments(
    Collection<HRegionInfo> regions) {
    Map<ServerName, List<HRegionInfo>> map = new HashMap<ServerName, List<HRegionInfo>>();
    for (HRegionInfo region : regions) {
      HRegionInfo defaultReplica = RegionReplicaUtil.getRegionInfoForDefaultReplica(region);
      Set<HRegionInfo> allReplicas = defaultReplicaToOtherReplicas.get(defaultReplica);
      if (allReplicas != null) {
        for (HRegionInfo hri : allReplicas) {
          ServerName server = regionAssignments.get(hri);
          if (server != null) {
            List<HRegionInfo> regionsOnServer = map.get(server);
            if (regionsOnServer == null) {
              // most servers host a single replica of a given region, so presize to 1
              regionsOnServer = new ArrayList<HRegionInfo>(1);
              map.put(server, regionsOnServer);
            }
            regionsOnServer.add(hri);
          }
        }
      }
    }
    return map;
  }

  /** @return the server currently hosting the region, or null if unassigned */
  public synchronized ServerName getRegionServerOfRegion(HRegionInfo hri) {
    return regionAssignments.get(hri);
  }

  /**
   * Get regions in transition and their states
   */
  @SuppressWarnings("unchecked")
  public synchronized Map<String, RegionState> getRegionsInTransition() {
    // clone() so callers get a snapshot that won't change under them
    return (Map<String, RegionState>)regionsInTransition.clone();
  }

  /**
   * @return True if specified region in transition.
   */
  public synchronized boolean isRegionInTransition(final HRegionInfo hri) {
    return regionsInTransition.containsKey(hri.getEncodedName());
  }

  /**
   * @return True if specified region in transition.
   */
  public synchronized boolean isRegionInTransition(final String encodedName) {
    return regionsInTransition.containsKey(encodedName);
  }

  /**
   * @return True if any region in transition.
   */
  public synchronized boolean isRegionsInTransition() {
    return !regionsInTransition.isEmpty();
  }

  /**
   * @return True if specified region assigned, and not in transition.
   */
  public synchronized boolean isRegionOnline(final HRegionInfo hri) {
    return !isRegionInTransition(hri) && regionAssignments.containsKey(hri);
  }

  /**
   * @return True if specified region offline/closed, but not in transition.
   *   If the region is not in the map, it is offline to us too.
   */
  public synchronized boolean isRegionOffline(final HRegionInfo hri) {
    return getRegionState(hri) == null || (!isRegionInTransition(hri)
      && isRegionInState(hri, State.OFFLINE, State.CLOSED));
  }

  /**
   * @return True if specified region is in one of the specified states.
   */
  public boolean isRegionInState(
      final HRegionInfo hri, final State... states) {
    return isRegionInState(hri.getEncodedName(), states);
  }

  /**
   * @return True if specified region is in one of the specified states.
   */
  public boolean isRegionInState(
      final String encodedName, final State... states) {
    RegionState regionState = getRegionState(encodedName);
    return isOneOfStates(regionState, states);
  }

  /**
   * Wait for the state map to be updated by assignment manager.
   * @param timeout max milliseconds to wait; woken early by notifyAll() from state changes
   */
  public synchronized void waitForUpdate(
      final long timeout) throws InterruptedException {
    this.wait(timeout);
  }

  /**
   * Get region transition state
   */
  public RegionState getRegionTransitionState(final HRegionInfo hri) {
    return getRegionTransitionState(hri.getEncodedName());
  }

  /**
   * Get region transition state
   */
  public synchronized RegionState getRegionTransitionState(
      final String encodedName) {
    return regionsInTransition.get(encodedName);
  }

  /**
   * Add a list of regions to RegionStates. If a region is split
   * and offline, its state will be SPLIT. Otherwise, its state will
   * be OFFLINE. Region already in RegionStates will be skipped.
   */
  public void createRegionStates(
      final List<HRegionInfo> hris) {
    for (HRegionInfo hri: hris) {
      createRegionState(hri);
    }
  }

  /**
   * Add a region to RegionStates. If the region is split
   * and offline, its state will be SPLIT. Otherwise, its state will
   * be OFFLINE. If it is already in RegionStates, this call has
   * no effect, and the original state is returned.
   */
  public RegionState createRegionState(final HRegionInfo hri) {
    return createRegionState(hri, null, null, null);
  }

  /**
   * Add a region to RegionStates with the specified state.
   * If the region is already in RegionStates, this call has
   * no effect, and the original state is returned.
   *
   * @param hri the region info to create a state for
   * @param newState the state to the region in set to
   * @param serverName the server the region is transitioning on
   * @param lastHost the last server that hosts the region
   * @return the current state
   */
  public synchronized RegionState createRegionState(final HRegionInfo hri,
      State newState, ServerName serverName, ServerName lastHost) {
    if (newState == null || (newState == State.OPEN && serverName == null)) {
      newState = State.OFFLINE;
    }
    if (hri.isOffline() && hri.isSplit()) {
      newState = State.SPLIT;
      serverName = null;
    }
    String encodedName = hri.getEncodedName();
    RegionState regionState = regionStates.get(encodedName);
    if (regionState != null) {
      LOG.warn("Tried to create a state for a region already in RegionStates, "
        + "used existing: " + regionState + ", ignored new: " + newState);
    } else {
      regionState = new RegionState(hri, newState, serverName);
      regionStates.put(encodedName, regionState);
      if (newState == State.OPEN) {
        if (!serverName.equals(lastHost)) {
          LOG.warn("Open region's last host " + lastHost
            + " should be the same as the current one " + serverName
            + ", ignored the last and used the current one");
          lastHost = serverName;
        }
        lastAssignments.put(encodedName, lastHost);
        regionAssignments.put(hri, lastHost);
      } else if (!isOneOfStates(regionState, State.MERGED,
          State.SPLIT, State.OFFLINE)) {
        // any other non-terminal state means the region is in transition
        regionsInTransition.put(encodedName, regionState);
      }
      if (lastHost != null && newState != State.SPLIT) {
        addToServerHoldings(lastHost, hri);
        if (newState != State.OPEN) {
          // remember the old host so serverHoldings can be cleaned up
          // once the region comes online elsewhere
          oldAssignments.put(encodedName, lastHost);
        }
      }
    }
    return regionState;
  }

  /**
   * Update a region state. It will be put in transition if not already there.
   */
  public RegionState updateRegionState(
      final HRegionInfo hri, final State state) {
    RegionState regionState = getRegionState(hri.getEncodedName());
    return updateRegionState(hri, state,
      regionState == null ? null : regionState.getServerName());
  }

  /**
   * Update a region state. It will be put in transition if not already there.
   */
  public RegionState updateRegionState(
      final HRegionInfo hri, final State state, final ServerName serverName) {
    return updateRegionState(hri, state, serverName, HConstants.NO_SEQNUM);
  }

  public void regionOnline(
      final HRegionInfo hri, final ServerName serverName) {
    regionOnline(hri, serverName, HConstants.NO_SEQNUM);
  }

  /**
   * A region is online, won't be in transition any more.
   * We can't confirm it is really online on specified region server
   * because it hasn't been put in region server's online region list yet.
   */
  public void regionOnline(final HRegionInfo hri,
      final ServerName serverName, long openSeqNum) {
    String encodedName = hri.getEncodedName();
    if (!serverManager.isServerOnline(serverName)) {
      // This is possible if the region server dies before master gets a
      // chance to handle ZK event in time. At this time, if the dead server
      // is already processed by SSH, we should ignore this event.
      // If not processed yet, ignore and let SSH deal with it.
      LOG.warn("Ignored, " + encodedName
        + " was opened on a dead server: " + serverName);
      return;
    }
    updateRegionState(hri, State.OPEN, serverName, openSeqNum);

    synchronized (this) {
      regionsInTransition.remove(encodedName);
      ServerName oldServerName = regionAssignments.put(hri, serverName);
      if (!serverName.equals(oldServerName)) {
        // NOTE(review): the else branch below also logs at debug level, so it is
        // effectively a no-op when debug logging is disabled — likely intended to
        // log a shorter message at a lower-verbosity level; confirm against upstream.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Onlined " + hri.getShortNameToLog() + " on " + serverName + " " + hri);
        } else {
          LOG.debug("Onlined " + hri.getShortNameToLog() + " on " + serverName);
        }
        addToServerHoldings(serverName, hri);
        addToReplicaMapping(hri);
        if (oldServerName == null) {
          // region may have moved while offline; look up the stale holder
          oldServerName = oldAssignments.remove(encodedName);
        }
        if (oldServerName != null
            && serverHoldings.containsKey(oldServerName)) {
          LOG.info("Offlined " + hri.getShortNameToLog() + " from " + oldServerName);
          removeFromServerHoldings(oldServerName, hri);
        }
      }
    }
  }

  // Adds hri to the set of regions held by serverName, creating the set on first use.
  private void addToServerHoldings(ServerName serverName, HRegionInfo hri) {
    Set<HRegionInfo> regions = serverHoldings.get(serverName);
    if (regions == null) {
      regions = new HashSet<HRegionInfo>();
      serverHoldings.put(serverName, regions);
    }
    regions.add(hri);
  }

  // Records hri under its default replica in defaultReplicaToOtherReplicas.
  private void addToReplicaMapping(HRegionInfo hri) {
    HRegionInfo defaultReplica = RegionReplicaUtil.getRegionInfoForDefaultReplica(hri);
    Set<HRegionInfo> replicas =
        defaultReplicaToOtherReplicas.get(defaultReplica);
    if (replicas == null) {
      replicas = new HashSet<HRegionInfo>();
      defaultReplicaToOtherReplicas.put(defaultReplica, replicas);
    }
    replicas.add(hri);
  }

  // Removes hri from serverName's holdings; drops the empty set to keep the map small.
  private void removeFromServerHoldings(ServerName serverName, HRegionInfo hri) {
    Set<HRegionInfo> oldRegions = serverHoldings.get(serverName);
    oldRegions.remove(hri);
    if (oldRegions.isEmpty()) {
      serverHoldings.remove(serverName);
    }
  }

  // Removes hri from the replica mapping; drops the empty set when the last replica goes.
  private void removeFromReplicaMapping(HRegionInfo hri) {
    HRegionInfo defaultReplica = RegionReplicaUtil.getRegionInfoForDefaultReplica(hri);
    Set<HRegionInfo> replicas = defaultReplicaToOtherReplicas.get(defaultReplica);
    if (replicas != null) {
      replicas.remove(hri);
      if (replicas.isEmpty()) {
        defaultReplicaToOtherReplicas.remove(defaultReplica);
      }
    }
  }

  /**
   * A dead server's wals have been split so that all the regions
   * used to be open on it can be safely assigned now. Mark them assignable.
   */
  public synchronized void logSplit(final ServerName serverName) {
    for (Iterator<Map.Entry<String, ServerName>> it
        = lastAssignments.entrySet().iterator(); it.hasNext();) {
      Map.Entry<String, ServerName> e = it.next();
      if (e.getValue().equals(serverName)) {
        it.remove();
      }
    }
    long now = System.currentTimeMillis();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Adding to processed servers " + serverName);
    }
    processedServers.put(serverName, Long.valueOf(now));
    Configuration conf = server.getConfiguration();
    long obsoleteTime = conf.getLong(LOG_SPLIT_TIME, DEFAULT_LOG_SPLIT_TIME);
    // Doesn't have to be very accurate about the clean up time
    if (now > lastProcessedServerCleanTime + obsoleteTime) {
      lastProcessedServerCleanTime = now;
      long cutoff = now - obsoleteTime;
      for (Iterator<Map.Entry<ServerName, Long>> it
          = processedServers.entrySet().iterator(); it.hasNext();) {
        Map.Entry<ServerName, Long> e = it.next();
        if (e.getValue().longValue() < cutoff) {
          if (LOG.isDebugEnabled()) {
            LOG.debug("Removed from processed servers " + e.getKey());
          }
          it.remove();
        }
      }
    }
  }

  /**
   * Log split is done for a given region, so it is assignable now.
   */
  public void logSplit(final HRegionInfo region) {
    clearLastAssignment(region);
  }

  public synchronized void clearLastAssignment(final HRegionInfo region) {
    lastAssignments.remove(region.getEncodedName());
  }

  /**
   * A region is offline, won't be in transition any more.
   */
  public void regionOffline(final HRegionInfo hri) {
    regionOffline(hri, null);
  }

  /**
   * A region is offline, won't be in transition any more. Its state
   * should be the specified expected state, which can only be
   * Split/Merged/Offline/null(=Offline)/SplittingNew/MergingNew.
   */
  public void regionOffline(
      final HRegionInfo hri, final State expectedState) {
    Preconditions.checkArgument(expectedState == null
      || RegionState.isUnassignable(expectedState),
        "Offlined region should not be " + expectedState);
    if (isRegionInState(hri, State.SPLITTING_NEW, State.MERGING_NEW)) {
      // Remove it from all region maps
      deleteRegion(hri);
      return;
    }
    State newState =
      expectedState == null ? State.OFFLINE : expectedState;
    updateRegionState(hri, newState);
    String encodedName = hri.getEncodedName();
    synchronized (this) {
      regionsInTransition.remove(encodedName);
      ServerName oldServerName = regionAssignments.remove(hri);
      if (oldServerName != null && serverHoldings.containsKey(oldServerName)) {
        if (newState == State.MERGED || newState == State.SPLIT
            || hri.isMetaRegion() || tableStateManager.isTableState(hri.getTable(),
              TableState.State.DISABLED, TableState.State.DISABLING)) {
          // Offline the region only if it's merged/split, or the table is disabled/disabling.
          // Otherwise, offline it from this server only when it is online on a different server.
          LOG.info("Offlined " + hri.getShortNameToLog() + " from " + oldServerName);
          removeFromServerHoldings(oldServerName, hri);
          removeFromReplicaMapping(hri);
        } else {
          // Need to remember it so that we can offline it from this
          // server when it is online on a different server.
          oldAssignments.put(encodedName, oldServerName);
        }
      }
    }
  }

  /**
   * A server is offline, all regions on it are dead.
   * @return regions in transition on that server that SSH should re-assign
   */
  public synchronized List<HRegionInfo> serverOffline(final ServerName sn) {
    // Offline all regions on this server not already in transition.
    List<HRegionInfo> rits = new ArrayList<HRegionInfo>();
    Set<HRegionInfo> assignedRegions = serverHoldings.get(sn);
    if (assignedRegions == null) {
      assignedRegions = new HashSet<HRegionInfo>();
    }

    // Offline regions outside the loop to avoid ConcurrentModificationException
    Set<HRegionInfo> regionsToOffline = new HashSet<HRegionInfo>();
    for (HRegionInfo region : assignedRegions) {
      // Offline open regions, no need to offline if SPLIT/MERGED/OFFLINE
      if (isRegionOnline(region)) {
        regionsToOffline.add(region);
      } else if (isRegionInState(region, State.SPLITTING, State.MERGING)) {
        LOG.debug("Offline splitting/merging region " + getRegionState(region));
        regionsToOffline.add(region);
      }
    }

    for (RegionState state : regionsInTransition.values()) {
      HRegionInfo hri = state.getRegion();
      if (assignedRegions.contains(hri)) {
        // Region is open on this region server, but in transition.
        // This region must be moving away from this server, or splitting/merging.
        // SSH will handle it, either skip assigning, or re-assign.
        LOG.info("Transitioning " + state + " will be handled by SSH for " + sn);
      } else if (sn.equals(state.getServerName())) {
        // Region is in transition on this region server, and this
        // region is not open on this server. So the region must be
        // moving to this server from another one (i.e. opening or
        // pending open on this server, was open on another one.
        // Offline state is also kind of pending open if the region is in
        // transition. The region could be in failed_close state too if we have
        // tried several times to open it while this region server is not reachable)
        if (isOneOfStates(state, State.OPENING, State.PENDING_OPEN,
            State.FAILED_OPEN, State.FAILED_CLOSE, State.OFFLINE)) {
          LOG.info("Found region in " + state + " to be reassigned by SSH for " + sn);
          rits.add(hri);
        } else if (isOneOfStates(state, State.SPLITTING_NEW)) {
          try {
            // daughter never made it to meta: offline it and remove its dir on disk
            if (MetaTableAccessor.getRegion(server.getConnection(), state.getRegion()
                .getEncodedNameAsBytes()) == null) {
              regionsToOffline.add(state.getRegion());
              FSUtils.deleteRegionDir(server.getConfiguration(), state.getRegion());
            }
          } catch (IOException e) {
            LOG.warn("Got exception while deleting " + state.getRegion()
              + " directories from file system.", e);
          }
        } else {
          LOG.warn("THIS SHOULD NOT HAPPEN: unexpected " + state);
        }
      }
    }

    for (HRegionInfo hri : regionsToOffline) {
      regionOffline(hri);
    }

    // wake up any waitForUpdate()/waitOnRegionToClearRegionsInTransition() waiters
    this.notifyAll();
    return rits;
  }

  /**
   * Gets the online regions of the specified table.
   * This method looks at the in-memory state.  It does not go to <code>hbase:meta</code>.
   * Only returns <em>online</em> regions.  If a region on this table has been
   * closed during a disable, etc., it will be included in the returned list.
   * So, the returned list may not necessarily be ALL regions in this table, its
   * all the ONLINE regions in the table.
   * @param tableName
   * @return Online regions from <code>tableName</code>
   */
  public synchronized List<HRegionInfo> getRegionsOfTable(TableName tableName) {
    List<HRegionInfo> tableRegions = new ArrayList<HRegionInfo>();
    // boundary needs to have table's name but regionID 0 so that it is sorted
    // before all table's regions.
    HRegionInfo boundary = new HRegionInfo(tableName, null, null, false, 0L);
    for (HRegionInfo hri: regionAssignments.tailMap(boundary).keySet()) {
      if(!hri.getTable().equals(tableName)) break;
      tableRegions.add(hri);
    }
    return tableRegions;
  }

  /**
   * Wait on region to clear regions-in-transition.
   * <p>
   * If the region isn't in transition, returns immediately.  Otherwise, method
   * blocks until the region is out of transition.
   */
  public synchronized void waitOnRegionToClearRegionsInTransition(
      final HRegionInfo hri) throws InterruptedException {
    if (!isRegionInTransition(hri)) return;

    while(!server.isStopped() && isRegionInTransition(hri)) {
      RegionState rs = getRegionState(hri);
      LOG.info("Waiting on " + rs + " to clear regions-in-transition");
      // poll every 100ms; state changes also notifyAll() to wake us earlier
      waitForUpdate(100);
    }

    if (server.isStopped()) {
      LOG.info("Giving up wait on region in "
        + "transition because stoppable.isStopped is set");
    }
  }

  /**
   * A table is deleted. Remove its regions from all internal maps.
   * We loop through all regions assuming we don't delete tables too much.
   */
  public void tableDeleted(final TableName tableName) {
    Set<HRegionInfo> regionsToDelete = new HashSet<HRegionInfo>();
    // snapshot the matching regions under the lock, delete outside it
    synchronized (this) {
      for (RegionState state: regionStates.values()) {
        HRegionInfo region = state.getRegion();
        if (region.getTable().equals(tableName)) {
          regionsToDelete.add(region);
        }
      }
    }
    for (HRegionInfo region: regionsToDelete) {
      deleteRegion(region);
    }
  }

  /**
   * Get a copy of all regions assigned to a server
   */
  public synchronized Set<HRegionInfo> getServerRegions(ServerName serverName) {
    Set<HRegionInfo> regions = serverHoldings.get(serverName);
    if (regions == null) return null;
    return new HashSet<HRegionInfo>(regions);
  }

  /**
   * Remove a region from all state maps.
   */
  @VisibleForTesting
  public synchronized void deleteRegion(final HRegionInfo hri) {
    String encodedName = hri.getEncodedName();
    regionsInTransition.remove(encodedName);
    regionStates.remove(encodedName);
    lastAssignments.remove(encodedName);
    ServerName sn = regionAssignments.remove(hri);
    if (sn != null) {
      Set<HRegionInfo> regions = serverHoldings.get(sn);
      regions.remove(hri);
    }
  }

  /**
   * Checking if a region was assigned to a server which is not online now.
   * If so, we should hold re-assign this region till SSH has split its wals.
   * Once logs are split, the last assignment of this region will be reset,
   * which means a null last assignment server is ok for re-assigning.
   *
   * A region server could be dead but we don't know it yet. We may
   * think it's online falsely. Therefore if a server is online, we still
   * need to confirm it reachable and having the expected start code.
   */
  synchronized boolean wasRegionOnDeadServer(final String encodedName) {
    ServerName server = lastAssignments.get(encodedName);
    return isServerDeadAndNotProcessed(server);
  }

  synchronized boolean isServerDeadAndNotProcessed(ServerName server) {
    if (server == null) return false;
    if (serverManager.isServerOnline(server)) {
      String hostAndPort = server.getHostAndPort();
      long startCode = server.getStartcode();
      Long deadCode = deadServers.get(hostAndPort);
      if (deadCode == null || startCode > deadCode.longValue()) {
        if (serverManager.isServerReachable(server)) {
          return false;
        }
        // The size of deadServers won't grow unbounded.
        deadServers.put(hostAndPort, Long.valueOf(startCode));
      }
      // Watch out! If the server is not dead, the region could
      // remain unassigned. That's why ServerManager#isServerReachable
      // should use some retry.
      //
      // We cache this info since it is very unlikely for that
      // instance to come back up later on. We don't want to expire
      // the server since we prefer to let it die naturally.
      LOG.warn("Couldn't reach online server " + server);
    }
    // Now, we know it's dead. Check if it's processed
    return !processedServers.containsKey(server);
  }

  /**
   * Get the last region server a region was on for purpose of re-assignment,
   * i.e. should the re-assignment be held back till log split is done?
   */
  synchronized ServerName getLastRegionServerOfRegion(final String encodedName) {
    return lastAssignments.get(encodedName);
  }

  synchronized void setLastRegionServerOfRegions(
      final ServerName serverName, final List<HRegionInfo> regionInfos) {
    for (HRegionInfo hri: regionInfos) {
      setLastRegionServerOfRegion(serverName, hri.getEncodedName());
    }
  }

  synchronized void setLastRegionServerOfRegion(
      final ServerName serverName, final String encodedName) {
    lastAssignments.put(encodedName, serverName);
  }

  synchronized boolean isRegionOnServer(
      final HRegionInfo hri, final ServerName serverName) {
    Set<HRegionInfo> regions = serverHoldings.get(serverName);
    return regions == null ? false : regions.contains(hri);
  }

  // Persists the split to the region state store, then updates in-memory holdings:
  // parent p is replaced by daughters a and b on server sn.
  void splitRegion(HRegionInfo p,
      HRegionInfo a, HRegionInfo b, ServerName sn) throws IOException {
    regionStateStore.splitRegion(p, a, b, sn);
    synchronized (this) {
      // After PONR, split is considered to be done.
      // Update server holdings to be aligned with the meta.
      Set<HRegionInfo> regions = serverHoldings.get(sn);
      if (regions == null) {
        throw new IllegalStateException(sn + " should host some regions");
      }
      regions.remove(p);
      regions.add(a);
      regions.add(b);
    }
  }

  // Persists the merge to the region state store, then updates in-memory holdings:
  // regions a and b are replaced by the merged region p on server sn.
  void mergeRegions(HRegionInfo p,
      HRegionInfo a, HRegionInfo b, ServerName sn) throws IOException {
    regionStateStore.mergeRegions(p, a, b, sn);
    synchronized (this) {
      // After PONR, merge is considered to be done.
      // Update server holdings to be aligned with the meta.
      Set<HRegionInfo> regions = serverHoldings.get(sn);
      if (regions == null) {
        throw new IllegalStateException(sn + " should host some regions");
      }
      regions.remove(a);
      regions.remove(b);
      regions.add(p);
    }
  }

  /**
   * At cluster clean re/start, mark all user regions closed except those of tables
   * that are excluded, such as disabled/disabling/enabling tables. All user regions
   * and their previous locations are returned.
   */
  synchronized Map<HRegionInfo, ServerName> closeAllUserRegions(Set<TableName> excludedTables) {
    boolean noExcludeTables = excludedTables == null || excludedTables.isEmpty();
    Set<HRegionInfo> toBeClosed = new HashSet<HRegionInfo>(regionStates.size());
    for(RegionState state: regionStates.values()) {
      HRegionInfo hri = state.getRegion();
      if (state.isSplit() || hri.isSplit()) {
        continue;
      }
      TableName tableName = hri.getTable();
      if (!TableName.META_TABLE_NAME.equals(tableName)
          && (noExcludeTables || !excludedTables.contains(tableName))) {
        toBeClosed.add(hri);
      }
    }
    Map<HRegionInfo, ServerName> allUserRegions =
      new HashMap<HRegionInfo, ServerName>(toBeClosed.size());
    for (HRegionInfo hri: toBeClosed) {
      RegionState regionState = updateRegionState(hri, State.CLOSED);
      allUserRegions.put(hri, regionState.getServerName());
    }
    return allUserRegions;
  }

  /**
   * Compute the average load across all region servers.
   * Currently, this uses a very naive computation - just uses the number of
   * regions being served, ignoring stats about number of requests.
   * @return the average load
   */
  protected synchronized double getAverageLoad() {
    int numServers = 0, totalLoad = 0;
    for (Map.Entry<ServerName, Set<HRegionInfo>> e: serverHoldings.entrySet()) {
      Set<HRegionInfo> regions = e.getValue();
      ServerName serverName = e.getKey();
      int regionCount = regions.size();
      if (serverManager.isServerOnline(serverName)) {
        totalLoad += regionCount;
        numServers++;
      }
    }
    if (numServers > 1) {
      // The master region server holds only a couple regions.
      // Don't consider this server in calculating the average load
      // if there are other region servers to avoid possible confusion.
      Set<HRegionInfo> hris = serverHoldings.get(server.getServerName());
      if (hris != null) {
        totalLoad -= hris.size();
        numServers--;
      }
    }
    return numServers == 0 ? 0.0 :
      (double)totalLoad / (double)numServers;
  }

  /**
   * This is an EXPENSIVE clone.  Cloning though is the safest thing to do.
* Can't let out original since it can change and at least the load balancer * wants to iterate this exported list. We need to synchronize on regions * since all access to this.servers is under a lock on this.regions. * * @return A clone of current assignments by table. */ protected Map<TableName, Map<ServerName, List<HRegionInfo>>> getAssignmentsByTable() { Map<TableName, Map<ServerName, List<HRegionInfo>>> result = new HashMap<TableName, Map<ServerName,List<HRegionInfo>>>(); synchronized (this) { if (!server.getConfiguration().getBoolean("hbase.master.loadbalance.bytable", false)) { Map<ServerName, List<HRegionInfo>> svrToRegions = new HashMap<ServerName, List<HRegionInfo>>(serverHoldings.size()); for (Map.Entry<ServerName, Set<HRegionInfo>> e: serverHoldings.entrySet()) { svrToRegions.put(e.getKey(), new ArrayList<HRegionInfo>(e.getValue())); } result.put(TableName.valueOf("ensemble"), svrToRegions); } else { for (Map.Entry<ServerName, Set<HRegionInfo>> e: serverHoldings.entrySet()) { for (HRegionInfo hri: e.getValue()) { if (hri.isMetaRegion()) continue; TableName tablename = hri.getTable(); Map<ServerName, List<HRegionInfo>> svrToRegions = result.get(tablename); if (svrToRegions == null) { svrToRegions = new HashMap<ServerName, List<HRegionInfo>>(serverHoldings.size()); result.put(tablename, svrToRegions); } List<HRegionInfo> regions = svrToRegions.get(e.getKey()); if (regions == null) { regions = new ArrayList<HRegionInfo>(); svrToRegions.put(e.getKey(), regions); } regions.add(hri); } } } } Map<ServerName, ServerLoad> onlineSvrs = serverManager.getOnlineServers(); // Take care of servers w/o assignments, and remove servers in draining mode List<ServerName> drainingServers = this.serverManager.getDrainingServersList(); for (Map<ServerName, List<HRegionInfo>> map: result.values()) { for (ServerName svr: onlineSvrs.keySet()) { if (!map.containsKey(svr)) { map.put(svr, new ArrayList<HRegionInfo>()); } } map.keySet().removeAll(drainingServers); } return result; } 
protected RegionState getRegionState(final HRegionInfo hri) { return getRegionState(hri.getEncodedName()); } /** * Returns a clone of region assignments per server * @return a Map of ServerName to a List of HRegionInfo's */ protected synchronized Map<ServerName, List<HRegionInfo>> getRegionAssignmentsByServer() { Map<ServerName, List<HRegionInfo>> regionsByServer = new HashMap<ServerName, List<HRegionInfo>>(serverHoldings.size()); for (Map.Entry<ServerName, Set<HRegionInfo>> e: serverHoldings.entrySet()) { regionsByServer.put(e.getKey(), new ArrayList<HRegionInfo>(e.getValue())); } return regionsByServer; } protected synchronized RegionState getRegionState(final String encodedName) { return regionStates.get(encodedName); } /** * Get the HRegionInfo from cache, if not there, from the hbase:meta table * @param regionName * @return HRegionInfo for the region */ @SuppressWarnings("deprecation") protected HRegionInfo getRegionInfo(final byte [] regionName) { String encodedName = HRegionInfo.encodeRegionName(regionName); RegionState regionState = getRegionState(encodedName); if (regionState != null) { return regionState.getRegion(); } try { Pair<HRegionInfo, ServerName> p = MetaTableAccessor.getRegion(server.getConnection(), regionName); HRegionInfo hri = p == null ? null : p.getFirst(); if (hri != null) { createRegionState(hri); } return hri; } catch (IOException e) { server.abort("Aborting because error occoured while reading " + Bytes.toStringBinary(regionName) + " from hbase:meta", e); return null; } } static boolean isOneOfStates(RegionState regionState, State... states) { State s = regionState != null ? regionState.getState() : null; for (State state: states) { if (s == state) return true; } return false; } /** * Update a region state. It will be put in transition if not already there. 
*/ private RegionState updateRegionState(final HRegionInfo hri, final RegionState.State state, final ServerName serverName, long openSeqNum) { if (state == RegionState.State.FAILED_CLOSE || state == RegionState.State.FAILED_OPEN) { LOG.warn("Failed to open/close " + hri.getShortNameToLog() + " on " + serverName + ", set to " + state); } String encodedName = hri.getEncodedName(); RegionState regionState = new RegionState( hri, state, System.currentTimeMillis(), serverName); RegionState oldState = getRegionState(encodedName); if (!regionState.equals(oldState)) { LOG.info("Transition " + oldState + " to " + regionState); // Persist region state before updating in-memory info, if needed regionStateStore.updateRegionState(openSeqNum, regionState, oldState); } synchronized (this) { regionsInTransition.put(encodedName, regionState); regionStates.put(encodedName, regionState); // For these states, region should be properly closed. // There should be no log splitting issue. if ((state == State.CLOSED || state == State.MERGED || state == State.SPLIT) && lastAssignments.containsKey(encodedName)) { ServerName last = lastAssignments.get(encodedName); if (last.equals(serverName)) { lastAssignments.remove(encodedName); } else { LOG.warn(encodedName + " moved to " + state + " on " + serverName + ", expected " + last); } } // Once a region is opened, record its last assignment right away. if (serverName != null && state == State.OPEN) { ServerName last = lastAssignments.get(encodedName); if (!serverName.equals(last)) { lastAssignments.put(encodedName, serverName); if (last != null && isServerDeadAndNotProcessed(last)) { LOG.warn(encodedName + " moved to " + serverName + ", while it's previous host " + last + " is dead but not processed yet"); } } } // notify the change this.notifyAll(); } return regionState; } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Describes a launch template.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/FleetLaunchTemplateSpecification"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class FleetLaunchTemplateSpecification implements Serializable, Cloneable {

    /** The ID of the launch template. Either a template ID or a template name must be specified. */
    private String launchTemplateId;

    /** The name of the launch template. Either a template name or a template ID must be specified. */
    private String launchTemplateName;

    /** The version number of the launch template. A version number must be specified. */
    private String version;

    /**
     * <p>
     * The ID of the launch template. You must specify either a template ID or a template name.
     * </p>
     *
     * @param launchTemplateId
     *        The ID of the launch template. You must specify either a template ID or a template name.
     */
    public void setLaunchTemplateId(String launchTemplateId) {
        this.launchTemplateId = launchTemplateId;
    }

    /**
     * <p>
     * The ID of the launch template. You must specify either a template ID or a template name.
     * </p>
     *
     * @return The ID of the launch template. You must specify either a template ID or a template name.
     */
    public String getLaunchTemplateId() {
        return this.launchTemplateId;
    }

    /**
     * <p>
     * The ID of the launch template. You must specify either a template ID or a template name.
     * </p>
     *
     * @param launchTemplateId
     *        The ID of the launch template. You must specify either a template ID or a template name.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FleetLaunchTemplateSpecification withLaunchTemplateId(String launchTemplateId) {
        setLaunchTemplateId(launchTemplateId);
        return this;
    }

    /**
     * <p>
     * The name of the launch template. You must specify either a template name or a template ID.
     * </p>
     *
     * @param launchTemplateName
     *        The name of the launch template. You must specify either a template name or a template ID.
     */
    public void setLaunchTemplateName(String launchTemplateName) {
        this.launchTemplateName = launchTemplateName;
    }

    /**
     * <p>
     * The name of the launch template. You must specify either a template name or a template ID.
     * </p>
     *
     * @return The name of the launch template. You must specify either a template name or a template ID.
     */
    public String getLaunchTemplateName() {
        return this.launchTemplateName;
    }

    /**
     * <p>
     * The name of the launch template. You must specify either a template name or a template ID.
     * </p>
     *
     * @param launchTemplateName
     *        The name of the launch template. You must specify either a template name or a template ID.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FleetLaunchTemplateSpecification withLaunchTemplateName(String launchTemplateName) {
        setLaunchTemplateName(launchTemplateName);
        return this;
    }

    /**
     * <p>
     * The version number of the launch template. You must specify a version number.
     * </p>
     *
     * @param version
     *        The version number of the launch template. You must specify a version number.
     */
    public void setVersion(String version) {
        this.version = version;
    }

    /**
     * <p>
     * The version number of the launch template. You must specify a version number.
     * </p>
     *
     * @return The version number of the launch template. You must specify a version number.
     */
    public String getVersion() {
        return this.version;
    }

    /**
     * <p>
     * The version number of the launch template. You must specify a version number.
     * </p>
     *
     * @param version
     *        The version number of the launch template. You must specify a version number.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FleetLaunchTemplateSpecification withVersion(String version) {
        setVersion(version);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null fields are rendered; a separator follows every field
        // except Version, matching the generated-SDK output format exactly.
        StringBuilder sb = new StringBuilder("{");
        String id = getLaunchTemplateId();
        String name = getLaunchTemplateName();
        String ver = getVersion();
        if (id != null) {
            sb.append("LaunchTemplateId: ").append(id).append(",");
        }
        if (name != null) {
            sb.append("LaunchTemplateName: ").append(name).append(",");
        }
        if (ver != null) {
            sb.append("Version: ").append(ver);
        }
        return sb.append("}").toString();
    }

    /** Null-safe field comparison used by {@link #equals(Object)}. */
    private static boolean fieldEquals(String a, String b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof FleetLaunchTemplateSpecification)) {
            return false;
        }
        FleetLaunchTemplateSpecification that = (FleetLaunchTemplateSpecification) obj;
        return fieldEquals(getLaunchTemplateId(), that.getLaunchTemplateId())
                && fieldEquals(getLaunchTemplateName(), that.getLaunchTemplateName())
                && fieldEquals(getVersion(), that.getVersion());
    }

    @Override
    public int hashCode() {
        // Standard 31-based accumulation over the fields, in declaration order;
        // produces the same values as the unrolled generated version.
        int hashCode = 1;
        String[] fields = { getLaunchTemplateId(), getLaunchTemplateName(), getVersion() };
        for (String field : fields) {
            hashCode = 31 * hashCode + (field == null ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public FleetLaunchTemplateSpecification clone() {
        try {
            return (FleetLaunchTemplateSpecification) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
package org.xtuml.bp.als.oal.test; import java.io.StringReader; import java.util.UUID; import org.eclipse.jface.preference.IPreferenceStore; import antlr.RecognitionException; import antlr.TokenStreamException; import antlr.TokenStreamRecognitionException; import org.xtuml.bp.als.oal.OalLexer; import org.xtuml.bp.als.oal.OalParser; import org.xtuml.bp.als.oal.Oal_validate; import org.xtuml.bp.core.ActionHome_c; import org.xtuml.bp.core.Action_c; import org.xtuml.bp.core.Block_c; import org.xtuml.bp.core.ClassStateMachine_c; import org.xtuml.bp.core.Component_c; import org.xtuml.bp.core.CorePlugin; import org.xtuml.bp.core.DomainAsComponent_c; import org.xtuml.bp.core.Domain_c; import org.xtuml.bp.core.Gd_c; import org.xtuml.bp.core.ModelClass_c; import org.xtuml.bp.core.MooreActionHome_c; import org.xtuml.bp.core.Oalconstants_c; import org.xtuml.bp.core.Operation_c; import org.xtuml.bp.core.StateMachineState_c; import org.xtuml.bp.core.StateMachine_c; import org.xtuml.bp.core.Subsystem_c; import org.xtuml.bp.core.TransitionActionHome_c; import org.xtuml.bp.core.Transition_c; import org.xtuml.bp.core.common.BridgePointPreferencesStore; import org.xtuml.bp.core.common.ClassQueryInterface_c; import org.xtuml.bp.core.util.DomainUtil; import org.xtuml.bp.test.common.BaseTest; public class ComponentParamTest extends BaseTest { private static String m_workspace_path = ""; //$NON-NLS-1$ private static String m_comp_pkg_name = "ComponentSyntaxTest"; //$NON-NLS-1$ public ComponentParamTest() { super("Models", null); } /* (non-Javadoc) * @see junit.framework.TestCase#setUp() */ protected void setUp() throws Exception { super.setUp(); if (m_workspace_path.equals(""))//$NON-NLS-1$ { m_workspace_path = System.getProperty("WORKSPACE_PATH");//$NON-NLS-1$ } assertNotNull( m_workspace_path ); if (m_logfile_path == null || m_logfile_path.equals("")) { m_logfile_path = System.getProperty("LOGFILE_PATH"); } assertNotNull( m_logfile_path ); ensureAvailableAndLoaded("Models", 
m_comp_pkg_name, false, false, "Component Package"); IPreferenceStore store = CorePlugin.getDefault().getPreferenceStore(); store.setValue( BridgePointPreferencesStore.ALLOW_IMPLICIT_COMPONENT_ADDRESSING, true); // Now set up all the needed state and transition actions for this test populateStateMachineActivityInstances(); } public void testNoIncomingTransitions() { String x = parseAction("testVar = rcvd_evt.a;", STATE_NO_INCOMING_TRANSITIONS); assertEquals("Unexpected error:", ":1:20-20: Attempted to access parameter ->a<- when there are no incoming transitions.\nline 1:22: unexpected token: null\nline 1:22: expecting Semicolon, found 'null'\n", x); x = parseAction("testVar = param.a;", STATE_NO_INCOMING_TRANSITIONS); assertEquals("Unexpected error:", ":1:17-17: Attempted to access parameter ->a<- when there are no incoming transitions.\nline 1:19: unexpected token: null\nline 1:19: expecting Semicolon, found 'null'\n", x); } public void testNoParms() { String x = parseAction("testVar = rcvd_evt.a;", STATE_NO_SIGNAL_PARMS); assertEquals("Unexpected error:", ":1:20-20: The following incoming messages do not carry required parameter ->a<- noParms1, noParms2\nline 1:22: unexpected token: null\nline 1:22: expecting Semicolon, found 'null'\n", x); x = parseAction("testVar = param.a;", STATE_NO_SIGNAL_PARMS); assertEquals("Unexpected error:", ":1:17-17: The following incoming messages do not carry required parameter ->a<- noParms1, noParms2\nline 1:19: unexpected token: null\nline 1:19: expecting Semicolon, found 'null'\n", x); } public void testMatchingParms() { String x = parseAction("testVar1 = rcvd_evt.a; testVar2 = rcvd_evt.b;", STATE_MATCHING_SIGNAL_PARMS); assertEquals("Unexpected error:", "", x); x = parseAction("testVar1 = param.a; testVar2 = param.b;", STATE_MATCHING_SIGNAL_PARMS); assertEquals("Unexpected error:", "", x); } public void testMatchingParmsTestParmInSubset() { String x = parseAction("testVar = rcvd_evt.a;", 
STATE_NON_MATCHING_WITH_SUBSET_SIGNAL_PARMS); assertEquals("Unexpected error:", "", x); x = parseAction("testVar = param.a;", STATE_NON_MATCHING_WITH_SUBSET_SIGNAL_PARMS); assertEquals("Unexpected error:", "", x); } public void testMatchingParmsTestParmNotInSubset() { String x = parseAction("testVar = rcvd_evt.b;", STATE_NON_MATCHING_WITH_SUBSET_SIGNAL_PARMS); assertEquals("Unexpected error:", ":1:20-20: The following incoming messages do not carry required parameter ->b<- TwoParms2\nline 1:22: unexpected token: null\nline 1:22: expecting Semicolon, found 'null'\n", x); x = parseAction("testVar = param.b;", STATE_NON_MATCHING_WITH_SUBSET_SIGNAL_PARMS); assertEquals("Unexpected error:", ":1:17-17: The following incoming messages do not carry required parameter ->b<- TwoParms2\nline 1:19: unexpected token: null\nline 1:19: expecting Semicolon, found 'null'\n", x); } public void testOneNoEventTransition() { String x = parseAction("testVar = rcvd_evt.a;", STATE_ONE_NO_EVENT_TRANSITION); assertEquals("Unexpected error:", ":1:20-20: Attempted to access parameter ->a<- when one or more incoming transitions do not have events assigned.\nline 1:22: unexpected token: null\nline 1:22: expecting Semicolon, found 'null'\n", x); x = parseAction("testVar = param.a;", STATE_ONE_NO_EVENT_TRANSITION); assertEquals("Unexpected error:", ":1:17-17: Attempted to access parameter ->a<- when one or more incoming transitions do not have events assigned.\nline 1:19: unexpected token: null\nline 1:19: expecting Semicolon, found 'null'\n", x); } public void testAllNoEventTransitions() { String x = parseAction("testVar = rcvd_evt.a;", STATE_ALL_NO_EVENT_TRANSITIONS); assertEquals("Unexpected error:", ":1:20-20: Attempted to access parameter ->a<- when one or more incoming transitions do not have events assigned.\nline 1:22: unexpected token: null\nline 1:22: expecting Semicolon, found 'null'\n", x); x = parseAction("testVar = param.a;", STATE_ALL_NO_EVENT_TRANSITIONS); assertEquals("Unexpected 
error:", ":1:17-17: Attempted to access parameter ->a<- when one or more incoming transitions do not have events assigned.\nline 1:19: unexpected token: null\nline 1:19: expecting Semicolon, found 'null'\n", x); } public void testNoEventTransitionAction() { String x = parseAction("testVar = rcvd_evt.a; testVar2 = testVar;", TRANS_NO_SIGNAL); assertEquals("Unexpected error:", ":1:20-20: Attempted to access parameter ->a<- when associated transition does not have an event assigned.\nline 1:23: expecting Semicolon, found 'testVar2'\nline 1:34: unexpected token: testVar\n", x); x = parseAction("testVar = param.a; testVar2 = testVar;", TRANS_NO_SIGNAL); assertEquals("Unexpected error:", ":1:17-17: Attempted to access parameter ->a<- when associated transition does not have an event assigned.\nline 1:20: expecting Semicolon, found 'testVar2'\nline 1:31: unexpected token: testVar\n", x); } public void testNoParmTransitionAction() { String x = parseAction("testVar = rcvd_evt.a; testVar2 = testVar;", TRANS_NO_SIGNAL_PARMS); assertEquals("Unexpected error:", ":1:20-20: Parameter ->a<- is not carried by signal noParms1\nline 1:23: expecting Semicolon, found 'testVar2'\nline 1:34: unexpected token: testVar\n", x); x = parseAction("testVar = param.a; testVar2 = testVar;", TRANS_NO_SIGNAL_PARMS); assertEquals("Unexpected error:", ":1:17-17: Parameter ->a<- is not carried by signal noParms1\nline 1:20: expecting Semicolon, found 'testVar2'\nline 1:31: unexpected token: testVar\n", x); } public void testOneParmTransitionActionParmExists() { String x = parseAction("testVar = rcvd_evt.a; testVar2 = testVar;", TRANS_ONE_SIGNAL_PARM); assertEquals("Unexpected error:", "", x); x = parseAction("testVar = param.a; testVar2 = testVar;", TRANS_ONE_SIGNAL_PARM); assertEquals("Unexpected error:", "", x); } public void testOneParmTransitionActionParmDoesNotExist() { String x = parseAction("testVar = rcvd_evt.b; testVar2 = testVar;", TRANS_ONE_SIGNAL_PARM); assertEquals("Unexpected error:", 
":1:20-20: Parameter ->b<- is not carried by signal OneParm\nline 1:23: expecting Semicolon, found 'testVar2'\nline 1:34: unexpected token: testVar\n", x); x = parseAction("testVar = param.b; testVar2 = testVar;", TRANS_ONE_SIGNAL_PARM); assertEquals("Unexpected error:", ":1:17-17: Parameter ->b<- is not carried by signal OneParm\nline 1:20: expecting Semicolon, found 'testVar2'\nline 1:31: unexpected token: testVar\n", x); } public void testTwoParmTransitionActionParmsExist() { String x = parseAction("testVar1 = rcvd_evt.a; testVar2 = rcvd_evt.b; testVar3 = testVar1 + testVar2;", TRANS_TWO_SIGNAL_PARMS); assertEquals("Unexpected error:", "", x); x = parseAction("testVar1 = param.a; testVar2 = param.b; testVar3 = testVar1 + testVar2;", TRANS_TWO_SIGNAL_PARMS); assertEquals("Unexpected error:", "", x); } public void testTwoParmTransitionActionOneParmDoesNotExist() { String x = parseAction("testVar1 = rcvd_evt.a; testVar2 = rcvd_evt.c; testVar3 = testVar1 + testVar2;", TRANS_TWO_SIGNAL_PARMS); assertEquals("Unexpected error:", ":1:44-44: Parameter ->c<- is not carried by signal TwoParms1\nline 1:47: expecting Semicolon, found 'testVar3'\nline 1:58: unexpected token: testVar1\nline 1:69: unexpected token: testVar2\n", x); x = parseAction("testVar1 = param.a; testVar2 = param.c; testVar3 = testVar1 + testVar2;", TRANS_TWO_SIGNAL_PARMS); assertEquals("Unexpected error:", ":1:38-38: Parameter ->c<- is not carried by signal TwoParms1\nline 1:41: expecting Semicolon, found 'testVar3'\nline 1:52: unexpected token: testVar1\nline 1:63: unexpected token: testVar2\n", x); } private String parseAction(String stmts, int actNum) { Action_c act = acts[actNum]; OalLexer lexer = new OalLexer(new StringReader(stmts)); OalParser parser = new OalParser(modelRoot, lexer); parser.m_oal_context = new Oal_validate(DomainUtil.getDomain(act)); UUID actID = Gd_c.Null_unique_id(); act.setAction_semantics_internal(stmts); actID = act.getAct_id(); TransitionActionHome_c tah = 
TransitionActionHome_c. getOneSM_TAHOnR513(ActionHome_c.getOneSM_AHOnR514(act)); int opType = tah != null ? Oalconstants_c.TRANSITION_TYPE : Oalconstants_c.STATE_TYPE; try { parser.action(actID, opType); } catch (TokenStreamException e) { Block_c.Clearcurrentscope(modelRoot, parser.m_oal_context.m_act_id); if ( e instanceof TokenStreamRecognitionException ) { TokenStreamRecognitionException tsre = (TokenStreamRecognitionException)e; parser.reportError(tsre.recog); } else { fail("Token stream exception in parser"); } } catch (RecognitionException e) { Block_c.Clearcurrentscope(modelRoot, parser.m_oal_context.m_act_id); parser.reportError(e); } catch (InterruptedException ie){ } return parser.m_output; } Action_c [] acts = null; final int STATE_NO_INCOMING_TRANSITIONS = 0; final int STATE_NO_SIGNAL_PARMS = 1; final int STATE_NON_MATCHING_WITH_SUBSET_SIGNAL_PARMS = 2; final int STATE_NON_MATCHING_WITH_NO_SUBSET_SIGNAL_PARMS = 3; final int STATE_MATCHING_SIGNAL_PARMS = 4; final int STATE_ONE_NO_EVENT_TRANSITION = 5; final int STATE_ALL_NO_EVENT_TRANSITIONS = 6; final int TRANS_NO_SIGNAL = 7; final int TRANS_NO_SIGNAL_PARMS = 8; final int TRANS_ONE_SIGNAL_PARM = 9; final int TRANS_TWO_SIGNAL_PARMS = 10; String [] testStateNames = { "No Incoming Transitions", "No Signal Parameters", "Non Matching with Common Subset", "Non Matching with no Subset", "Matching Signal Parameters", "One No Event Transition", "All No Event Transitions"}; String [] testTransitionNames = { "No Event Assigned", "noParms1", "oneParm", "twoParms1"}; private void populateStateMachineActivityInstances() { acts = new Action_c[testStateNames.length + testTransitionNames.length]; // Get the test class from the model class ClassByNameQuery implements ClassQueryInterface_c { ClassByNameQuery(String p) { m_p = p; } private String m_p; public boolean evaluate(Object inst) { ModelClass_c selected = (ModelClass_c)inst; return selected.getName().equals(m_p); } } final String testClassName = "Signal Parameter 
Test"; ModelClass_c testClass = ModelClass_c.ModelClassInstance(modelRoot, new ClassByNameQuery(testClassName)); assertNotNull("Test Class '" + testClassName + "' not found in ComponentSyntaxTest model.", testClass); // Get all the test state actions . . . class StateByNameQuery implements ClassQueryInterface_c { StateByNameQuery(String p) { m_p = p; } private String m_p; public boolean evaluate(Object inst) { StateMachineState_c selected = (StateMachineState_c)inst; return selected.getName().equals(m_p); } } StateMachine_c sm = StateMachine_c.getOneSM_SMOnR517( ClassStateMachine_c.getOneSM_ASMOnR519(testClass)); for (int i=0; i < testStateNames.length; i++) { StateMachineState_c state = StateMachineState_c.getOneSM_STATEOnR501(sm, new StateByNameQuery(testStateNames[i])); assertNotNull("Could not find test state '" + testStateNames[i] + "'.", state ); Action_c act = Action_c.getOneSM_ACTOnR514( ActionHome_c.getOneSM_AHOnR513( MooreActionHome_c.getOneSM_MOAHOnR511(state))); assertNotNull("Could not find test action for state '" + testStateNames[i] + "'.", act ); acts[i] = act; } // Get all the test transition actions class TransitionByNameQuery implements ClassQueryInterface_c { TransitionByNameQuery(String p) { m_p = p; } private String m_p; public boolean evaluate(Object inst) { Transition_c selected = (Transition_c)inst; return selected.getName().contains(m_p); } } for (int i=0; i < testTransitionNames.length; i++) { Transition_c trans = Transition_c.getOneSM_TXNOnR505(sm, new TransitionByNameQuery(testTransitionNames[i])); assertNotNull("Could not find test transition '" + testTransitionNames[i] + "'.", trans ); Action_c act = Action_c.getOneSM_ACTOnR514( ActionHome_c.getOneSM_AHOnR513( TransitionActionHome_c.getOneSM_TAHOnR530(trans))); assertNotNull("Could not find test action for transition '" + testTransitionNames[i] + "'.", act ); acts[testStateNames.length + i] = act; } } }
package com.hubspot.jinjava.lib.tag.eager;

import com.hubspot.jinjava.interpret.DeferredValue;
import com.hubspot.jinjava.interpret.DeferredValueException;
import com.hubspot.jinjava.interpret.InterpretException;
import com.hubspot.jinjava.interpret.JinjavaInterpreter;
import com.hubspot.jinjava.interpret.OutputTooBigException;
import com.hubspot.jinjava.interpret.TemplateError;
import com.hubspot.jinjava.interpret.TemplateSyntaxException;
import com.hubspot.jinjava.lib.tag.ElseIfTag;
import com.hubspot.jinjava.lib.tag.ElseTag;
import com.hubspot.jinjava.lib.tag.IfTag;
import com.hubspot.jinjava.tree.Node;
import com.hubspot.jinjava.tree.TagNode;
import com.hubspot.jinjava.util.EagerExpressionResolver.EagerExpressionResult;
import com.hubspot.jinjava.util.EagerReconstructionUtils;
import com.hubspot.jinjava.util.LengthLimitingStringBuilder;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;

/**
 * Eager-execution decorator for {@link IfTag}. When normal interpretation of an
 * {@code {% if %}} block hits a deferred value, this tag partially evaluates the
 * branches it can, drops branches that are provably not taken, and reconstructs
 * the remaining template text so it can be re-rendered later.
 */
public class EagerIfTag extends EagerTagDecorator<IfTag> {

  public EagerIfTag() {
    super(new IfTag());
  }

  public EagerIfTag(IfTag ifTag) {
    super(ifTag);
  }

  /**
   * Tries normal {@link IfTag} interpretation first; falls back to eager
   * (partial) interpretation when a deferred value or syntax problem surfaces.
   *
   * @throws DeferredValueException if the eagerly-produced output exceeds the
   *     configured size limit (the error is also recorded on the interpreter).
   */
  @Override
  public String interpret(TagNode tagNode, JinjavaInterpreter interpreter) {
    try {
      return getTag().interpret(tagNode, interpreter);
    } catch (DeferredValueException | TemplateSyntaxException e) {
      try {
        return EagerReconstructionUtils.wrapInAutoEscapeIfNeeded(
          eagerInterpret(tagNode, interpreter, e),
          interpreter
        );
      } catch (OutputTooBigException e1) {
        interpreter.addError(TemplateError.fromOutputTooBigException(e1));
        throw new DeferredValueException(
          String.format("Output too big for eager execution: %s", e1.getMessage())
        );
      }
    }
  }

  /**
   * Eagerly renders the branches in a speculative child context, then appends
   * the reconstructed {@code {% endif %}}.
   *
   * @param e the exception that triggered eager interpretation
   * @throws TemplateSyntaxException if the if-tag has no condition expression
   */
  @Override
  public String eagerInterpret(
    TagNode tagNode,
    JinjavaInterpreter interpreter,
    InterpretException e
  ) {
    if (StringUtils.isBlank(tagNode.getHelpers())) {
      throw new TemplateSyntaxException(
        interpreter,
        tagNode.getMaster().getImage(),
        "Tag 'if' expects expression"
      );
    }
    LengthLimitingStringBuilder result = new LengthLimitingStringBuilder(
      interpreter.getConfig().getMaxOutputSize()
    );
    // Render the branches speculatively so any bindings they create do not
    // leak into the parent context.
    result.append(
      EagerReconstructionUtils
        .executeInChildContext(
          eagerInterpreter ->
            EagerExpressionResult.fromString(
              eagerRenderBranches(tagNode, eagerInterpreter, e)
            ),
          interpreter,
          false,
          false,
          true
        )
        .asTemplateString()
    );
    // Keep whitespace after {% endif %} intact in the reconstructed output.
    tagNode.getMaster().setRightTrimAfterEnd(false);
    result.append(EagerReconstructionUtils.reconstructEnd(tagNode));
    return result.toString();
  }

  /**
   * Walks the if/elif/else branches: drops branches that cannot be taken,
   * rewrites a branch known to be taken into a plain {@code {% else %}}
   * (discarding everything after it), and reconstructs the tokens for
   * branches whose condition is still deferred.
   */
  public String eagerRenderBranches(
    TagNode tagNode,
    JinjavaInterpreter interpreter,
    InterpretException e
  ) {
    // line number of the last attempted resolveELExpression
    final int deferredLineNumber = interpreter.getLineNumber();
    final int deferredPosition = interpreter.getPosition();
    // If the branch is impossible, it should be removed.
    boolean definitelyDrop = shouldDropBranch(
      tagNode,
      interpreter,
      deferredLineNumber,
      deferredPosition
    );
    // If an ("elseif") branch would definitely get executed,
    // change it to an "else" tag and drop all the subsequent branches.
    // We know this has to start as false otherwise IfTag would have chosen
    // the first branch.
    boolean definitelyExecuted = false;
    StringBuilder sb = new StringBuilder();
    sb.append(
      getEagerImage(
        buildToken(tagNode, e, deferredLineNumber, deferredPosition),
        interpreter
      )
    );
    int branchStart = 0;
    int childrenSize = tagNode.getChildren().size();
    while (branchStart < childrenSize) {
      int branchEnd = findNextElseToken(tagNode, branchStart);
      if (!definitelyDrop) {
        int finalBranchStart = branchStart;
        // Evaluate this branch speculatively; bindings are reverted below so
        // the next branch starts from the same state.
        EagerExecutionResult result = EagerReconstructionUtils.executeInChildContext(
          eagerInterpreter ->
            EagerExpressionResult.fromString(
              evaluateBranch(tagNode, finalBranchStart, branchEnd, interpreter)
            ),
          interpreter,
          false,
          false,
          true
        );
        sb.append(result.getResult());
        resetBindingsForNextBranch(interpreter, result);
      }
      if (branchEnd >= childrenSize || definitelyExecuted) {
        break;
      }
      TagNode caseNode = (TagNode) tagNode.getChildren().get(branchEnd);
      definitelyDrop =
        caseNode.getName().equals(ElseIfTag.TAG_NAME) &&
        shouldDropBranch(caseNode, interpreter, deferredLineNumber, deferredPosition);
      if (!definitelyDrop) {
        definitelyExecuted =
          caseNode.getName().equals(ElseTag.TAG_NAME) ||
          isDefinitelyExecuted(caseNode, interpreter, deferredLineNumber);
        if (definitelyExecuted) {
          // A branch known to run becomes an unconditional else.
          sb.append(
            String.format(
              "%s else %s",
              caseNode.getSymbols().getExpressionStartWithTag(),
              caseNode.getSymbols().getExpressionEndWithTag()
            )
          );
        } else {
          sb.append(
            getEagerImage(
              buildToken(caseNode, e, deferredLineNumber, deferredPosition),
              interpreter
            )
          );
        }
      }
      branchStart = branchEnd + 1;
    }
    return sb.toString();
  }

  /**
   * Reverts bindings made while speculatively rendering a branch so that the
   * next branch is evaluated against the pre-branch context: non-deferred
   * values are restored directly (only allowed when the execution mode
   * supports context reverting), and deferred values are restored from their
   * recorded original values.
   */
  private void resetBindingsForNextBranch(
    JinjavaInterpreter interpreter,
    EagerExecutionResult result
  ) {
    Set<Entry<String, Object>> nonDeferredBindingsToRevert = result
      .getSpeculativeBindings()
      .entrySet()
      .stream()
      .filter(
        entry ->
          interpreter.getContext().containsKey(entry.getKey()) &&
          !(interpreter.getContext().get(entry.getKey()) instanceof DeferredValue)
      )
      .collect(Collectors.toSet());
    if (!nonDeferredBindingsToRevert.isEmpty()) {
      if (!interpreter.getConfig().getExecutionMode().useEagerContextReverting()) {
        throw new DeferredValueException("Cannot revert value");
      }
      nonDeferredBindingsToRevert.forEach(
        entry -> interpreter.getContext().put(entry.getKey(), entry.getValue())
      );
    }
    result
      .getSpeculativeBindings()
      .keySet()
      .stream()
      .filter(
        key ->
          interpreter.getContext().containsKey(key) &&
          interpreter.getContext().get(key) instanceof DeferredValue
      )
      .forEach(
        key -> {
          if (
            ((DeferredValue) interpreter.getContext().get(key)).getOriginalValue() !=
            null
          ) {
            interpreter
              .getContext()
              .put(
                key,
                ((DeferredValue) interpreter.getContext().get(key)).getOriginalValue()
              );
          }
        }
      );
  }

  /** Renders the child nodes in [startIdx, endIdx) and concatenates the output. */
  private String evaluateBranch(
    TagNode tagNode,
    int startIdx,
    int endIdx,
    JinjavaInterpreter interpreter
  ) {
    StringBuilder sb = new StringBuilder();
    for (int i = startIdx; i < endIdx; i++) {
      Node child = tagNode.getChildren().get(i);
      sb.append(child.render(interpreter).getValue());
    }
    return sb.toString();
  }

  /**
   * Returns the index of the next {@code elif}/{@code else} tag child at or
   * after {@code startIdx}, or the child count if there is none.
   *
   * <p>FIX: the original condition was {@code A && B || C}; since {@code &&}
   * binds tighter than {@code ||}, the TagNode type guard did not apply to the
   * {@code else}-name check, so a non-TagNode child could match (or NPE on a
   * null name). The parentheses below apply the guard to both name checks.
   */
  private int findNextElseToken(TagNode tagNode, int startIdx) {
    int i;
    for (i = startIdx; i < tagNode.getChildren().size(); i++) {
      Node childNode = tagNode.getChildren().get(i);
      if (
        TagNode.class.isAssignableFrom(childNode.getClass()) &&
        (
          childNode.getName().equals(ElseIfTag.TAG_NAME) ||
          childNode.getName().equals(ElseTag.TAG_NAME)
        )
      ) {
        return i;
      }
    }
    return i;
  }

  /**
   * A branch is dropped when the deferred exception occurred on a later
   * branch (so this one was already passed over), or when its condition
   * evaluates to definitively false. Deferred conditions are kept.
   */
  private boolean shouldDropBranch(
    TagNode tagNode,
    JinjavaInterpreter eagerInterpreter,
    int deferredLineNumber,
    int deferredPosition
  ) {
    if (deferredLineNumber > tagNode.getLineNumber()) {
      return true; // Deferred value thrown on a later branch so we can drop this one.
    } else if (
      deferredLineNumber == tagNode.getLineNumber() &&
      deferredPosition >= tagNode.getStartPosition()
    ) {
      return deferredPosition > tagNode.getStartPosition(); // false if they are equal
    }
    // the tag node is after the deferred exception location
    try {
      return !getTag().isPositiveIfElseNode(tagNode, eagerInterpreter);
    } catch (DeferredValueException e) {
      return false;
    }
  }

  /**
   * A branch is definitely executed when its condition evaluates to true
   * without deferral; a condition that itself defers is not "definite".
   */
  private boolean isDefinitelyExecuted(
    TagNode tagNode,
    JinjavaInterpreter eagerInterpreter,
    int deferredLineNumber
  ) {
    if (deferredLineNumber == tagNode.getLineNumber()) {
      return false; // Deferred value thrown when checking if this branch would be executed.
    }
    try {
      return getTag().isPositiveIfElseNode(tagNode, eagerInterpreter);
    } catch (DeferredValueException e) {
      return false;
    }
  }
}
package com.example.android.sunshine.app;

import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.Fragment;
import android.text.format.Time;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * Fragment showing a list of daily weather forecasts fetched from the
 * OpenWeatherMap API. The location and unit system come from shared
 * preferences; tapping a row opens {@link DetailActivity} with the row text.
 *
 * Created by Alucard on 6/12/2015.
 */
public class ForecastFragment extends Fragment {

    ArrayAdapter<String> mForecastAdapter;

    public ForecastFragment() {
    }

    // ============= Fragment Initialisation =============
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Enable options menu on fragment
        setHasOptionsMenu(true);
    }

    // ============= Menu Initialisation ===================
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // Inflate the menu; this adds items to the action bar if it is present.
        inflater.inflate(R.menu.forecastfragment, menu);
    }

    // ============= Menu Item actions ==================
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_refresh) {
            updateWeather();
            return true;
        } else if (id == R.id.action_preferred_location) {
            openInMaps();
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Opens the preferred location (from the "location" preference) in any
     * installed maps app via a geo: intent; shows a toast if none can handle it.
     */
    private void openInMaps() {
        SharedPreferences sharedPrefs =
                PreferenceManager.getDefaultSharedPreferences(getActivity().getApplicationContext());
        // build location from preferences
        Uri location = Uri.parse("geo:0,0?q=" + sharedPrefs.getString("location", "94043"));
        Intent intent = new Intent(Intent.ACTION_VIEW, location);
        // make sure there's an app available to respond before starting the intent
        PackageManager packageManager = getActivity().getPackageManager();
        List activities = packageManager.queryIntentActivities(intent,
                PackageManager.MATCH_DEFAULT_ONLY);
        boolean isIntentSafe = !activities.isEmpty();
        if (isIntentSafe) {
            startActivity(intent);
        } else {
            Toast.makeText(getActivity().getApplicationContext(),
                    "Not apps to launch activity", Toast.LENGTH_LONG).show();
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        // Refresh the forecast every time the fragment becomes visible.
        updateWeather();
    }

    /** Kicks off a background fetch using the preferred location and unit system. */
    private void updateWeather() {
        FetchWeatherTask weatherTask = new FetchWeatherTask();
        SharedPreferences sharedPrefs =
                PreferenceManager.getDefaultSharedPreferences(getActivity().getApplicationContext());
        // city / zip code and unit type from settings
        String location = sharedPrefs.getString("location", "94043");
        String unit = sharedPrefs.getString("unit", "metric");
        weatherTask.execute(location, unit);
    }

    // ============= UI Initialisation ===================
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Adapter backing the forecast list; starts empty and is filled by the task.
        mForecastAdapter = new ArrayAdapter<String>(getActivity(),
                // ID of list item layout
                R.layout.list_item_forecast,
                // ID of the textview to populate
                R.id.list_item_forecast_textview,
                // data to use
                new ArrayList<String>());

        View rootView = inflater.inflate(R.layout.fragment_main, container, false);

        // Get ListView reference then set adapter on it
        ListView listView = (ListView) rootView.findViewById(R.id.listview_forecast);
        listView.setAdapter(mForecastAdapter);

        // Tapping a row opens the detail screen with the row's forecast text.
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                Context context = getActivity().getApplicationContext();
                String forecast = parent.getItemAtPosition(position).toString();
                Intent intent = new Intent(context, DetailActivity.class)
                        .putExtra(Intent.EXTRA_TEXT, forecast); // key-value pair
                startActivity(intent);
            }
        });
        return rootView;
    }

    /**
     * Background task: params[0] = location query, params[1] = unit system.
     * Returns one formatted string per forecast day, or null on failure.
     */
    public class FetchWeatherTask extends AsyncTask<String, Void, String[]> {

        private final String LOG_TAG = FetchWeatherTask.class.getSimpleName();

        // ============ PARSE DATA FROM SERVER =================================

        /**
         * Formats a unix-epoch millisecond timestamp as e.g. "Mon Jun 01".
         * (The date/time conversion code is going to be moved outside the
         * asynctask later; broken out into its own method for convenience.)
         */
        private String getReadableDateString(long time) {
            SimpleDateFormat shortenedDateFormat = new SimpleDateFormat("EEE MMM dd");
            return shortenedDateFormat.format(time);
        }

        /**
         * Prepare the weather high/lows for presentation; the user doesn't
         * care about tenths of a degree, so round both values.
         */
        private String formatHighLows(double high, double low) {
            long roundedHigh = Math.round(high);
            long roundedLow = Math.round(low);
            return roundedHigh + "/" + roundedLow;
        }

        /**
         * Parses the OWM daily-forecast JSON into "Day - description - hi/low"
         * strings, one per day, at most {@code numDays} entries.
         *
         * @throws JSONException if the response does not match the expected schema
         */
        private String[] getWeatherDataFromJson(String forecastJsonStr, int numDays)
                throws JSONException {
            // These are the names of the JSON objects that need to be extracted.
            final String OWM_LIST = "list";
            final String OWM_WEATHER = "weather";
            final String OWM_TEMPERATURE = "temp";
            final String OWM_MAX = "max";
            final String OWM_MIN = "min";
            final String OWM_DESCRIPTION = "main";

            JSONObject forecastJson = new JSONObject(forecastJsonStr);
            JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST);

            // OWM returns daily forecasts based upon the local time of the city,
            // so we need the GMT offset to translate this data properly. The data
            // is sent in-order with the first day being the current day, so we
            // derive a normalized UTC date for all entries.
            Time dayTime = new Time();
            dayTime.setToNow();
            int julianStartDay = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff);
            // now we work exclusively in UTC
            dayTime = new Time();

            // FIX: never exceed the result array even if the server returns
            // more entries than requested.
            int days = Math.min(numDays, weatherArray.length());
            String[] resultStrs = new String[days];
            for (int i = 0; i < days; i++) {
                // Get the JSON object representing the day
                JSONObject dayForecast = weatherArray.getJSONObject(i);

                // The date/time is returned as a long; convert to something
                // human-readable (cheating to UTC, which is what we want anyhow).
                long dateTime = dayTime.setJulianDay(julianStartDay + i);
                String day = getReadableDateString(dateTime);

                // description is in a child array called "weather", 1 element long.
                JSONObject weatherObject =
                        dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0);
                String description = weatherObject.getString(OWM_DESCRIPTION);

                // Temperatures are in a child object called "temp".
                JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE);
                double high = temperatureObject.getDouble(OWM_MAX);
                double low = temperatureObject.getDouble(OWM_MIN);
                String highAndLow = formatHighLows(high, low);

                resultStrs[i] = day + " - " + description + " - " + highAndLow;
            }
            return resultStrs;
        }

        @Override
        protected String[] doInBackground(String... params) {
            // These two need to be declared outside the try/catch
            // so that they can be closed in the finally block.
            HttpURLConnection urlConnection = null;
            BufferedReader reader = null;

            // Will contain the raw JSON response as a string.
            String forecastJsonStr = null;

            try {
                // Construct the URL for the OpenWeatherMap query; parameters are
                // documented at http://openweathermap.org/API#forecast
                Uri.Builder uriBuilder = new Uri.Builder();
                uriBuilder.scheme("http")
                        .authority("api.openweathermap.org")
                        .appendPath("data")
                        .appendPath("2.5")
                        .appendPath("forecast")
                        .appendPath("daily")
                        .appendQueryParameter("q", params[0])
                        .appendQueryParameter("mode", "json")
                        .appendQueryParameter("units", params[1])
                        .appendQueryParameter("cnt", "7")
                        .build();

                URL url = new URL(uriBuilder.toString());

                // Create the request to OpenWeatherMap, and open the connection
                urlConnection = (HttpURLConnection) url.openConnection();
                urlConnection.setRequestMethod("GET");
                urlConnection.connect();

                // Read the input stream into a String
                InputStream inputStream = urlConnection.getInputStream();
                StringBuffer buffer = new StringBuffer();
                if (inputStream == null) {
                    // Nothing to do.
                    return null;
                }
                reader = new BufferedReader(new InputStreamReader(inputStream));

                String line;
                while ((line = reader.readLine()) != null) {
                    // Since it's JSON, the newline isn't necessary for parsing,
                    // but it makes debugging a *lot* easier.
                    buffer.append(line + "\n");
                }

                if (buffer.length() == 0) {
                    // Stream was empty. No point in parsing.
                    return null;
                }
                forecastJsonStr = buffer.toString();
            } catch (IOException e) {
                Log.e(LOG_TAG, "Error ", e);
                // If the code didn't successfully get the weather data, there's
                // no point in attempting to parse it.
                return null;
            } finally {
                // FIX: the finally block now only releases resources. The
                // original returned the parse result from inside finally, which
                // discarded pending returns/exceptions and attempted to parse
                // even when the fetch had failed (forecastJsonStr == null).
                if (urlConnection != null) {
                    urlConnection.disconnect();
                }
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (final IOException e) {
                        Log.e(LOG_TAG, "Error closing stream", e);
                    }
                }
            }

            if (forecastJsonStr == null) {
                return null;
            }
            try {
                return getWeatherDataFromJson(forecastJsonStr, 7);
            } catch (JSONException e) {
                Log.e(LOG_TAG, e.getMessage(), e);
                return null;
            }
        }

        @Override
        protected void onPostExecute(String[] result) {
            // FIX: result is null when the fetch or parse failed; the original
            // iterated over it unconditionally and crashed with an NPE.
            if (result == null) {
                return;
            }
            mForecastAdapter.clear();
            for (String dayForecastStr : result) {
                mForecastAdapter.add(dayForecastStr);
            }
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.client.cli; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.EnumMap; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Scanner; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.time.DateFormatUtils; import org.apache.commons.lang.time.DurationFormatUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Time; 
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueStatistics; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.codehaus.jettison.json.JSONObject; public class TopCLI extends YarnCLI { private static final Log LOG = LogFactory.getLog(TopCLI.class); private String CLEAR = "\u001b[2J"; private String CLEAR_LINE = "\u001b[2K"; private String SET_CURSOR_HOME = "\u001b[H"; private String CHANGE_BACKGROUND = "\u001b[7m"; private String RESET_BACKGROUND = "\u001b[0m"; private String SET_CURSOR_LINE_6_COLUMN_0 = "\u001b[6;0f"; // guava cache for getapplications call protected Cache<GetApplicationsRequest, List<ApplicationReport>> applicationReportsCache = CacheBuilder.newBuilder().maximumSize(1000) .expireAfterWrite(5, TimeUnit.SECONDS).build(); enum DisplayScreen { TOP, HELP, SORT, FIELDS } enum Columns { // in the order in which they should be displayed APPID, USER, TYPE, QUEUE, PRIORITY, CONT, RCONT, VCORES, RVCORES, MEM, RMEM, VCORESECS, MEMSECS, PROGRESS, TIME, NAME } static class ColumnInformation { String header; String format; boolean display; // should we show this field or not String description; String key; // key to press for sorting/toggling field public ColumnInformation(String header, String format, boolean display, String description, String key) { this.header = header; this.format = format; this.display = display; this.description = description; this.key = key; } } private static class ApplicationInformation { final String appid; final String user; final String type; final int priority; final int usedContainers; final int 
reservedContainers; final long usedMemory; final long reservedMemory; final int usedVirtualCores; final int reservedVirtualCores; final int attempts; final float progress; final String state; long runningTime; final String time; final String name; final int nodes; final String queue; final long memorySeconds; final long vcoreSeconds; final EnumMap<Columns, String> displayStringsMap; ApplicationInformation(ApplicationReport appReport) { displayStringsMap = new EnumMap<>(Columns.class); appid = appReport.getApplicationId().toString(); displayStringsMap.put(Columns.APPID, appid); user = appReport.getUser(); displayStringsMap.put(Columns.USER, user); type = appReport.getApplicationType().toLowerCase(); displayStringsMap.put(Columns.TYPE, type); state = appReport.getYarnApplicationState().toString().toLowerCase(); name = appReport.getName(); displayStringsMap.put(Columns.NAME, name); queue = appReport.getQueue(); displayStringsMap.put(Columns.QUEUE, queue); priority = 0; usedContainers = appReport.getApplicationResourceUsageReport().getNumUsedContainers(); displayStringsMap.put(Columns.CONT, String.valueOf(usedContainers)); reservedContainers = appReport.getApplicationResourceUsageReport() .getNumReservedContainers(); displayStringsMap.put(Columns.RCONT, String.valueOf(reservedContainers)); usedVirtualCores = appReport.getApplicationResourceUsageReport().getUsedResources() .getVirtualCores(); displayStringsMap.put(Columns.VCORES, String.valueOf(usedVirtualCores)); usedMemory = appReport.getApplicationResourceUsageReport().getUsedResources() .getMemory() / 1024; displayStringsMap.put(Columns.MEM, String.valueOf(usedMemory) + "G"); reservedVirtualCores = appReport.getApplicationResourceUsageReport().getReservedResources() .getVirtualCores(); displayStringsMap.put(Columns.RVCORES, String.valueOf(reservedVirtualCores)); reservedMemory = appReport.getApplicationResourceUsageReport().getReservedResources() .getMemory() / 1024; displayStringsMap.put(Columns.RMEM, 
String.valueOf(reservedMemory) + "G"); attempts = appReport.getCurrentApplicationAttemptId().getAttemptId(); nodes = 0; runningTime = Time.now() - appReport.getStartTime(); time = DurationFormatUtils.formatDuration(runningTime, "dd:HH:mm"); displayStringsMap.put(Columns.TIME, String.valueOf(time)); progress = appReport.getProgress() * 100; displayStringsMap.put(Columns.PROGRESS, String.format("%.2f", progress)); // store in GBSeconds memorySeconds = appReport.getApplicationResourceUsageReport().getMemorySeconds() / 1024; displayStringsMap.put(Columns.MEMSECS, String.valueOf(memorySeconds)); vcoreSeconds = appReport.getApplicationResourceUsageReport().getVcoreSeconds(); displayStringsMap.put(Columns.VCORESECS, String.valueOf(vcoreSeconds)); } } // all the sort comparators public static final Comparator<ApplicationInformation> AppIDComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.appid.compareTo(a2.appid); } }; public static final Comparator<ApplicationInformation> UserComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.user.compareTo(a2.user); } }; public static final Comparator<ApplicationInformation> AppTypeComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.type.compareTo(a2.type); } }; public static final Comparator<ApplicationInformation> QueueNameComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.queue.compareTo(a2.queue); } }; public static final Comparator<ApplicationInformation> UsedContainersComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.usedContainers - 
a2.usedContainers; } }; public static final Comparator<ApplicationInformation> ReservedContainersComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.reservedContainers - a2.reservedContainers; } }; public static final Comparator<ApplicationInformation> UsedMemoryComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return Long.valueOf(a1.usedMemory).compareTo(a2.usedMemory); } }; public static final Comparator<ApplicationInformation> ReservedMemoryComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return Long.valueOf(a1.reservedMemory).compareTo(a2.reservedMemory); } }; public static final Comparator<ApplicationInformation> UsedVCoresComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.usedVirtualCores - a2.usedVirtualCores; } }; public static final Comparator<ApplicationInformation> ReservedVCoresComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.reservedVirtualCores - a2.reservedVirtualCores; } }; public static final Comparator<ApplicationInformation> VCoreSecondsComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return Long.valueOf(a1.vcoreSeconds).compareTo(a2.vcoreSeconds); } }; public static final Comparator<ApplicationInformation> MemorySecondsComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return Long.valueOf(a1.memorySeconds).compareTo(a2.memorySeconds); } }; public static final 
// NOTE(review): interior of the YARN "yarn top" CLI class; the class header and
// the earlier comparator fields are outside this chunk. Formatting restored from
// a collapsed one-line paste — code tokens unchanged, only comments/newlines added.
// Orders applications by reported progress (ascending).
Comparator<ApplicationInformation> ProgressComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return Float.compare(a1.progress, a2.progress); } };
// Orders applications by elapsed running time (ascending).
public static final Comparator<ApplicationInformation> RunningTimeComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return Long.valueOf(a1.runningTime).compareTo(a2.runningTime); } };
// Orders applications lexicographically by name.
public static final Comparator<ApplicationInformation> AppNameComparator = new Comparator<ApplicationInformation>() { @Override public int compare(ApplicationInformation a1, ApplicationInformation a2) { return a1.name.compareTo(a2.name); } };
// Snapshot of NodeManager counts shown in the cluster header line.
private static class NodesInformation { int totalNodes; int runningNodes; int unhealthyNodes; int decommissionedNodes; int lostNodes; int rebootedNodes; }
// Aggregated queue statistics. The *MemoryGB fields are accumulated in MB by
// getQueueMetrics() and divided by 1024 at the end of that method.
private static class QueueMetrics { long appsSubmitted; long appsRunning; long appsPending; long appsCompleted; long appsKilled; long appsFailed; long activeUsers; long availableMemoryGB; long allocatedMemoryGB; long pendingMemoryGB; long reservedMemoryGB; long availableVCores; long allocatedVCores; long pendingVCores; long reservedVCores; }
// Background thread reading one whitespace-delimited token per keypress (the
// user must press Enter) and dispatching it to the handler for the screen
// currently displayed. Loops until runKeyboardMonitor is cleared.
private class KeyboardMonitor extends Thread { public void run() { Scanner keyboard = new Scanner(System.in, "UTF-8"); while (runKeyboardMonitor.get()) { String in = keyboard.next(); try { if (displayScreen == DisplayScreen.SORT) { handleSortScreenKeyPress(in); } else if (displayScreen == DisplayScreen.TOP) { handleTopScreenKeyPress(in); } else if (displayScreen == DisplayScreen.FIELDS) { handleFieldsScreenKeyPress(in); } else { handleHelpScreenKeyPress(); } } catch (Exception e) { LOG.error("Caught exception", e); } } } }
// Mutable CLI state. refreshPeriod is in milliseconds; terminal sizes of -1
// mean "not yet determined" (resolved in parseOptions / setTerminalWidth|Height).
long refreshPeriod = 3 * 1000; int terminalWidth = -1; int terminalHeight = -1; String appsHeader; boolean ascendingSort; long rmStartTime; Comparator<ApplicationInformation> comparator; Options opts; CommandLine cliParser; Set<String> queues; Set<String> users; Set<String> types; DisplayScreen displayScreen; AtomicBoolean showingTopScreen; AtomicBoolean runMainLoop; AtomicBoolean runKeyboardMonitor; final Object lock = new Object(); String currentSortField; Map<String, Columns> keyFieldsMap; List<String> sortedKeys; Thread displayThread; final EnumMap<Columns, ColumnInformation> columnInformationEnumMap;
// Sets up the filters, the default sort (key "c" = used containers,
// descending), the column/hotkey tables, and caches the tput escape sequences.
// displayThread is the constructing (main/display) thread so the keyboard
// monitor can interrupt its sleep on quit.
public TopCLI() throws IOException, InterruptedException { super(); queues = new HashSet<>(); users = new HashSet<>(); types = new HashSet<>(); comparator = UsedContainersComparator; ascendingSort = false; displayScreen = DisplayScreen.TOP; showingTopScreen = new AtomicBoolean(); showingTopScreen.set(true); currentSortField = "c"; keyFieldsMap = new HashMap<>(); runKeyboardMonitor = new AtomicBoolean(); runMainLoop = new AtomicBoolean(); runKeyboardMonitor.set(true); runMainLoop.set(true); displayThread = Thread.currentThread(); columnInformationEnumMap = new EnumMap<>(Columns.class); generateColumnInformationMap(); generateKeyFieldsMap(); sortedKeys = new ArrayList<>(keyFieldsMap.keySet()); Collections.sort(sortedKeys); setTerminalSequences(); }
// Entry point: runs the tool through Hadoop's ToolRunner and exits with its result.
public static void main(String[] args) throws Exception { TopCLI topImp = new TopCLI(); topImp.setSysOutPrintStream(System.out); topImp.setSysErrPrintStream(System.err); int res = ToolRunner.run(topImp, args); topImp.stop(); System.exit(res); }
// Main display loop: refreshes the active screen until 'q' clears runMainLoop.
// Sleeps refreshPeriod between top-screen refreshes (interrupted on quit) and
// 100 ms on the interactive sort/fields screens.
@Override public int run(String[] args) throws Exception { try { parseOptions(args); if (cliParser.hasOption("help")) { printUsage(); return 0; } } catch (Exception e) { LOG.error("Unable to parse options", e); return 1; } setAppsHeader(); Thread keyboardMonitor = new KeyboardMonitor(); keyboardMonitor.start(); rmStartTime = getRMStartTime(); clearScreen(); while (runMainLoop.get()) { if (displayScreen == DisplayScreen.TOP) { showTopScreen(); try { Thread.sleep(refreshPeriod); } catch (InterruptedException ie) { break; } } else if (displayScreen == DisplayScreen.SORT) { showSortScreen(); Thread.sleep(100); } else if (displayScreen == DisplayScreen.FIELDS) { showFieldsScreen(); Thread.sleep(100); } if (rmStartTime == -1) {
// we were unable to get it the first time, try again
rmStartTime = getRMStartTime(); } } clearScreen(); return 0; }
// Parses the command line into the queue/user/type filters, the terminal
// dimensions (falling back to tput detection) and the refresh delay in
// seconds (values below 1 are rejected with a warning, keeping the default).
private void parseOptions(String[] args) throws ParseException, IOException, InterruptedException {
// Command line options
opts = new Options(); opts.addOption("queues", true, "Comma separated list of queues to restrict applications"); opts.addOption("users", true, "Comma separated list of users to restrict applications"); opts.addOption("types", true, "Comma separated list of types to restrict" + " applications, case sensitive(though the display is lower case)"); opts.addOption("cols", true, "Number of columns on the terminal"); opts.addOption("rows", true, "Number of rows on the terminal"); opts.addOption("help", false, "Print usage; for help while the tool is running press 'h' + Enter"); opts.addOption("delay", true, "The refresh delay(in seconds), default is 3 seconds"); cliParser = new GnuParser().parse(opts, args); if (cliParser.hasOption("queues")) { String clqueues = cliParser.getOptionValue("queues"); String[] queuesArray = clqueues.split(","); queues.addAll(Arrays.asList(queuesArray)); } if (cliParser.hasOption("users")) { String clusers = cliParser.getOptionValue("users"); users.addAll(Arrays.asList(clusers.split(","))); } if (cliParser.hasOption("types")) { String cltypes = cliParser.getOptionValue("types"); types.addAll(Arrays.asList(cltypes.split(","))); } if (cliParser.hasOption("cols")) { terminalWidth = Integer.parseInt(cliParser.getOptionValue("cols")); } else { setTerminalWidth(); } if (cliParser.hasOption("rows")) { terminalHeight = Integer.parseInt(cliParser.getOptionValue("rows")); } else { setTerminalHeight(); } if (cliParser.hasOption("delay")) { int delay = Integer.parseInt(cliParser.getOptionValue("delay")); if (delay < 1) { LOG.warn("Delay set too low, using default"); } else {
refreshPeriod = delay * 1000; } } }
// Prints command-line usage plus notes on result caching and the
// Enter-to-submit keyboard limitation.
private void printUsage() { new HelpFormatter().printHelp("yarn top", opts); System.out.println(""); System.out.println("'yarn top' is a tool to help cluster administrators" + " understand cluster usage better."); System.out.println("Some notes about the implementation:"); System.out.println("  1. Fetching information for all the apps is an" + " expensive call for the RM."); System.out.println("     To prevent a performance degradation, the results" + " are cached for 5 seconds,"); System.out.println("     irrespective of the delay value. Information about" + " the NodeManager(s) and queue"); System.out.println("     utilization stats are fetched at the specified" + " delay interval. Once we have a"); System.out.println("     better understanding of the performance impact," + " this might change."); System.out.println("  2. Since the tool is implemented in Java, you must" + " hit Enter for key presses to"); System.out.println("     be processed."); }
// Rebuilds the header row of the application table from the columns currently
// marked display=true, then truncates or pads it to exactly terminalWidth
// (minus the line separator) and appends the separator.
private void setAppsHeader() { List<String> formattedStrings = new ArrayList<>(); for (EnumMap.Entry<Columns, ColumnInformation> entry : columnInformationEnumMap.entrySet()) { if (entry.getValue().display) { formattedStrings.add(String.format(entry.getValue().format, entry.getValue().header)); } } appsHeader = StringUtils.join(formattedStrings.toArray(), " "); if (appsHeader.length() > terminalWidth) { appsHeader = appsHeader.substring(0, terminalWidth - System.lineSeparator().length()); } else { appsHeader += StringUtils.repeat(" ", terminalWidth - appsHeader.length() - System.lineSeparator().length()); } appsHeader += System.lineSeparator(); }
// Detects the terminal width via "tput cols" unless -cols was given;
// falls back to 80 columns when the output is not a number.
private void setTerminalWidth() throws IOException, InterruptedException { if (terminalWidth != -1) { return; } String[] command = { "tput", "cols" }; String op = getCommandOutput(command).trim(); try { terminalWidth = Integer.parseInt(op); } catch (NumberFormatException ne) { LOG.warn("Couldn't determine terminal width, setting to 80", ne); terminalWidth = 80; } }
// Detects the terminal height via "tput lines" unless -rows was given;
// falls back to 24 rows when the output is not a number.
private void setTerminalHeight() throws IOException, InterruptedException { if (terminalHeight != -1) { return; } String[] command = { "tput", "lines" }; String op = getCommandOutput(command).trim(); try { terminalHeight = Integer.parseInt(op); } catch (NumberFormatException ne) { LOG.warn("Couldn't determine terminal height, setting to 24", ne); terminalHeight = 24; } }
// Caches the terminal control escape sequences (cursor home, clear screen,
// clear line, jump to line 6, standout mode on/off) by shelling out to tput
// once at startup. The target fields are declared outside this chunk.
protected void setTerminalSequences() throws IOException, InterruptedException { String[] tput_cursor_home = { "tput", "cup", "0", "0" }; String[] tput_clear = { "tput", "clear" }; String[] tput_clear_line = { "tput", "el" }; String[] tput_set_cursor_line_6_column_0 = { "tput", "cup", "5", "0" }; String[] tput_change_background = { "tput", "smso" }; String[] tput_reset_background = { "tput", "rmso" }; SET_CURSOR_HOME = getCommandOutput(tput_cursor_home); CLEAR = getCommandOutput(tput_clear); CLEAR_LINE = getCommandOutput(tput_clear_line); SET_CURSOR_LINE_6_COLUMN_0 = getCommandOutput(tput_set_cursor_line_6_column_0); CHANGE_BACKGROUND = getCommandOutput(tput_change_background); RESET_BACKGROUND = getCommandOutput(tput_reset_background); }
// Registers every displayable column: header text, printf-style format,
// default visibility, description (shown on the sort/fields screens) and the
// single-letter hotkey used for sorting/toggling.
private void generateColumnInformationMap() { columnInformationEnumMap.put(Columns.APPID, new ColumnInformation( "APPLICATIONID", "%31s", true, "Application Id", "a")); columnInformationEnumMap.put(Columns.USER, new ColumnInformation("USER", "%-10s", true, "Username", "u")); columnInformationEnumMap.put(Columns.TYPE, new ColumnInformation("TYPE", "%10s", true, "Application type", "t")); columnInformationEnumMap.put(Columns.QUEUE, new ColumnInformation("QUEUE", "%10s", true, "Application queue", "q")); columnInformationEnumMap.put(Columns.CONT, new ColumnInformation("#CONT", "%7s", true, "Number of containers", "c")); columnInformationEnumMap.put(Columns.RCONT, new ColumnInformation("#RCONT", "%7s", true, "Number of reserved containers", "r")); columnInformationEnumMap.put(Columns.VCORES, new ColumnInformation( "VCORES", "%7s", true, "Allocated vcores", "v")); columnInformationEnumMap.put(Columns.RVCORES, new ColumnInformation( "RVCORES", "%7s", true, "Reserved vcores", "o")); columnInformationEnumMap.put(Columns.MEM, new ColumnInformation("MEM", "%7s", true, "Allocated memory", "m")); columnInformationEnumMap.put(Columns.RMEM, new ColumnInformation("RMEM", "%7s", true, "Reserved memory", "w")); columnInformationEnumMap.put(Columns.VCORESECS, new ColumnInformation( "VCORESECS", "%10s", true, "Vcore seconds", "s")); columnInformationEnumMap.put(Columns.MEMSECS, new ColumnInformation( "MEMSECS", "%10s", true, "Memory seconds(in GBseconds)", "y")); columnInformationEnumMap.put(Columns.PROGRESS, new ColumnInformation( "%PROGR", "%6s", true, "Progress(percentage)", "p")); columnInformationEnumMap.put(Columns.TIME, new ColumnInformation("TIME", "%10s", true, "Running time", "i")); columnInformationEnumMap.put(Columns.NAME, new ColumnInformation("NAME", "%s", true, "Application name", "n")); }
// Builds the inverse index: hotkey letter -> column enum.
private void generateKeyFieldsMap() { for (EnumMap.Entry<Columns, ColumnInformation> entry : columnInformationEnumMap.entrySet()) { keyFieldsMap.put(entry.getValue().key, entry.getKey()); } }
// Fetches cluster-wide NodeManager counts from the RM; on failure logs and
// returns a zero-filled NodesInformation rather than propagating the error.
protected NodesInformation getNodesInfo() { NodesInformation nodeInfo = new NodesInformation(); YarnClusterMetrics yarnClusterMetrics; try { yarnClusterMetrics = client.getYarnClusterMetrics(); } catch (IOException ie) { LOG.error("Unable to fetch cluster metrics", ie); return nodeInfo; } catch (YarnException ye) { LOG.error("Unable to fetch cluster metrics", ye); return nodeInfo; } nodeInfo.decommissionedNodes = yarnClusterMetrics.getNumDecommissionedNodeManagers(); nodeInfo.totalNodes = yarnClusterMetrics.getNumNodeManagers(); nodeInfo.runningNodes = yarnClusterMetrics.getNumActiveNodeManagers(); nodeInfo.lostNodes = yarnClusterMetrics.getNumLostNodeManagers(); nodeInfo.unhealthyNodes = yarnClusterMetrics.getNumUnhealthyNodeManagers(); nodeInfo.rebootedNodes = yarnClusterMetrics.getNumRebootedNodeManagers(); return nodeInfo; }
protected
// Aggregates QueueStatistics over either the root queues (no -queues filter)
// or the explicitly requested queues. Memory is summed in MB and converted to
// GB at the end; any fetch failure logs and returns the (possibly partial)
// metrics collected so far.
QueueMetrics getQueueMetrics() { QueueMetrics queueMetrics = new QueueMetrics(); List<QueueInfo> queuesInfo; if (queues.isEmpty()) { try { queuesInfo = client.getRootQueueInfos(); } catch (Exception ie) { LOG.error("Unable to get queue information", ie); return queueMetrics; } } else { queuesInfo = new ArrayList<>(); for (String queueName : queues) { try { QueueInfo qInfo = client.getQueueInfo(queueName); queuesInfo.add(qInfo); } catch (Exception ie) { LOG.error("Unable to get queue information", ie); return queueMetrics; } } } for (QueueInfo childInfo : queuesInfo) { QueueStatistics stats = childInfo.getQueueStatistics(); if (stats != null) { queueMetrics.appsSubmitted += stats.getNumAppsSubmitted(); queueMetrics.appsRunning += stats.getNumAppsRunning(); queueMetrics.appsPending += stats.getNumAppsPending(); queueMetrics.appsCompleted += stats.getNumAppsCompleted(); queueMetrics.appsKilled += stats.getNumAppsKilled(); queueMetrics.appsFailed += stats.getNumAppsFailed(); queueMetrics.activeUsers += stats.getNumActiveUsers(); queueMetrics.availableMemoryGB += stats.getAvailableMemoryMB(); queueMetrics.allocatedMemoryGB += stats.getAllocatedMemoryMB(); queueMetrics.pendingMemoryGB += stats.getPendingMemoryMB(); queueMetrics.reservedMemoryGB += stats.getReservedMemoryMB(); queueMetrics.availableVCores += stats.getAvailableVCores(); queueMetrics.allocatedVCores += stats.getAllocatedVCores(); queueMetrics.pendingVCores += stats.getPendingVCores(); queueMetrics.reservedVCores += stats.getReservedVCores(); } } queueMetrics.availableMemoryGB = queueMetrics.availableMemoryGB / 1024; queueMetrics.allocatedMemoryGB = queueMetrics.allocatedMemoryGB / 1024; queueMetrics.pendingMemoryGB = queueMetrics.pendingMemoryGB / 1024; queueMetrics.reservedMemoryGB = queueMetrics.reservedMemoryGB / 1024; return queueMetrics; }
// Reads the RM start timestamp ("startedOn", epoch millis) from the RM's
// /ws/v1/cluster/info REST endpoint; returns -1 on any failure so run() can
// retry on the next refresh. NOTE(review): assumes a plain-HTTP web app
// address — TODO confirm behavior on HTTPS-only clusters.
long getRMStartTime() { try { URL url = new URL("http://" + client.getConfig().get(YarnConfiguration.RM_WEBAPP_ADDRESS) + "/ws/v1/cluster/info"); URLConnection conn = url.openConnection(); conn.connect(); InputStream in = conn.getInputStream(); String encoding = conn.getContentEncoding(); encoding = encoding == null ? "UTF-8" : encoding; String body = IOUtils.toString(in, encoding); JSONObject obj = new JSONObject(body); JSONObject clusterInfo = obj.getJSONObject("clusterInfo"); return clusterInfo.getLong("startedOn"); } catch (Exception e) { LOG.error("Could not fetch RM start time", e); } return -1; }
// Formats the five summary lines shown above the application table: uptime,
// NodeManager counts, application counts, memory and vcore usage. Each line
// is prefixed with the clear-line sequence and clipped to terminalWidth.
String getHeader(QueueMetrics queueMetrics, NodesInformation nodes) { StringBuilder ret = new StringBuilder(); String queue = "root"; if (!queues.isEmpty()) { queue = StringUtils.join(queues, ","); } long now = Time.now(); long uptime = now - rmStartTime; long days = TimeUnit.MILLISECONDS.toDays(uptime); long hours = TimeUnit.MILLISECONDS.toHours(uptime) - TimeUnit.DAYS.toHours(TimeUnit.MILLISECONDS.toDays(uptime)); long minutes = TimeUnit.MILLISECONDS.toMinutes(uptime) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(uptime)); String uptimeStr = String.format("%dd, %d:%d", days, hours, minutes); String currentTime = DateFormatUtils.ISO_TIME_NO_T_FORMAT.format(now); ret.append(CLEAR_LINE); ret.append(limitLineLength(String.format( "YARN top - %s, up %s, %d active users, queue(s): %s%n", currentTime, uptimeStr, queueMetrics.activeUsers, queue), terminalWidth, true)); ret.append(CLEAR_LINE); ret.append(limitLineLength(String.format( "NodeManager(s): %d total, %d active, %d unhealthy, %d decommissioned," + " %d lost, %d rebooted%n", nodes.totalNodes, nodes.runningNodes, nodes.unhealthyNodes, nodes.decommissionedNodes, nodes.lostNodes, nodes.rebootedNodes), terminalWidth, true)); ret.append(CLEAR_LINE); ret.append(limitLineLength(String.format( "Queue(s) Applications: %d running, %d submitted, %d pending," + " %d completed, %d killed, %d failed%n", queueMetrics.appsRunning, queueMetrics.appsSubmitted, queueMetrics.appsPending, queueMetrics.appsCompleted, queueMetrics.appsKilled, queueMetrics.appsFailed), terminalWidth, true)); ret.append(CLEAR_LINE); ret.append(limitLineLength(String.format("Queue(s) Mem(GB): %d available," + " %d allocated, %d pending, %d reserved%n", queueMetrics.availableMemoryGB, queueMetrics.allocatedMemoryGB, queueMetrics.pendingMemoryGB, queueMetrics.reservedMemoryGB), terminalWidth, true)); ret.append(CLEAR_LINE); ret.append(limitLineLength(String.format("Queue(s) VCores: %d available," + " %d allocated, %d pending, %d reserved%n", queueMetrics.availableVCores, queueMetrics.allocatedVCores, queueMetrics.pendingVCores, queueMetrics.reservedVCores), terminalWidth, true)); return ret.toString(); }
// Renders up to (terminalHeight - 8) application rows using the visible
// columns; rows beyond the available apps are emitted as blank lines so the
// screen area is always fully overwritten.
String getPrintableAppInformation(List<ApplicationInformation> appsInfo) { StringBuilder ret = new StringBuilder(); int limit = terminalHeight - 8; List<String> columns = new ArrayList<>(); for (int i = 0; i < limit; ++i) { ret.append(CLEAR_LINE); if(i < appsInfo.size()) { ApplicationInformation appInfo = appsInfo.get(i); columns.clear(); for (EnumMap.Entry<Columns, ColumnInformation> entry : columnInformationEnumMap.entrySet()) { if (entry.getValue().display) { String value = ""; if (appInfo.displayStringsMap.containsKey(entry.getKey())) { value = appInfo.displayStringsMap.get(entry.getKey()); } columns.add(String.format(entry.getValue().format, value)); } } ret.append(limitLineLength( (StringUtils.join(columns.toArray(), " ") + System.lineSeparator()), terminalWidth, true)); } else { ret.append(System.lineSeparator()); } } return ret.toString(); }
// Clears the whole terminal (scrollback behavior depends on the terminal's
// "clear" capability).
protected void clearScreen() { System.out.print(CLEAR); System.out.flush(); }
// Overwrites every visible line in place instead of issuing a full clear,
// avoiding scroll.
protected void clearScreenWithoutScroll() { System.out.print(SET_CURSOR_HOME); for(int i = 0; i < terminalHeight; ++i) { System.out.println(CLEAR_LINE); } }
// Prints the summary header starting from the top-left corner.
protected void printHeader(String header) { System.out.print(SET_CURSOR_HOME); System.out.print(header); System.out.println(""); }
// Prints the column header in standout (reverse video) mode, then the rows.
protected void printApps(String appInfo) { System.out.print(CLEAR_LINE); System.out.print(CHANGE_BACKGROUND + appsHeader + RESET_BACKGROUND);
System.out.print(appInfo); }
// Displays the static help screen once; guarded by showingTopScreen so a
// concurrent refresh does not repaint over it.
private void showHelpScreen() { synchronized (lock) { if (!showingTopScreen.get()) {
// we've already printed the help screen
return; } showingTopScreen.set(false); clearScreenWithoutScroll(); System.out.print(SET_CURSOR_HOME); System.out.println("Help for yarn top."); System.out.println("Delay: " + (refreshPeriod / 1000) + " secs; Secure mode: " + UserGroupInformation.isSecurityEnabled()); System.out.println(""); System.out.println("  s + Enter: Select sort field"); System.out.println("  f + Enter: Select fields to display"); System.out.println("  R + Enter: Reverse current sort order"); System.out.println("  h + Enter: Display this screen"); System.out.println("  q + Enter: Quit"); System.out.println(""); System.out.println("Press any key followed by Enter to continue"); }  }
// Displays the sort-field selection screen, marking the current sort key
// with '*'.
private void showSortScreen() { synchronized (lock) { showingTopScreen.set(false); System.out.print(SET_CURSOR_HOME); System.out.println(CLEAR_LINE + "Current Sort Field: " + currentSortField); System.out.println(CLEAR_LINE + "Select sort field via letter followed by" + " Enter, type any other key followed by Enter to return"); System.out.println(CLEAR_LINE); for (String key : sortedKeys) { String prefix = " "; if (key.equals(currentSortField)) { prefix = "*"; } ColumnInformation value = columnInformationEnumMap.get(keyFieldsMap.get(key)); System.out.print(CLEAR_LINE); System.out.println(String.format("%s %s: %-15s = %s", prefix, key, value.header, value.description)); } } }
// Displays the field-toggle screen; visible columns are marked '*' and their
// hotkey shown upper-case.
protected void showFieldsScreen() { synchronized (lock) { showingTopScreen.set(false); System.out.print(SET_CURSOR_HOME); System.out.println(CLEAR_LINE + "Current Fields: "); System.out.println(CLEAR_LINE + "Toggle fields via field letter followed" + " by Enter, type any other key followed by Enter to return"); for (String key : sortedKeys) { ColumnInformation info = columnInformationEnumMap.get(keyFieldsMap.get(key)); String prefix = " "; String letter = key; if (info.display) { prefix = "*"; letter = key.toUpperCase(); } System.out.print(CLEAR_LINE); System.out.println(String.format("%s %s: %-15s = %s", prefix, letter, info.header, info.description)); } } }
// One refresh of the main screen: fetch app reports (cached — see
// fetchAppReports), sort by the current comparator/direction, then paint the
// header and table under the display lock and park the cursor on line 6.
protected void showTopScreen() { List<ApplicationInformation> appsInfo = new ArrayList<>(); List<ApplicationReport> apps; try { apps = fetchAppReports(); } catch (Exception e) { LOG.error("Unable to get application information", e); return; } for (ApplicationReport appReport : apps) { ApplicationInformation appInfo = new ApplicationInformation(appReport); appsInfo.add(appInfo); } if (ascendingSort) { Collections.sort(appsInfo, comparator); } else { Collections.sort(appsInfo, Collections.reverseOrder(comparator)); } NodesInformation nodesInfo = getNodesInfo(); QueueMetrics queueMetrics = getQueueMetrics(); String header = getHeader(queueMetrics, nodesInfo); String appsStr = getPrintableAppInformation(appsInfo); synchronized (lock) { printHeader(header); printApps(appsStr); System.out.print(SET_CURSOR_LINE_6_COLUMN_0); System.out.print(CLEAR_LINE); } }
// Maps the pressed letter to a comparator; an unknown key restores the
// previous sort field and returns to the top screen.
private void handleSortScreenKeyPress(String input) { String f = currentSortField; currentSortField = input.toLowerCase(); switch (input.toLowerCase()) { case "a": comparator = AppIDComparator; break; case "u": comparator = UserComparator; break; case "t": comparator = AppTypeComparator; break; case "q": comparator = QueueNameComparator; break; case "c": comparator = UsedContainersComparator; break; case "r": comparator = ReservedContainersComparator; break; case "v": comparator = UsedVCoresComparator; break; case "o": comparator = ReservedVCoresComparator; break; case "m": comparator = UsedMemoryComparator; break; case "w": comparator = ReservedMemoryComparator; break; case "s": comparator = VCoreSecondsComparator; break; case "y": comparator = MemorySecondsComparator; break; case "p": comparator = ProgressComparator; break; case "i": comparator = RunningTimeComparator; break; case "n": comparator = AppNameComparator; break; default:
// it wasn't a sort key
currentSortField = f; showTopScreen(); showingTopScreen.set(true); displayScreen = DisplayScreen.TOP; } }
// Toggles the column for a recognized hotkey (and rebuilds the header);
// any other key returns to the top screen.
private void handleFieldsScreenKeyPress(String input) { if (keyFieldsMap.containsKey(input.toLowerCase())) { toggleColumn(keyFieldsMap.get(input.toLowerCase())); setAppsHeader(); } else { showTopScreen(); showingTopScreen.set(true); displayScreen = DisplayScreen.TOP; } }
// Top-screen commands: q quit, s sort screen, f fields screen, r reverse
// sort order, h help; anything else is ignored.
private void handleTopScreenKeyPress(String input) { switch (input.toLowerCase()) { case "q": runMainLoop.set(false); runKeyboardMonitor.set(false);
// wake up if it's sleeping
displayThread.interrupt(); break; case "s": displayScreen = DisplayScreen.SORT; showSortScreen(); break; case "f": displayScreen = DisplayScreen.FIELDS; showFieldsScreen(); break; case "r": ascendingSort = !ascendingSort; break; case "h": displayScreen = DisplayScreen.HELP; showHelpScreen(); break; default: break; } }
// Any key on the help screen returns to the top screen.
private void handleHelpScreenKeyPress() { showTopScreen(); showingTopScreen.set(true); displayScreen = DisplayScreen.TOP; }
// Clips a line to 'length' characters, optionally keeping a trailing line
// separator within that budget.
String limitLineLength(String line, int length, boolean addNewline) { if (line.length() > length) { String tmp; if (addNewline) { tmp = line.substring(0, length - System.lineSeparator().length()); tmp += System.lineSeparator(); } else { tmp = line.substring(0, length); } return tmp; } return line; }
// Flips the display flag for one column.
void toggleColumn(Columns col) { columnInformationEnumMap.get(col).display = !columnInformationEnumMap.get(col).display; }
// Fetches ACCEPTED/RUNNING application reports matching the current
// queue/user/type filters, memoized in applicationReportsCache keyed by the
// request object (cache policy declared outside this chunk).
protected List<ApplicationReport> fetchAppReports() throws YarnException, IOException { List<ApplicationReport> ret; EnumSet<YarnApplicationState> states = EnumSet.of(YarnApplicationState.ACCEPTED, YarnApplicationState.RUNNING); GetApplicationsRequest req = GetApplicationsRequest.newInstance(types, states); req.setQueues(queues); req.setUsers(users); ret = applicationReportsCache.getIfPresent(req); if (ret != null) { return ret; } ret = client.getApplications(queues, users, types, states); applicationReportsCache.put(req, ret); return ret; }
// Runs an external command (tput) and returns its stdout decoded as ASCII.
// NOTE(review): stdout is read only after waitFor(); for the tiny tput
// outputs used here the pipe buffer suffices.
private String getCommandOutput(String[] command) throws IOException, InterruptedException { Process p = Runtime.getRuntime().exec(command); p.waitFor(); byte[] output = IOUtils.toByteArray(p.getInputStream()); return new String(output, "ASCII"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.felix.webconsole.internal.servlet; import java.util.Enumeration; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.NoSuchElementException; import java.util.ResourceBundle; import javax.servlet.Servlet; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import org.apache.felix.webconsole.AbstractWebConsolePlugin; import org.apache.felix.webconsole.WebConsoleConstants; import org.apache.felix.webconsole.internal.OsgiManagerPlugin; import org.apache.felix.webconsole.internal.WebConsolePluginAdapter; import org.apache.felix.webconsole.internal.i18n.ResourceBundleManager; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; import org.osgi.framework.Constants; import org.osgi.framework.InvalidSyntaxException; import org.osgi.framework.ServiceEvent; import org.osgi.framework.ServiceListener; import org.osgi.framework.ServiceReference; import org.osgi.service.log.LogService; /** * The <code>PluginHolder</code> class implements the maintenance and lazy * access to web console plugin services. 
*/
class PluginHolder implements ServiceListener
{
    // The Web Console's bundle context to access the plugin services
    private final BundleContext bundleContext;
    // registered plugins (Map<String label, Plugin plugin>); guarded by
    // synchronizing on the map itself
    private final Map plugins;
    // The servlet context used to initialize plugin services
    private ServletContext servletContext;
    // the label of the default plugin
    private String defaultPluginLabel;
    PluginHolder( final BundleContext context ) { this.bundleContext = context; this.plugins = new HashMap(); }
    //---------- OsgiManager support API
    /**
     * Start using the plugin manager with registration as a service listener
     * and getting references to all plugins already registered in the
     * framework.
     */
    void open() { try { bundleContext.addServiceListener( this, "(" + Constants.OBJECTCLASS + "=" + WebConsoleConstants.SERVICE_NAME + ")" ); } catch ( InvalidSyntaxException ise ) {
        // not expected, thus fail hard
        throw new InternalError( "Failed registering for Servlet service events: " + ise.getMessage() ); } try { ServiceReference[] refs = bundleContext.getServiceReferences( WebConsoleConstants.SERVICE_NAME, null ); if ( refs != null ) { for ( int i = 0; i < refs.length; i++ ) { serviceAdded( refs[i] ); } } } catch ( InvalidSyntaxException ise ) {
        // not expected, thus fail hard
        throw new InternalError( "Failed getting existing Servlet services: " + ise.getMessage() ); } }
    /**
     * Stop using the plugin manager by removing as a service listener and
     * releasing all held plugins, which includes ungetting and destroying any
     * held plugin services.
     */
    void close() { bundleContext.removeServiceListener( this ); Plugin[] plugin = getPlugins(); for ( int i = 0; i < plugin.length; i++ ) { plugin[i].dispose(); } plugins.clear(); defaultPluginLabel = null; }
    /**
     * Returns label of the default plugin
     * @return label of the default plugin
     */
    String getDefaultPluginLabel() { return defaultPluginLabel; }
    /**
     * Sets the label of the default plugin
     * @param defaultPluginLabel the label to use as default
     */
    void setDefaultPluginLabel( String defaultPluginLabel ) { this.defaultPluginLabel = defaultPluginLabel; }
    // Registers a built-in plugin by class name; the class is instantiated
    // lazily by InternalPlugin on first access.
    void addInternalPlugin( final OsgiManager osgiManager, final String pluginClassName, final String label) { final Plugin plugin = new InternalPlugin(this, osgiManager, pluginClassName, label); addPlugin( label, plugin ); }
    /**
     * Adds an internal Web Console plugin
     * @param consolePlugin The internal Web Console plugin to add
     */
    void addOsgiManagerPlugin( final AbstractWebConsolePlugin consolePlugin ) { final String label = consolePlugin.getLabel(); final Plugin plugin = new Plugin( this, consolePlugin, label ); addPlugin( label, plugin ); }
    /**
     * Remove the internal Web Console plugin registered under the given label
     * @param label The label of the Web Console internal plugin to remove
     */
    void removeOsgiManagerPlugin( final String label ) { removePlugin( label ); }
    /**
     * Returns the plugin registered under the given label or <code>null</code>
     * if none is registered under that label. If the label is <code>null</code>
     * or empty, any registered plugin is returned or <code>null</code> if
     * no plugin is registered
     *
     * @param label The label of the plugin to return
     * @return The plugin or <code>null</code> if no plugin is registered with
     *      the given label.
     */
    AbstractWebConsolePlugin getPlugin( final String label ) { AbstractWebConsolePlugin consolePlugin = null; if ( label != null && label.length() > 0 ) { final Plugin plugin; synchronized ( plugins ) { plugin = ( Plugin ) plugins.get( label ); } if ( plugin != null ) { consolePlugin = plugin.getConsolePlugin(); } } else { Plugin[] plugins = getPlugins(); for ( int i = 0; i < plugins.length && consolePlugin == null; i++ ) { consolePlugin = plugins[i].getConsolePlugin(); } } return consolePlugin; }
    /**
     * Builds the map of labels to plugin titles to be stored as the
     * <code>felix.webconsole.labelMap</code> request attribute. This map
     * optionally localizes the plugin title using the providing bundle's
     * resource bundle if the first character of the title is a percent
     * sign (%). Titles not prefixed with a percent sign are added to the
     * map unmodified.
     *
     * @param resourceBundleManager The ResourceBundleManager providing
     *      localized titles
     * @param locale The locale to which the titles are to be localized
     *
     * @return The localized map of labels to titles
     */
    Map getLocalizedLabelMap( final ResourceBundleManager resourceBundleManager, final Locale locale ) { final Map map = new HashMap(); Plugin[] plugins = getPlugins(); for ( int i = 0; i < plugins.length; i++ ) { final Plugin plugin = plugins[i]; if (!plugin.isEnabled()) { continue; } final String label = plugin.getLabel(); String title = plugin.getTitle(); if ( title.startsWith( "%" ) ) { try { final ResourceBundle resourceBundle = resourceBundleManager.getResourceBundle( plugin.getBundle(), locale ); title = resourceBundle.getString( title.substring( 1 ) ); } catch ( Throwable e ) { /* ignore missing resource - use default title */ } } map.put( label, title ); } return map; }
    /**
     * Returns the bundle context of the Web Console itself.
     * @return the bundle context of the Web Console itself.
     */
    BundleContext getBundleContext() { return bundleContext; }
    /**
     * Sets the servlet context to be used to initialize plugin services.
     * A non-null context initializes all currently held plugins; a null
     * context destroys them and clears the field.
     * @param servletContext the servlet context, or null on shutdown
     */
    void setServletContext( ServletContext servletContext ) { final Plugin[] plugin = getPlugins(); if ( servletContext != null ) { this.servletContext = servletContext; for ( int i = 0; i < plugin.length; i++ ) { try { plugin[i].init(); } catch ( ServletException se ) {
        // TODO: log !!
    } } } else { for ( int i = 0; i < plugin.length; i++ ) { try { plugin[i].destroy(); } catch (Throwable t) {
        // TODO: log !!
    } } this.servletContext = null; } }
    /**
     * Returns the servlet context to be used to initialize plugin services
     * @return the servlet context to be used to initialize plugin services
     */
    ServletContext getServletContext() { return servletContext; }
    //---------- ServletListener
    /**
     * Called when plugin services are registered or unregistered (or modified,
     * which is currently ignored)
     *
     * @see org.osgi.framework.ServiceListener#serviceChanged(org.osgi.framework.ServiceEvent)
     */
    public void serviceChanged( ServiceEvent event ) { switch ( event.getType() ) { case ServiceEvent.REGISTERED:
        // add service
        serviceAdded( event.getServiceReference() ); break; case ServiceEvent.UNREGISTERING:
        // remove service
        serviceRemoved( event.getServiceReference() ); break; default:
        // update service
        break; } }
    // Registers a lazily-resolved ServletPlugin for a newly appeared service
    // that carries the plugin label property; services without it are ignored.
    private void serviceAdded( final ServiceReference serviceReference ) { final String label = getProperty( serviceReference, WebConsoleConstants.PLUGIN_LABEL ); if ( label != null ) { addPlugin( label, new ServletPlugin( this, serviceReference, label ) ); } }
    // Removes (and disposes) the plugin for a disappearing labeled service.
    private void serviceRemoved( final ServiceReference serviceReference ) { final String label = getProperty( serviceReference, WebConsoleConstants.PLUGIN_LABEL ); if ( label != null ) { removePlugin( label ); } }
    // Puts the plugin into the label map under the map's own lock. NOTE(review):
    // an existing plugin under the same label is overwritten without dispose().
    private void addPlugin( final String label, final Plugin plugin ) { synchronized ( plugins ) { plugins.put( label, plugin ); } }
    // Removes the plugin under the map lock and disposes it outside the lock.
    private void removePlugin( final String label ) { final Plugin oldPlugin; synchronized ( plugins ) { oldPlugin = ( Plugin ) plugins.remove( label ); } if ( oldPlugin != null ) { oldPlugin.dispose(); } }
    // Snapshot of the currently registered plugins, taken under the map lock.
    private Plugin[] getPlugins() { synchronized ( plugins ) { return ( Plugin[] ) plugins.values().toArray( new Plugin[plugins.size()] ); } }
    // Returns the service property only if it is a String; null otherwise.
    static String getProperty( final ServiceReference service, final String propertyName ) { final Object property = service.getProperty( propertyName ); if ( property instanceof String ) { return ( String ) property; } return null; }
    // Base holder for one registered plugin: caches the resolved servlet and
    // acts as its ServletConfig. Subclasses resolve the servlet lazily.
    private static class Plugin implements ServletConfig { private final PluginHolder holder; private final String label; private String title; private AbstractWebConsolePlugin consolePlugin;
    protected Plugin( final PluginHolder holder, final String label ) { this.holder = holder; this.label = label; }
    protected Plugin( final PluginHolder holder, final AbstractWebConsolePlugin plugin, final String label ) { this( holder, label ); if ( plugin == null ) { throw new NullPointerException( "plugin" ); } this.consolePlugin = plugin; }
    void init() throws ServletException { if (consolePlugin != null) { consolePlugin.init( this ); } }
    void destroy() { if (consolePlugin != null) { consolePlugin.destroy(); } }
    /**
     * Cleans up this plugin when it is not used any longer. This means
     * destroying the plugin servlet and, if it was registered as an OSGi
     * service, ungetting the service.
     */
    final void dispose() { if ( consolePlugin != null ) { try { consolePlugin.destroy(); } catch ( Exception e ) {
        // TODO: handle
    } doUngetConsolePlugin( consolePlugin ); consolePlugin = null; } }
    protected PluginHolder getHolder() { return holder; }
    Bundle getBundle() { return getHolder().getBundleContext().getBundle(); }
    final String getLabel() { return label; }
    protected void setTitle( String title ) { this.title = title; }
    // Lazily resolves and caches the title; falls back to the label.
    final String getTitle() { if ( title == null ) { final String title = doGetTitle(); this.title = ( title == null ) ? getLabel() : title; } return title; }
    protected String doGetTitle() {
        // get the service now
        final AbstractWebConsolePlugin consolePlugin = getConsolePlugin();
        // reset the title:
        // - null if the servlet cannot be loaded
        // - to the servlet's actual title if the servlet is loaded
        return ( consolePlugin != null ) ? consolePlugin.getTitle() : null; }
    // Lazily resolves, caches and initializes the servlet; a failed init()
    // drops the cached reference so a later call may retry.
    final AbstractWebConsolePlugin getConsolePlugin() { if ( consolePlugin == null ) { final AbstractWebConsolePlugin consolePlugin = doGetConsolePlugin(); if ( consolePlugin != null ) { try { this.consolePlugin = consolePlugin; init(); } catch ( ServletException se ) {
        // TODO: log
        this.consolePlugin = null; } } else {
        // TODO: log !!
    } } return consolePlugin; }
    protected boolean isEnabled() { return true; }
    protected AbstractWebConsolePlugin doGetConsolePlugin() { return consolePlugin; }
    protected void doUngetConsolePlugin( AbstractWebConsolePlugin consolePlugin ) { }
    //---------- ServletConfig interface
    public String getInitParameter( String name ) { return null; }
    public Enumeration getInitParameterNames() { return new Enumeration() { public boolean hasMoreElements() { return false; } public Object nextElement() { throw new NoSuchElementException(); } }; }
    public ServletContext getServletContext() { return getHolder().getServletContext(); }
    public String getServletName() { return getTitle(); } }
    // Plugin backed by an OSGi Servlet service; the servlet is obtained from
    // (and released back to) the service registry, and non-console servlets
    // are wrapped in a WebConsolePluginAdapter.
    private static class ServletPlugin extends Plugin { private final ServiceReference serviceReference;
    ServletPlugin( final PluginHolder holder, final ServiceReference serviceReference, final String label ) { super(holder, label); this.serviceReference = serviceReference; }
    Bundle getBundle() { return serviceReference.getBundle(); }
    protected String doGetTitle() {
        // check service Reference
        final String title = getProperty( serviceReference, WebConsoleConstants.PLUGIN_TITLE ); if ( title != null ) { return title; }
        // temporarily set the title to a non-null value to prevent
        // recursion issues if this method or the getServletName
        // method is called while the servlet is being acquired
        setTitle(getLabel()); return super.doGetTitle(); }
    protected AbstractWebConsolePlugin doGetConsolePlugin() { Object service = getHolder().getBundleContext().getService( serviceReference ); if ( service instanceof Servlet ) { final AbstractWebConsolePlugin servlet; if ( service instanceof AbstractWebConsolePlugin ) { servlet = ( AbstractWebConsolePlugin ) service; } else { servlet = new WebConsolePluginAdapter( getLabel(), ( Servlet ) service, serviceReference ); } return servlet; } return null; }
    protected void doUngetConsolePlugin( AbstractWebConsolePlugin consolePlugin ) { getHolder().getBundleContext().ungetService( serviceReference ); }
    //---------- ServletConfig overwrite (based on ServletReference)
    public String getInitParameter( String name ) { Object property = serviceReference.getProperty( name ); if ( property != null && !property.getClass().isArray() ) { return property.toString(); } return super.getInitParameter( name ); }
    public Enumeration getInitParameterNames() { final String[] keys = serviceReference.getPropertyKeys(); return new Enumeration() { int idx = 0; public boolean hasMoreElements() { return idx < keys.length; } public Object nextElement() { if ( hasMoreElements() ) { return keys[idx++]; } throw new NoSuchElementException(); } }; } }
    // Built-in plugin instantiated by class name; honors the manager's
    // per-class disable configuration. (Definition continues past this chunk.)
    static class InternalPlugin extends Plugin { final String pluginClassName; final OsgiManager osgiManager; AbstractWebConsolePlugin plugin; boolean doLog = true;
    protected InternalPlugin(PluginHolder holder, OsgiManager osgiManager, String pluginClassName, String label) { super(holder, label); this.osgiManager = osgiManager; this.pluginClassName = pluginClassName; }
    protected final boolean isEnabled() {
        // check if the plugin is enabled
        return !osgiManager.isPluginDisabled(pluginClassName); }
    protected AbstractWebConsolePlugin doGetConsolePlugin() { if (null == plugin) { if (!isEnabled()) { if (doLog) { osgiManager.log( LogService.LOG_INFO, "Ignoring plugin 
" + pluginClassName + ": Disabled by configuration" ); doLog = false; } return null; } try { Class pluginClass = getClass().getClassLoader().loadClass(pluginClassName); plugin = (AbstractWebConsolePlugin) pluginClass.newInstance(); if (plugin instanceof OsgiManagerPlugin) { ((OsgiManagerPlugin) plugin).activate(getBundle().getBundleContext()); } doLog = true; // reset logging if it succeeded } catch (Throwable t) { plugin = null; // in case only activate has faled! if (doLog) { osgiManager.log( LogService.LOG_WARNING, "Failed to instantiate plugin " + pluginClassName, t ); doLog = false; } } } return plugin; } protected void doUngetConsolePlugin(AbstractWebConsolePlugin consolePlugin) { if (consolePlugin == plugin) plugin = null; if (consolePlugin instanceof OsgiManagerPlugin) { ((OsgiManagerPlugin) consolePlugin).deactivate(); } super.doUngetConsolePlugin(consolePlugin); } } }
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.cpp;

import static com.google.devtools.build.lib.packages.ImplicitOutputsFunction.fromTemplates;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.ALWAYS_LINK_LIBRARY;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.ALWAYS_LINK_PIC_LIBRARY;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.ARCHIVE;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.ASSEMBLER;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.ASSEMBLER_WITH_C_PREPROCESSOR;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.CPP_HEADER;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.CPP_SOURCE;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.C_SOURCE;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.INTERFACE_SHARED_LIBRARY;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.OBJECT_FILE;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.PIC_ARCHIVE;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.PIC_OBJECT_FILE;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.SHARED_LIBRARY;
import static com.google.devtools.build.lib.rules.cpp.CppFileTypes.VERSIONED_SHARED_LIBRARY;

import com.google.devtools.build.lib.analysis.LanguageDependentFragment.LibraryLanguage;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.packages.Attribute.LateBoundLabel;
import com.google.devtools.build.lib.packages.Attribute.Transition;
import com.google.devtools.build.lib.packages.AttributeMap;
import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SafeImplicitOutputsFunction;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.rules.test.InstrumentedFilesCollector.InstrumentationSpec;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.util.FileTypeSet;
import com.google.devtools.build.lib.view.config.crosstool.CrosstoolConfig.LipoMode;

/**
 * Rule class definitions for C++ rules.
 */
public class CppRuleClasses {

  /** Returns true if this rule should create a dynamic library. */
  public static boolean shouldCreateDynamicLibrary(AttributeMap rule) {
    // A dynamic library is only created for rules that are not linked statically
    // and that appear to produce object files at all.
    return !rule.get("linkstatic", Type.BOOLEAN) && CcLibrary.appearsToHaveObjectFiles(rule);
  }

  /**
   * Implementation for the :lipo_context_collector attribute.
   */
  public static final LateBoundLabel<BuildConfiguration> LIPO_CONTEXT_COLLECTOR =
      new LateBoundLabel<BuildConfiguration>() {
    @Override
    public Label resolve(Rule rule, AttributeMap attributes, BuildConfiguration configuration) {
      // This attribute connects a target to the LIPO context target configured with the
      // lipo input collector configuration.
      CppConfiguration cppConfiguration = configuration.getFragment(CppConfiguration.class);
      // Only resolve the label when we are NOT already in the collector configuration and
      // LIPO binary mode is active; otherwise no context target is attached.
      return !cppConfiguration.isLipoContextCollector()
          && (cppConfiguration.getLipoMode() == LipoMode.BINARY)
          ? cppConfiguration.getLipoContextLabel()
          : null;
    }
  };

  /**
   * Configuration transitions required by LIPO.
   */
  public enum LipoTransition implements Transition {

    /**
     * LIPO context collector.
     *
     * <p>This configuration transition leads into a configuration that is used for collecting
     * C++ compilation contexts for LIPO builds so that e.g. an include path entry required by an
     * inlined function is there when the place is compiled where it is inlined at.
     */
    LIPO_COLLECTOR,

    /**
     * Transition used for switching back to the LIPO-optimized configuration.
     */
    TARGET_CONFIG_FOR_LIPO;

    @Override
    public boolean defaultsToSelf() {
      return true;
    }
  }

  // Artifacts of these types are discarded from the 'hdrs' attribute in cc rules
  static final FileTypeSet DISALLOWED_HDRS_FILES = FileTypeSet.of(
      ARCHIVE,
      PIC_ARCHIVE,
      ALWAYS_LINK_LIBRARY,
      ALWAYS_LINK_PIC_LIBRARY,
      SHARED_LIBRARY,
      INTERFACE_SHARED_LIBRARY,
      VERSIONED_SHARED_LIBRARY,
      OBJECT_FILE,
      PIC_OBJECT_FILE);

  /**
   * The set of instrumented source file types; keep this in sync with the list above. Note that
   * extension-less header files cannot currently be declared, so we cannot collect coverage for
   * those.
   */
  static final InstrumentationSpec INSTRUMENTATION_SPEC = new InstrumentationSpec(
      FileTypeSet.of(CPP_SOURCE, C_SOURCE, CPP_HEADER, ASSEMBLER_WITH_C_PREPROCESSOR, ASSEMBLER))
      .withSourceAttributes("srcs", "hdrs")
      .withDependencyAttributes("deps", "data");

  public static final LibraryLanguage LANGUAGE = new LibraryLanguage("C++");

  /**
   * Implicit outputs for cc_binary rules.
   */
  public static final SafeImplicitOutputsFunction CC_BINARY_STRIPPED =
      fromTemplates("%{name}.stripped");

  // Used for requesting dwp "debug packages".
  public static final SafeImplicitOutputsFunction CC_BINARY_DEBUG_PACKAGE =
      fromTemplates("%{name}.dwp");

  /**
   * Path of the build_interface_so script in the Blaze binary.
   */
  public static final String BUILD_INTERFACE_SO = "build_interface_so";

  /**
   * A string constant for the parse_headers feature.
   */
  public static final String PARSE_HEADERS = "parse_headers";

  /**
   * A string constant for the preprocess_headers feature.
   */
  public static final String PREPROCESS_HEADERS = "preprocess_headers";

  /**
   * A string constant for the module_maps feature; this is a precondition to the layering_check and
   * header_modules features.
   */
  public static final String MODULE_MAPS = "module_maps";

  /**
   * A string constant for the random_seed feature. This is used by gcc and Clang for the
   * randomization of symbol names that are in the anonymous namespace but have external linkage.
   */
  public static final String RANDOM_SEED = "random_seed";

  /**
   * A string constant for the compile_action_flags_in_flag_set feature. This feature is just a
   * transitional feature which helps telling whether -c and -o options are already in flag_set of
   * action_config in CROSSTOOL file. Once the transition is done, it should be removed.
   */
  public static final String COMPILE_ACTION_FLAGS_IN_FLAG_SET = "compile_action_flags_in_flag_set";

  /**
   * A string constant for the dependency_file feature. This feature generates the .d file.
   */
  public static final String DEPENDENCY_FILE = "dependency_file";

  /**
   * A string constant for the module_map_home_cwd feature.
   */
  public static final String MODULE_MAP_HOME_CWD = "module_map_home_cwd";

  /**
   * A string constant for the module_map_without_extern_module feature.
   *
   * <p>This features is a transitional feature; enabling it means that generated module maps
   * will not have "extern module" declarations inside them; instead, the module maps need
   * to be passed via the dependent_module_map_files build variable.
   *
   * <p>This variable is phrased negatively to aid the roll-out: currently, the default is that
   * "extern module" declarations are generated.
   */
  public static final String MODULE_MAP_WITHOUT_EXTERN_MODULE = "module_map_without_extern_module";

  /**
   * A string constant for the layering_check feature.
   */
  public static final String LAYERING_CHECK = "layering_check";

  /**
   * A string constant for the header_modules feature.
   */
  public static final String HEADER_MODULES = "header_modules";

  /**
   * A string constant for the header_module_compile feature.
   */
  public static final String HEADER_MODULE_COMPILE = "header_module_compile";

  /**
   * A string constant for the compile_all_modules feature.
   */
  public static final String COMPILE_ALL_MODULES = "compile_all_modules";

  /**
   * A string constant for the exclude_private_headers_in_module_maps feature.
   */
  public static final String EXCLUDE_PRIVATE_HEADERS_IN_MODULE_MAPS =
      "exclude_private_headers_in_module_maps";

  /**
   * A string constant for the use_header_modules feature.
   *
   * <p>This feature is only used during rollout; we expect to default enable this once we
   * have verified that module-enabled compilation is stable enough.
   */
  public static final String USE_HEADER_MODULES = "use_header_modules";

  /**
   * A string constant for the generate_submodules feature.
   *
   * <p>This feature is only used temporarily to make the switch to using submodules easier. With
   * submodules, each header of a cc_library is placed into a submodule of the module generated for
   * the appropriate target. As this influences the layering_check semantics and needs to be synced
   * with a clang release, we want to be able to switch back and forth easily.
   */
  public static final String GENERATE_SUBMODULES = "generate_submodules";

  /**
   * A string constant for the only_doth_headers_in_module_maps.
   *
   * <p>This feature filters any headers without a ".h" suffix from generated module maps.
   */
  public static final String ONLY_DOTH_HEADERS_IN_MODULE_MAPS =
      "only_doth_headers_in_module_maps";

  /**
   * A string constant for the no_legacy_features feature.
   *
   * <p>If this feature is enabled, Bazel will not extend the crosstool configuration with the
   * default legacy feature set.
   */
  public static final String NO_LEGACY_FEATURES = "no_legacy_features";

  /**
   * A string constant for the feature that makes us build per-object debug info files.
   */
  public static final String PER_OBJECT_DEBUG_INFO = "per_object_debug_info";

  /**
   * A string constant for the PIC feature.
   *
   * <p>If this feature is active (currently it cannot be switched off) and PIC compilation is
   * requested, the "pic" build variable will be defined with an empty string as its value.
   */
  public static final String PIC = "pic";

  /**
   * A string constant for the feature that represents preprocessor defines.
   */
  public static final String PREPROCESSOR_DEFINES = "preprocessor_defines";

  /**
   * A string constant for the include_paths feature.
   */
  public static final String INCLUDE_PATHS = "include_paths";

  /**
   * A string constant for the ThinLTO feature.
   */
  public static final String THIN_LTO = "thin_lto";

  /**
   * A string constant for the fdo_instrument feature.
   */
  public static final String FDO_INSTRUMENT = "fdo_instrument";

  /**
   * A string constant for the fdo_optimize feature.
   */
  public static final String FDO_OPTIMIZE = "fdo_optimize";

  /**
   * A string constant for the autofdo feature.
   */
  public static final String AUTOFDO = "autofdo";

  /**
   * A string constant for the lipo feature.
   */
  public static final String LIPO = "lipo";

  /**
   * A string constant for the coverage feature.
   */
  public static final String COVERAGE = "coverage";

  /** A string constant for the match-clif feature. */
  public static final String MATCH_CLIF = "match_clif";
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.webmonitor.history; import org.apache.flink.api.common.JobID; import org.apache.flink.configuration.HistoryServerOptions; import org.apache.flink.core.fs.FileStatus; import org.apache.flink.core.fs.FileSystem; import org.apache.flink.core.fs.Path; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.history.FsJobArchivist; import org.apache.flink.runtime.jobgraph.JobStatus; import org.apache.flink.runtime.messages.webmonitor.JobDetails; import org.apache.flink.runtime.messages.webmonitor.MultipleJobsDetails; import org.apache.flink.runtime.rest.messages.JobsOverviewHeaders; import org.apache.flink.runtime.util.ExecutorThreadFactory; import org.apache.flink.util.FileUtils; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonFactory; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileWriter; import java.io.IOException; 
import java.io.StringWriter;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TimerTask;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * This class is used by the {@link HistoryServer} to fetch the job archives that are located at
 * {@link HistoryServerOptions#HISTORY_SERVER_ARCHIVE_DIRS}. The directories are polled in regular intervals, defined
 * by {@link HistoryServerOptions#HISTORY_SERVER_ARCHIVE_REFRESH_INTERVAL}.
 *
 * <p>The archives are downloaded and expanded into a file structure analog to the REST API.
 */
class HistoryServerArchiveFetcher {

	private static final Logger LOG = LoggerFactory.getLogger(HistoryServerArchiveFetcher.class);

	// Shared Jackson objects; both are thread-safe and cached for reuse.
	private static final JsonFactory jacksonFactory = new JsonFactory();
	private static final ObjectMapper mapper = new ObjectMapper();

	// Single-threaded scheduler: polls run strictly sequentially, never concurrently.
	private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(
		new ExecutorThreadFactory("Flink-HistoryServer-ArchiveFetcher"));
	private final JobArchiveFetcherTask fetcherTask;
	private final long refreshIntervalMillis;

	/**
	 * Creates a fetcher that polls the given locations every {@code refreshIntervalMillis} ms and
	 * expands discovered archives below {@code webDir}.
	 *
	 * @param refreshIntervalMillis delay between the end of one poll and the start of the next
	 * @param refreshDirs archive locations to monitor
	 * @param webDir local directory the expanded JSON files are written to
	 * @param numFinishedPolls counted down once per completed poll (used by tests/callers to await progress)
	 */
	HistoryServerArchiveFetcher(long refreshIntervalMillis, List<HistoryServer.RefreshLocation> refreshDirs, File webDir, CountDownLatch numFinishedPolls) {
		this.refreshIntervalMillis = refreshIntervalMillis;
		this.fetcherTask = new JobArchiveFetcherTask(refreshDirs, webDir, numFinishedPolls);
		if (LOG.isInfoEnabled()) {
			for (HistoryServer.RefreshLocation refreshDir : refreshDirs) {
				LOG.info("Monitoring directory {} for archived jobs.", refreshDir.getPath());
			}
		}
	}

	/** Starts periodic polling; the first poll runs immediately (initial delay 0). */
	void start() {
		executor.scheduleWithFixedDelay(fetcherTask, 0, refreshIntervalMillis, TimeUnit.MILLISECONDS);
	}

	/**
	 * Stops polling; waits briefly for a running poll to finish before forcing shutdown.
	 * Note: the interrupt flag is not re-set here — callers do not observe the interruption.
	 */
	void stop() {
		executor.shutdown();
		try {
			if (!executor.awaitTermination(1, TimeUnit.SECONDS)) {
				executor.shutdownNow();
			}
		} catch (InterruptedException ignored) {
			executor.shutdownNow();
		}
	}

	/**
	 * {@link TimerTask} that polls the directories configured as {@link HistoryServerOptions#HISTORY_SERVER_ARCHIVE_DIRS} for
	 * new job archives.
	 */
	static class JobArchiveFetcherTask extends TimerTask {

		private final List<HistoryServer.RefreshLocation> refreshDirs;
		private final CountDownLatch numFinishedPolls;

		/** Cache of all available jobs identified by their id. */
		private final Set<String> cachedArchives;

		private final File webDir;
		// Per-job sub-directories live under webDir/jobs.
		private final File webJobDir;
		// Per-job overview JSON files live under webDir/overviews; they are merged by updateJobOverview.
		private final File webOverviewDir;

		private static final String JSON_FILE_ENDING = ".json";

		JobArchiveFetcherTask(List<HistoryServer.RefreshLocation> refreshDirs, File webDir, CountDownLatch numFinishedPolls) {
			this.refreshDirs = checkNotNull(refreshDirs);
			this.numFinishedPolls = numFinishedPolls;
			this.cachedArchives = new HashSet<>();
			this.webDir = checkNotNull(webDir);
			this.webJobDir = new File(webDir, "jobs");
			webJobDir.mkdir();
			this.webOverviewDir = new File(webDir, "overviews");
			webOverviewDir.mkdir();
		}

		/**
		 * One poll: lists every refresh directory, downloads each not-yet-cached job archive and
		 * expands its JSON files under the web directory. On failure the job is evicted from the
		 * cache and its partial files are cleaned up so the next poll retries it.
		 */
		@Override
		public void run() {
			try {
				for (HistoryServer.RefreshLocation refreshLocation : refreshDirs) {
					Path refreshDir = refreshLocation.getPath();
					FileSystem refreshFS = refreshLocation.getFs();
					// contents of /:refreshDir
					FileStatus[] jobArchives;
					try {
						jobArchives = refreshFS.listStatus(refreshDir);
					} catch (IOException e) {
						// Skip this location for now; it is retried on the next poll.
						LOG.error("Failed to access job archive location for path {}.", refreshDir, e);
						continue;
					}
					if (jobArchives == null) {
						continue;
					}
					boolean updateOverview = false;
					for (FileStatus jobArchive : jobArchives) {
						Path jobArchivePath = jobArchive.getPath();
						String jobID = jobArchivePath.getName();
						// Archive file names are expected to be job IDs; anything else is ignored.
						try {
							JobID.fromHexString(jobID);
						} catch (IllegalArgumentException iae) {
							LOG.debug("Archive directory {} contained file with unexpected name {}. Ignoring file.", refreshDir, jobID, iae);
							continue;
						}
						// add() returning true means this job was not fetched before.
						if (cachedArchives.add(jobID)) {
							try {
								for (ArchivedJson archive : FsJobArchivist.getArchivedJsons(jobArchive.getPath())) {
									String path = archive.getPath();
									String json = archive.getJson();
									File target;
									if (path.equals(JobsOverviewHeaders.URL)) {
										target = new File(webOverviewDir, jobID + JSON_FILE_ENDING);
									} else if (path.equals("/joboverview")) { // legacy path
										json = convertLegacyJobOverview(json);
										target = new File(webOverviewDir, jobID + JSON_FILE_ENDING);
									} else {
										target = new File(webDir, path + JSON_FILE_ENDING);
									}

									java.nio.file.Path parent = target.getParentFile().toPath();
									try {
										Files.createDirectories(parent);
									} catch (FileAlreadyExistsException ignored) {
										// there may be left-over directories from the previous attempt
									}
									java.nio.file.Path targetPath = target.toPath();

									// We overwrite existing files since this may be another attempt at fetching this archive.
									// Existing files may be incomplete/corrupt.
									Files.deleteIfExists(targetPath);

									Files.createFile(target.toPath());
									try (FileWriter fw = new FileWriter(target)) {
										fw.write(json);
										fw.flush();
									}
								}
								updateOverview = true;
							} catch (IOException e) {
								LOG.error("Failure while fetching/processing job archive for job {}.", jobID, e);
								// Make sure we attempt to fetch the archive again
								cachedArchives.remove(jobID);
								// Make sure we do not include this job in the overview
								try {
									Files.delete(new File(webOverviewDir, jobID + JSON_FILE_ENDING).toPath());
								} catch (IOException ioe) {
									LOG.debug("Could not delete file from overview directory.", ioe);
								}
								// Clean up job files we may have created
								File jobDirectory = new File(webJobDir, jobID);
								try {
									FileUtils.deleteDirectory(jobDirectory);
								} catch (IOException ioe) {
									LOG.debug("Could not clean up job directory.", ioe);
								}
							}
						}
					}
					if (updateOverview) {
						updateJobOverview(webOverviewDir, webDir);
					}
				}
			} catch (Exception e) {
				LOG.error("Critical failure while fetching/processing job archives.", e);
			}
			numFinishedPolls.countDown();
		}
	}

	/**
	 * Converts a legacy {@code /joboverview} JSON payload into the current
	 * {@link MultipleJobsDetails} representation containing exactly one job.
	 *
	 * @param legacyOverview JSON string with a "finished" array holding a single job entry
	 * @return JSON serialization of the equivalent {@link MultipleJobsDetails}
	 * @throws IOException if the input cannot be parsed or the result cannot be serialized
	 */
	private static String convertLegacyJobOverview(String legacyOverview) throws IOException {
		JsonNode root = mapper.readTree(legacyOverview);
		JsonNode finishedJobs = root.get("finished");
		// Each archive contains exactly one (finished) job, so only the first entry is read.
		JsonNode job = finishedJobs.get(0);

		JobID jobId = JobID.fromHexString(job.get("jid").asText());
		String name = job.get("name").asText();
		JobStatus state = JobStatus.valueOf(job.get("state").asText());

		long startTime = job.get("start-time").asLong();
		long endTime = job.get("end-time").asLong();
		long duration = job.get("duration").asLong();
		long lastMod = job.get("last-modification").asLong();

		JsonNode tasks = job.get("tasks");
		int numTasks = tasks.get("total").asInt();
		int pending = tasks.get("pending").asInt();
		int running = tasks.get("running").asInt();
		int finished = tasks.get("finished").asInt();
		int canceling = tasks.get("canceling").asInt();
		int canceled = tasks.get("canceled").asInt();
		int failed = tasks.get("failed").asInt();

		int[] tasksPerState = new int[ExecutionState.values().length];
		// pending is a mix of CREATED/SCHEDULED/DEPLOYING
		// to maintain the correct number of task states we have to pick one of them
		tasksPerState[ExecutionState.SCHEDULED.ordinal()] = pending;
		tasksPerState[ExecutionState.RUNNING.ordinal()] = running;
		tasksPerState[ExecutionState.FINISHED.ordinal()] = finished;
		tasksPerState[ExecutionState.CANCELING.ordinal()] = canceling;
		tasksPerState[ExecutionState.CANCELED.ordinal()] = canceled;
		tasksPerState[ExecutionState.FAILED.ordinal()] = failed;

		JobDetails jobDetails = new JobDetails(jobId, name, startTime, endTime, duration, state, lastMod, tasksPerState, numTasks);
		MultipleJobsDetails multipleJobsDetails = new MultipleJobsDetails(Collections.singleton(jobDetails));

		StringWriter sw = new StringWriter();
		mapper.writeValue(sw, multipleJobsDetails);
		return sw.toString();
	}

	/**
	 * This method replicates the JSON response that would be given by the JobsOverviewHandler when
	 * listing both running and finished jobs.
	 *
	 * <p>Every job archive contains a joboverview.json file containing the same structure. Since jobs are archived on
	 * their own however the list of finished jobs only contains a single job.
	 *
	 * <p>For the display in the HistoryServer WebFrontend we have to combine these overviews.
	 */
	private static void updateJobOverview(File webOverviewDir, File webDir) {
		try (JsonGenerator gen = jacksonFactory.createGenerator(HistoryServer.createOrGetFile(webDir, JobsOverviewHeaders.URL))) {
			File[] overviews = new File(webOverviewDir.getPath()).listFiles();
			if (overviews != null) {
				Collection<JobDetails> allJobs = new ArrayList<>(overviews.length);
				// Merge every per-job overview file into a single combined overview.
				for (File overview : overviews) {
					MultipleJobsDetails subJobs = mapper.readValue(overview, MultipleJobsDetails.class);
					allJobs.addAll(subJobs.getJobs());
				}
				mapper.writeValue(gen, new MultipleJobsDetails(allJobs));
			}
		} catch (IOException ioe) {
			LOG.error("Failed to update job overview.", ioe);
		}
	}
}
/**
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.pnc.spi.coordinator;

import lombok.Getter;
import org.jboss.pnc.model.BuildConfigSetRecord;
import org.jboss.pnc.model.BuildConfiguration;
import org.jboss.pnc.model.BuildConfigurationAudited;
import org.jboss.pnc.model.ProductMilestone;
import org.jboss.pnc.model.User;
import org.jboss.pnc.spi.BuildCoordinationStatus;
import org.jboss.pnc.spi.BuildOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Date;
import java.util.HashSet;
import java.util.Set;

/**
 * A single scheduled build of one build configuration, tracking its status, timing and its
 * dependency relations to other build tasks.
 *
 * Created by <a href="mailto:matejonnet@gmail.com">Matej Lazar</a> on 2014-12-23.
 */
public class BuildTask {

    private static final Logger log = LoggerFactory.getLogger(BuildTask.class);

    private final Integer id;

    private final BuildConfiguration buildConfiguration; //TODO decouple DB entity
    private final BuildConfigurationAudited buildConfigurationAudited; //TODO decouple DB entity

    @Getter
    private final BuildOptions buildOptions;

    private final User user;
    private final Date submitTime;

    private Date startTime;
    private Date endTime;

    private BuildCoordinationStatus status = BuildCoordinationStatus.NEW;
    private String statusDescription;

    /**
     * A list of builds waiting for this build to complete.
     */
    private final Set<BuildTask> dependants = new HashSet<>();

    /**
     * The builds which must be completed before this build can start
     */
    private final Set<BuildTask> dependencies = new HashSet<>();

    private final BuildSetTask buildSetTask;

    private final ProductMilestone productMilestone;

    private boolean hasFailed = false;

    private final Integer buildConfigSetRecordId;

    private BuildTask(BuildConfiguration buildConfiguration,
                      BuildConfigurationAudited buildConfigurationAudited,
                      BuildOptions buildOptions,
                      User user,
                      Date submitTime,
                      BuildSetTask buildSetTask,
                      int id,
                      Integer buildConfigSetRecordId,
                      ProductMilestone productMilestone) {
        this.id = id;
        this.buildConfiguration = buildConfiguration;
        this.buildConfigurationAudited = buildConfigurationAudited;
        this.buildOptions = buildOptions;
        this.user = user;
        this.submitTime = submitTime;
        this.buildSetTask = buildSetTask;
        this.buildConfigSetRecordId = buildConfigSetRecordId;
        this.productMilestone = productMilestone;
    }

    /**
     * Updates the coordination status and derives the failure flag from it.
     *
     * @param status the new coordination status
     */
    public void setStatus(BuildCoordinationStatus status) {
        this.status = status;
        setHasFailed(status.hasFailed());
    }

    public ProductMilestone getProductMilestone() {
        return productMilestone;
    }

    public Set<BuildTask> getDependencies() {
        return dependencies;
    }

    /**
     * Registers a build this task depends on and links the reverse (dependant) edge.
     *
     * @param buildTask the task this build must wait for
     */
    public void addDependency(BuildTask buildTask) {
        // Set.add returns false when already present: this both avoids a separate contains()
        // lookup and terminates the mutual addDependency/addDependant recursion.
        if (dependencies.add(buildTask)) {
            buildTask.addDependant(this);
        }
    }

    /**
     * @return current status
     */
    public BuildCoordinationStatus getStatus() {
        return status;
    }

    /**
     * @return Description of current status. Eg. WAITING: there is no available executor; FAILED: exceptionMessage
     */
    public String getStatusDescription() {
        return statusDescription;
    }

    public BuildConfiguration getBuildConfiguration() {
        return buildConfiguration;
    }

    public BuildConfigurationAudited getBuildConfigurationAudited() {
        return buildConfigurationAudited;
    }

    /**
     * Check if this build task has a build configuration dependency on the given build task
     *
     * @param buildTask The buildTask with the config to check
     * @return true if this task's build config has a dependency on the build config of the given task, otherwise false
     */
    public boolean hasConfigDependencyOn(BuildTask buildTask) {
        if (buildTask == null || this.equals(buildTask)) {
            return false;
        }

        if (buildConfiguration == null || buildConfiguration.getAllDependencies() == null) {
            return false;
        }

        return buildConfiguration.dependsOn(buildTask.getBuildConfiguration());
    }

    /**
     * Registers a build waiting on this task and links the reverse (dependency) edge.
     *
     * @param buildTask the task that must wait for this build
     */
    public void addDependant(BuildTask buildTask) {
        // See addDependency: the add() result guards the mutual recursion.
        if (dependants.add(buildTask)) {
            buildTask.addDependency(this);
        }
    }

    /**
     * A build task is equal to another build task if they are using the same
     * build configuration ID and version.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        BuildTask buildTask = (BuildTask) o;
        return buildConfigurationAudited.equals(buildTask.getBuildConfigurationAudited());
    }

    @Override
    public int hashCode() {
        return buildConfigurationAudited.hashCode();
    }

    public void setStatusDescription(String statusDescription) {
        this.statusDescription = statusDescription;
    }

    public boolean hasFailed() {
        return this.hasFailed;
    }

    void setHasFailed(boolean hasFailed) {
        this.hasFailed = hasFailed;
    }

    public int getId() {
        return id;
    }

    public Date getSubmitTime() {
        return submitTime;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    public User getUser() {
        return user;
    }

    public BuildSetTask getBuildSetTask() {
        return buildSetTask;
    }

    /**
     * Check if this build is ready to build, for example if all dependency builds
     * are complete.
     *
     * @return true if all dependencies have completed, false otherwise
     */
    public boolean readyToBuild() {
        for (BuildTask buildTask : dependencies) {
            if (!buildTask.getStatus().isCompleted()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public String toString() {
        return "Build Task id:" + id +
                ", name: " + buildConfigurationAudited.getName() +
                ", project name: " + buildConfigurationAudited.getProject().getName() +
                ", status: " + status;
    }

    /**
     * Factory method creating a {@link BuildTask}, resolving the build-config-set record id from
     * the (optional) {@code buildSetTask}.
     */
    public static BuildTask build(BuildConfiguration buildConfiguration,
                                  BuildConfigurationAudited buildConfigAudited,
                                  BuildOptions buildOptions,
                                  User user,
                                  int buildTaskId,
                                  BuildSetTask buildSetTask,
                                  Date submitTime,
                                  ProductMilestone productMilestone) {

        Integer buildConfigSetRecordId = null;
        if (buildSetTask != null) {
            buildConfigSetRecordId = buildSetTask.getBuildConfigSetRecord().map(BuildConfigSetRecord::getId).orElse(null);
        }

        return new BuildTask(
                buildConfiguration,
                buildConfigAudited,
                buildOptions,
                user,
                submitTime,
                buildSetTask,
                buildTaskId,
                buildConfigSetRecordId,
                productMilestone);
    }

    public Integer getBuildConfigSetRecordId() {
        return buildConfigSetRecordId;
    }
}
package floobits; import com.intellij.ide.impl.ProjectUtil; import com.intellij.ide.plugins.IdeaPluginDescriptor; import com.intellij.ide.plugins.PluginManager; import com.intellij.openapi.application.ApplicationInfo; import com.intellij.openapi.components.Service; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.extensions.PluginId; import com.intellij.openapi.fileChooser.impl.FileChooserUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.WindowManager; import com.intellij.platform.PlatformProjectOpenProcessor; import com.intellij.projectImport.ProjectAttachProcessor; import floobits.common.*; import floobits.dialogs.CreateAccount; import floobits.impl.ContextImpl; import floobits.utilities.Flog; import floobits.utilities.IntelliBrowserOpener; import floobits.utilities.SelectFolder; import org.jetbrains.annotations.NotNull; import java.awt.*; import java.io.File; import java.net.URI; @Service public class FloobitsApplicationService { private Boolean createAccount = true; public static FloobitsApplicationService getInstance() { return ServiceManager.getService(FloobitsApplicationService.class); } public FloobitsApplicationService () { BrowserOpener.replaceSingleton(new IntelliBrowserOpener()); ApplicationInfo instance = ApplicationInfo.getInstance(); PluginId pluginId = PluginManager.getPluginByClassName("com.floobits.unique.plugin.id"); IdeaPluginDescriptor[] plugins = PluginManager.getPlugins(); String version = "Unknown version"; for (IdeaPluginDescriptor plugin : plugins) { if (plugin.getPluginId() == pluginId) { version = plugin.getVersion(); } } createAccount = Bootstrap.bootstrap(instance.getVersionName(), instance.getMajorVersion(), instance.getMinorVersion(), version); } public synchronized void setupAccount(@NotNull ContextImpl context, @NotNull 
Runnable afterSetup) { if (!createAccount) { context.mainThread(afterSetup); return; } PersistentJson p = PersistentJson.getInstance(); if (p.disable_account_creation) { context.statusMessage("Please create a Floobits account and/or make a ~/.floorc.json (https://floobits.com/help/floorc)"); return; } createAccount = false; CreateAccount createAccountDialog = new CreateAccount(context.project, afterSetup); createAccountDialog.createCenterPanel(); createAccountDialog.show(); } public void joinWorkspace(final String url) { final FlooUrl f; try { f = new FlooUrl(url); } catch (Exception e) { Flog.errorMessage(String.format("Invalid url: %s", e), null); return; } SelectFolder.build(f.owner, f.workspace, new RunLater<String>() { @Override public void run(final String location) { Project projectForPath = getProject(location); if (projectForPath == null) { Flog.errorMessage("The editor could not open the project :(", null); return; } final ContextImpl context = ServiceManager.getService(projectForPath, FloobitsPlugin.class).context; context.writeThread(new Runnable() { @Override public void run() { context.project.save(); Window window = WindowManager.getInstance().suggestParentWindow(context.project); if (window != null) { window.toFront(); } context.joinWorkspace(f, location, false, null); } }); } }); } public void joinWorkspace(ContextImpl context, final FlooUrl flooUrl, final String location) { Project projectForPath = getProject(location); if (context == null || projectForPath != context.project) { if (projectForPath == null) { Flog.errorMessage("The editor could not open the project :(", null); return; } context = ServiceManager.getService(projectForPath, FloobitsPlugin.class).context; } // not gonna work here final ContextImpl finalContext = context; context.writeThread(new Runnable() { @Override public void run() { Window window = WindowManager.getInstance().suggestParentWindow(finalContext.project); if (window != null) { window.toFront(); } 
finalContext.joinWorkspace(flooUrl, location, false, null); } }); } public void joinWorkspace(final ContextImpl context, final String url) { final FlooUrl f; try { f = new FlooUrl(url); } catch (Throwable e) { Flog.errorMessage(String.format("Invalid url: %s", e), context != null ? context.project : null); return; } PersistentJson persistentJson = PersistentJson.getInstance(); Workspace workspace; try { workspace = persistentJson.workspaces.get(f.owner).get(f.workspace); } catch (Throwable e) { workspace = null; } if (workspace != null) { joinWorkspace(context, f, workspace.path); return; } if (context != null) { // Can be null if started from quick menu. FlooUrl flooUrl = DotFloo.read(context.project.getBasePath()); if (flooUrl != null) { URI uri = URI.create(flooUrl.toString()); URI normalizedURL = URI.create(url); if (uri.getPath().equals(normalizedURL.getPath())) { joinWorkspace(context, flooUrl, context.project.getBasePath()); return; } } } SelectFolder.build(f.owner, f.workspace, new RunLater<String>() { @Override public void run(String path) { joinWorkspace(context, f, path); } }); } private Project getProject(String path) { ProjectManager pm = ProjectManager.getInstance(); Project[] openProjects = pm.getOpenProjects(); for (Project project : openProjects) { if (path.equals(project.getBasePath())) { return project; } } VirtualFile file = LocalFileSystem.getInstance().findFileByIoFile(new File(path)); Project openedProject; if (ProjectAttachProcessor.canAttachToProject() && file != null) { PlatformProjectOpenProcessor platformProjectOpenProcessor = PlatformProjectOpenProcessor.getInstance(); openedProject = platformProjectOpenProcessor.doOpenProject(file, null, false); } else { openedProject = ProjectUtil.openOrImport(path, null, false); } if (openedProject == null) { try { String projectFilePath = ".idea/misc.xml"; if (path.endsWith(projectFilePath)) { Flog.error("Attempted to open the project misc.xml file instead of the directory."); path = 
path.replace(projectFilePath, ""); } openedProject = ProjectManager.getInstance().loadAndOpenProject(path); } catch (Throwable e) { Flog.error(e); API.uploadCrash(null, null, e); return null; } } // This is something Intellij does when a user opens a project from the menu: FileChooserUtil.setLastOpenedFile(openedProject, file); return openedProject; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow.util; import static org.apache.beam.runners.dataflow.util.TimeUtil.fromCloudTime; import com.google.api.services.dataflow.Dataflow; import com.google.api.services.dataflow.model.JobMessage; import com.google.api.services.dataflow.model.ListJobMessagesResponse; import com.google.common.base.MoreObjects; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import javax.annotation.Nullable; import org.apache.beam.runners.dataflow.DataflowClient; import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; import org.apache.beam.sdk.PipelineResult.State; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A helper class for monitoring jobs submitted to the service. 
*/ public class MonitoringUtil { private static final String GCLOUD_DATAFLOW_PREFIX = "gcloud beta dataflow"; private static final String ENDPOINT_OVERRIDE_ENV_VAR = "CLOUDSDK_API_ENDPOINT_OVERRIDES_DATAFLOW"; private static final Map<String, State> DATAFLOW_STATE_TO_JOB_STATE = ImmutableMap .<String, State>builder() .put("JOB_STATE_UNKNOWN", State.UNKNOWN) .put("JOB_STATE_STOPPED", State.STOPPED) .put("JOB_STATE_RUNNING", State.RUNNING) .put("JOB_STATE_DONE", State.DONE) .put("JOB_STATE_FAILED", State.FAILED) .put("JOB_STATE_CANCELLED", State.CANCELLED) .put("JOB_STATE_UPDATED", State.UPDATED) // A DRAINING job is still running - the closest mapping is RUNNING. .put("JOB_STATE_DRAINING", State.RUNNING) // A DRAINED job has successfully terminated - the closest mapping is DONE. .put("JOB_STATE_DRAINED", State.DONE) .build(); private static final String JOB_MESSAGE_ERROR = "JOB_MESSAGE_ERROR"; private static final String JOB_MESSAGE_WARNING = "JOB_MESSAGE_WARNING"; private static final String JOB_MESSAGE_BASIC = "JOB_MESSAGE_BASIC"; private static final String JOB_MESSAGE_DETAILED = "JOB_MESSAGE_DETAILED"; private static final String JOB_MESSAGE_DEBUG = "JOB_MESSAGE_DEBUG"; private final DataflowClient dataflowClient; /** * An interface that can be used for defining callbacks to receive a list * of JobMessages containing monitoring information. */ public interface JobMessagesHandler { /** Process the rows. */ void process(List<JobMessage> messages); } /** A handler that logs monitoring messages. */ public static class LoggingHandler implements JobMessagesHandler { private static final Logger LOG = LoggerFactory.getLogger(LoggingHandler.class); @Override public void process(List<JobMessage> messages) { for (JobMessage message : messages) { if (Strings.isNullOrEmpty(message.getMessageText())) { continue; } @Nullable Instant time = TimeUtil.fromCloudTime(message.getTime()); String logMessage = (time == null ? 
"UNKNOWN TIMESTAMP: " : time + ": ") + message.getMessageText(); switch (message.getMessageImportance()) { case JOB_MESSAGE_ERROR: LOG.error(logMessage); break; case JOB_MESSAGE_WARNING: LOG.warn(logMessage); break; case JOB_MESSAGE_BASIC: case JOB_MESSAGE_DETAILED: LOG.info(logMessage); break; case JOB_MESSAGE_DEBUG: LOG.debug(logMessage); break; default: LOG.trace(logMessage); } } } } /** Construct a helper for monitoring. */ public MonitoringUtil(DataflowClient dataflowClient) { this.dataflowClient = dataflowClient; } /** * Comparator for sorting rows in increasing order based on timestamp. */ public static class TimeStampComparator implements Comparator<JobMessage> { @Override public int compare(JobMessage o1, JobMessage o2) { @Nullable Instant t1 = fromCloudTime(o1.getTime()); if (t1 == null) { return -1; } @Nullable Instant t2 = fromCloudTime(o2.getTime()); if (t2 == null) { return 1; } return t1.compareTo(t2); } } /** * Return job messages sorted in ascending order by timestamp. * @param jobId The id of the job to get the messages for. * @param startTimestampMs Return only those messages with a * timestamp greater than this value. 
* @return collection of messages */ public List<JobMessage> getJobMessages( String jobId, long startTimestampMs) throws IOException { // TODO: Allow filtering messages by importance Instant startTimestamp = new Instant(startTimestampMs); ArrayList<JobMessage> allMessages = new ArrayList<>(); String pageToken = null; while (true) { ListJobMessagesResponse response = dataflowClient.listJobMessages(jobId, pageToken); if (response == null || response.getJobMessages() == null) { return allMessages; } for (JobMessage m : response.getJobMessages()) { @Nullable Instant timestamp = fromCloudTime(m.getTime()); if (timestamp == null) { continue; } if (timestamp.isAfter(startTimestamp)) { allMessages.add(m); } } if (response.getNextPageToken() == null) { break; } else { pageToken = response.getNextPageToken(); } } Collections.sort(allMessages, new TimeStampComparator()); return allMessages; } /** * @deprecated this method defaults the region to "us-central1". Prefer using the overload with * an explicit regionId parameter. */ @Deprecated public static String getJobMonitoringPageURL(String projectName, String jobId) { return getJobMonitoringPageURL(projectName, "us-central1", jobId); } public static String getJobMonitoringPageURL(String projectName, String regionId, String jobId) { try { // Project name is allowed in place of the project id: the user will be redirected to a URL // that has the project name replaced with project id. return String.format( "https://console.cloud.google.com/dataflow/jobsDetail/locations/%s/jobs/%s?project=%s", URLEncoder.encode(regionId, "UTF-8"), URLEncoder.encode(jobId, "UTF-8"), URLEncoder.encode(projectName, "UTF-8")); } catch (UnsupportedEncodingException e) { // Should never happen. 
throw new AssertionError("UTF-8 encoding is not supported by the environment", e); } } public static String getGcloudCancelCommand(DataflowPipelineOptions options, String jobId) { // If using a different Dataflow API than default, prefix command with an API override. String dataflowApiOverridePrefix = ""; String apiUrl = options.getDataflowClient().getBaseUrl(); if (!apiUrl.equals(Dataflow.DEFAULT_BASE_URL)) { dataflowApiOverridePrefix = String.format("%s=%s ", ENDPOINT_OVERRIDE_ENV_VAR, apiUrl); } // Assemble cancel command from optional prefix and project/job parameters. return String.format("%s%s jobs --project=%s cancel %s", dataflowApiOverridePrefix, GCLOUD_DATAFLOW_PREFIX, options.getProject(), jobId); } public static State toState(String stateName) { return MoreObjects.firstNonNull(DATAFLOW_STATE_TO_JOB_STATE.get(stateName), State.UNKNOWN); } }
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.wso2.andes.server.queue;

import org.apache.commons.lang.NotImplementedException;
import org.wso2.andes.AMQException;
import org.wso2.andes.framing.ContentHeaderBody;
import org.wso2.andes.framing.AMQShortString;
import org.wso2.andes.framing.BasicContentHeaderProperties;
import org.wso2.andes.framing.ContentBody;
import org.wso2.andes.framing.abstraction.MessagePublishInfo;
import org.wso2.andes.framing.abstraction.ContentChunk;
import org.wso2.andes.server.AMQChannel;
import org.wso2.andes.server.util.InternalBrokerBaseCase;
import org.wso2.andes.server.message.AMQMessage;
import org.wso2.andes.server.message.MessageMetaData;
import org.wso2.andes.server.subscription.Subscription;
import org.wso2.andes.server.subscription.SubscriptionFactory;
import org.wso2.andes.server.subscription.SubscriptionFactoryImpl;
import org.wso2.andes.server.protocol.InternalTestProtocolSession;
import org.wso2.andes.server.registry.ApplicationRegistry;
import org.wso2.andes.server.store.TestableMemoryMessageStore;
import org.wso2.org.apache.mina.common.ByteBuffer;

import javax.management.JMException;
import java.util.ArrayList;

/**
 * Test class to test AMQQueueMBean attributes and operations
 */
public class AMQQueueMBeanTest extends InternalBrokerBaseCase {
    // Size, in bytes, of every test message body sent via sendMessages()/message().
    private static long MESSAGE_SIZE = 1000;
    // MBean under test; wraps the queue created by the base class, rebuilt in setUp().
    private AMQQueueMBean _queueMBean;
    private static final SubscriptionFactoryImpl SUBSCRIPTION_FACTORY = SubscriptionFactoryImpl.INSTANCE;

    /** Verifies message/depth counters for transient messages through delete-from-top and clear. */
    public void testMessageCountTransient() throws Exception {
        int messageCount = 10;
        sendMessages(messageCount, false);
        assertTrue(_queueMBean.getMessageCount() == messageCount);
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);
        long queueDepth = (messageCount * MESSAGE_SIZE);
        assertTrue(_queueMBean.getQueueDepth() == queueDepth);

        _queueMBean.deleteMessageFromTop();
        assertTrue(_queueMBean.getMessageCount() == (messageCount - 1));
        // Received count is cumulative and must not drop when messages are deleted.
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);

        _queueMBean.clearQueue();
        assertEquals(0,(int)_queueMBean.getMessageCount());
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);

        //Ensure that the data has been removed from the Store
        verifyBrokerState();
    }

    /** Same counter checks as the transient test, but with persistent messages. */
    public void testMessageCountPersistent() throws Exception {
        int messageCount = 10;
        sendMessages(messageCount, true);
        assertEquals("", messageCount, _queueMBean.getMessageCount().intValue());
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);
        long queueDepth = (messageCount * MESSAGE_SIZE);
        assertTrue(_queueMBean.getQueueDepth() == queueDepth);

        _queueMBean.deleteMessageFromTop();
        assertTrue(_queueMBean.getMessageCount() == (messageCount - 1));
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);

        _queueMBean.clearQueue();
        assertTrue(_queueMBean.getMessageCount() == 0);
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);

        //Ensure that the data has been removed from the Store
        verifyBrokerState();
    }

    /** Deletes messages by id range (first, last, then the remainder) and checks counters. */
    public void testDeleteMessages() throws Exception {
        int messageCount = 10;
        sendMessages(messageCount, true);
        assertEquals("", messageCount, _queueMBean.getMessageCount().intValue());
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);
        long queueDepth = (messageCount * MESSAGE_SIZE);
        assertTrue(_queueMBean.getQueueDepth() == queueDepth);

        //delete first message
        _queueMBean.deleteMessages(1L,1L);
        assertTrue(_queueMBean.getMessageCount() == (messageCount - 1));
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);
        try {
            _queueMBean.viewMessageContent(1L);
            fail("Message should no longer be on the queue");
        } catch(Exception e) {
            // expected - the message was deleted above
        }

        //delete last message, leaving 2nd to 9th
        _queueMBean.deleteMessages(10L,10L);
        assertTrue(_queueMBean.getMessageCount() == (messageCount - 2));
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);
        try {
            _queueMBean.viewMessageContent(10L);
            fail("Message should no longer be on the queue");
        } catch(Exception e) {
            // expected - the message was deleted above
        }

        //delete remaining messages, leaving none
        _queueMBean.deleteMessages(2L,9L);
        assertTrue(_queueMBean.getMessageCount() == (0));
        assertTrue(_queueMBean.getReceivedMessageCount() == messageCount);

        //Ensure that the data has been removed from the Store
        verifyBrokerState();
    }

    // todo: collect to a general testing class -duplicated from Systest/MessageReturntest
    private void verifyBrokerState() {
        TestableMemoryMessageStore store = (TestableMemoryMessageStore) getVirtualHost().getMessageStore();

        // Unlike MessageReturnTest there is no need for a delay as there this thread does the clean up.
        assertEquals("Store should have no messages:" + store.getMessageCount(), 0, store.getMessageCount());
    }

    /** Registers three subscriptions and checks active/total consumer counts as one closes. */
    public void testConsumerCount() throws AMQException {
        assertTrue(getQueue().getActiveConsumerCount() == 0);
        assertTrue(_queueMBean.getActiveConsumerCount() == 0);

        InternalTestProtocolSession protocolSession = new InternalTestProtocolSession(getVirtualHost());

        AMQChannel channel = new AMQChannel(protocolSession, 1, getMessageStore());
        protocolSession.addChannel(channel);

        Subscription subscription =
                SUBSCRIPTION_FACTORY.createSubscription(channel.getChannelId(), protocolSession, new AMQShortString("test"), false, null, false, channel.getCreditManager());

        getQueue().registerSubscription(subscription, false);
        assertEquals(1,(int)_queueMBean.getActiveConsumerCount());

        SubscriptionFactory subscriptionFactory = SUBSCRIPTION_FACTORY;
        Subscription s1 = subscriptionFactory.createSubscription(channel.getChannelId(), protocolSession, new AMQShortString("S1"), false, null, true, channel.getCreditManager());

        Subscription s2 = subscriptionFactory.createSubscription(channel.getChannelId(), protocolSession, new AMQShortString("S2"), false, null, true, channel.getCreditManager());
        getQueue().registerSubscription(s1,false);
        getQueue().registerSubscription(s2,false);
        assertTrue(_queueMBean.getActiveConsumerCount() == 3);
        assertTrue(_queueMBean.getConsumerCount() == 3);

        // Closing a subscription lowers the active count but not the total consumer count.
        s1.close();
        assertEquals(2, (int) _queueMBean.getActiveConsumerCount());
        assertTrue(_queueMBean.getConsumerCount() == 3);
    }

    /** Round-trips the simple attributes (limits, name, durability, exclusivity) via the MBean. */
    public void testGeneralProperties() throws Exception {
        long maxQueueDepth = 1000; // in bytes
        _queueMBean.setMaximumMessageCount(50000l);
        _queueMBean.setMaximumMessageSize(2000l);
        _queueMBean.setMaximumQueueDepth(maxQueueDepth);

        assertEquals("Max MessageCount not set",50000,_queueMBean.getMaximumMessageCount().longValue());
        assertEquals("Max MessageSize not set",2000, _queueMBean.getMaximumMessageSize().longValue());
        assertEquals("Max QueueDepth not set",maxQueueDepth, _queueMBean.getMaximumQueueDepth().longValue());

        assertEquals("Queue Name does not match", new AMQShortString(getName()), _queueMBean.getName());
        assertFalse("AutoDelete should not be set.",_queueMBean.isAutoDelete());
        assertFalse("Queue should not be durable.",_queueMBean.isDurable());

        //set+get exclusivity using the mbean, and also verify it is actually updated in the queue
        _queueMBean.setExclusive(true);
        assertTrue("Exclusive property should be true.",_queueMBean.isExclusive());
        assertTrue("Exclusive property should be true.", getQueue().isExclusive());
        _queueMBean.setExclusive(false);
        assertFalse("Exclusive property should be false.",_queueMBean.isExclusive());
        assertFalse("Exclusive property should be false.", getQueue().isExclusive());
    }

    /** Checks viewMessages/viewMessageContent argument validation and missing-message errors. */
    public void testExceptions() throws Exception {
        // Invalid ranges: zero start, end before start, negative start.
        try {
            _queueMBean.viewMessages(0L, 3L);
            fail();
        } catch (JMException ex) {
            // expected - invalid range
        }

        try {
            _queueMBean.viewMessages(2L, 1L);
            fail();
        } catch (JMException ex) {
            // expected - invalid range
        }

        try {
            _queueMBean.viewMessages(-1L, 1L);
            fail();
        } catch (JMException ex) {
            // expected - invalid range
        }

        try {
            long end = Integer.MAX_VALUE;
            end+=2;
            _queueMBean.viewMessages(1L, end);
            fail("Expected Exception due to oversized(> 2^31) message range");
        } catch (JMException ex) {
            // expected - range too large
        }

        // Enqueue a single message directly so its content can be viewed by id.
        IncomingMessage msg = message(false, false);
        getQueue().clearQueue();
        ArrayList<AMQQueue> qs = new ArrayList<AMQQueue>();
        qs.add(getQueue());
        msg.enqueue(qs);
        MessageMetaData mmd = msg.headersReceived();
        msg.setStoredMessage(getMessageStore().addMessage(mmd));
        long id = msg.getMessageNumber();

        msg.addContentBodyFrame(new ContentChunk() {
            ByteBuffer _data = ByteBuffer.allocate((int)MESSAGE_SIZE);

            {
                _data.limit((int)MESSAGE_SIZE);
            }

            public int getSize() {
                return (int) MESSAGE_SIZE;
            }

            public ByteBuffer getData() {
                return _data;
            }

            public void reduceToFit() {
            }
        });

        AMQMessage m = new AMQMessage(msg.getStoredMessage());
        for(BaseQueue q : msg.getDestinationQueues()) {
            q.enqueue(m);
        }
        // _queue.process(_storeContext, new QueueEntry(_queue, msg), false);
        _queueMBean.viewMessageContent(id);
        try {
            // One past the only message on the queue - must fail.
            _queueMBean.viewMessageContent(id + 1);
            fail();
        } catch (JMException ex) {
            // expected - no such message
        }
    }

    /** Exercises capacity / flow-resume-capacity validation and the channel flow-control hook. */
    public void testFlowControlProperties() throws Exception {
        assertTrue(_queueMBean.getCapacity() == 0);
        assertTrue(_queueMBean.getFlowResumeCapacity() == 0);
        assertFalse(_queueMBean.isFlowOverfull());

        //capacity currently 0, try setting FlowResumeCapacity above this
        try {
            _queueMBean.setFlowResumeCapacity(1L);
            fail("Should have failed to allow setting FlowResumeCapacity above Capacity");
        } catch (IllegalArgumentException ex) {
            //expected exception
            assertTrue(_queueMBean.getFlowResumeCapacity() == 0);
        }

        //add a message to the queue
        sendMessages(1, true);

        //(FlowResume)Capacity currently 0, set both to 2
        _queueMBean.setCapacity(2L);
        assertTrue(_queueMBean.getCapacity() == 2L);
        _queueMBean.setFlowResumeCapacity(2L);
        assertTrue(_queueMBean.getFlowResumeCapacity() == 2L);

        //Try setting Capacity below FlowResumeCapacity
        try {
            _queueMBean.setCapacity(1L);
            fail("Should have failed to allow setting Capacity below FlowResumeCapacity");
        } catch (IllegalArgumentException ex) {
            //expected exception
            assertTrue(_queueMBean.getCapacity() == 2);
        }

        //create a channel and use it to exercise the capacity check mechanism
        AMQChannel channel = new AMQChannel(getSession(), 1, getMessageStore());
        getQueue().checkCapacity(channel);
        assertTrue(_queueMBean.isFlowOverfull());
        assertTrue(channel.getBlocking());

        //set FlowResumeCapacity to MESSAGE_SIZE and check queue is now underfull and channel unblocked
        _queueMBean.setCapacity(MESSAGE_SIZE);//must increase capacity too
        _queueMBean.setFlowResumeCapacity(MESSAGE_SIZE);
        assertFalse(_queueMBean.isFlowOverfull());
        assertFalse(channel.getBlocking());
    }

    /**
     * Builds an IncomingMessage of MESSAGE_SIZE bytes with a stubbed publish info.
     * Delivery mode is 2 (persistent) or 1 (transient) depending on the flag.
     */
    private IncomingMessage message(final boolean immediate, boolean persistent) throws AMQException {
        MessagePublishInfo publish = new MessagePublishInfo() {

            public AMQShortString getExchange() {
                return null;
            }

            public void setExchange(AMQShortString exchange) {
                throw new NotImplementedException();
            }

            public void setRoutingKey(AMQShortString routingKey) {
                throw new NotImplementedException();
            }

            public boolean isImmediate() {
                return immediate;
            }

            public boolean isMandatory() {
                return false;
            }

            public AMQShortString getRoutingKey() {
                return null;
            }
        };

        ContentHeaderBody contentHeaderBody = new ContentHeaderBody();
        contentHeaderBody.bodySize = MESSAGE_SIZE; // in bytes
        contentHeaderBody.setProperties(new BasicContentHeaderProperties());
        ((BasicContentHeaderProperties) contentHeaderBody.getProperties()).setDeliveryMode((byte) (persistent ? 2 : 1));
        IncomingMessage msg = new IncomingMessage(publish);
        msg.setContentHeaderBody(contentHeaderBody);
        return msg;
    }

    @Override
    public void setUp() throws Exception {
        super.setUp();
        _queueMBean = new AMQQueueMBean(getQueue());
    }

    public void tearDown() {
        ApplicationRegistry.remove();
    }

    /** Routes, stores and enqueues {@code messageCount} messages onto the test queue. */
    private void sendMessages(int messageCount, boolean persistent) throws AMQException {
        for (int i = 0; i < messageCount; i++) {
            IncomingMessage currentMessage = message(false, persistent);
            ArrayList<AMQQueue> qs = new ArrayList<AMQQueue>();
            qs.add(getQueue());
            currentMessage.enqueue(qs);

            // route header
            MessageMetaData mmd = currentMessage.headersReceived();
            currentMessage.setStoredMessage(getMessageStore().addMessage(mmd));

            // Add the body so we have something to test later
            currentMessage.addContentBodyFrame(
                    getSession().getMethodRegistry()
                            .getProtocolVersionMethodConverter()
                            .convertToContentChunk(
                                    new ContentBody(ByteBuffer.allocate((int) MESSAGE_SIZE), MESSAGE_SIZE)));
            AMQMessage m = new AMQMessage(currentMessage.getStoredMessage());
            for(BaseQueue q : currentMessage.getDestinationQueues()) {
                q.enqueue(m);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder; import java.util.Map; import java.util.function.BiFunction; import java.util.function.Function; import org.apache.camel.BeanScope; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.Expression; import org.apache.camel.ExpressionFactory; import org.apache.camel.Message; import org.apache.camel.Predicate; import org.apache.camel.support.ExpressionAdapter; import org.apache.camel.support.ExpressionToPredicateAdapter; import org.apache.camel.support.builder.Namespaces; /** * Represents an expression clause within the DSL which when the expression is complete the clause continues to another * part of the DSL */ public class ExpressionClause<T> implements Expression, Predicate { private ExpressionClauseSupport<T> delegate; private volatile Expression expr; private volatile Predicate pred; public ExpressionClause(T result) { this.delegate = new ExpressionClauseSupport<>(result); } // Helper expressions // ------------------------------------------------------------------------- /** * Specify an {@link Expression} instance */ public T expression(Expression expression) { return delegate.expression(expression); } /** * Specify the 
constant expression value. <b>Important:</b> this is a fixed constant value that is only set once
 * during starting up the route, do not use this if you want dynamic values during routing.
 */
public T constant(Object value) { return delegate.constant(value); }

/** An expression of the exchange. */
public T exchange() { return delegate.exchange(); }

/** A functional expression of the exchange. */
public T exchange(final Function<Exchange, Object> function) {
    return delegate.expression(new ExpressionAdapter() {
        public Object evaluate(Exchange ex) {
            return function.apply(ex);
        }
    });
}

/** An expression of an inbound message. */
public T message() { return inMessage(); }

/** A functional expression of an inbound message. */
public T message(final Function<Message, Object> function) { return inMessage(function); }

/** An expression of an inbound message. */
public T inMessage() { return delegate.inMessage(); }

/** A functional expression of an inbound message. */
public T inMessage(final Function<Message, Object> function) {
    return delegate.expression(new ExpressionAdapter() {
        public Object evaluate(Exchange ex) {
            return function.apply(ex.getIn());
        }
    });
}

/** An expression of an inbound message body. */
public T body() { return delegate.body(); }

/** A functional expression of an inbound message body. */
public T body(final Function<Object, Object> function) {
    return delegate.expression(new ExpressionAdapter() {
        public Object evaluate(Exchange ex) {
            return function.apply(ex.getIn().getBody());
        }
    });
}

/** A functional expression of an inbound message body and headers. */
public T body(final BiFunction<Object, Map<String, Object>, Object> function) {
    return delegate.expression(new ExpressionAdapter() {
        public Object evaluate(Exchange ex) {
            return function.apply(ex.getIn().getBody(), ex.getIn().getHeaders());
        }
    });
}

/** An expression of an inbound message body converted to the expected type. */
public T body(Class<?> expectedType) { return delegate.body(expectedType); }

/** A functional expression of an inbound message body converted to the expected type. */
public <B> T body(Class<B> expectedType, final Function<B, Object> function) {
    return delegate.expression(new ExpressionAdapter() {
        public Object evaluate(Exchange ex) {
            return function.apply(ex.getIn().getBody(expectedType));
        }
    });
}

/** A functional expression of an inbound message body converted to the expected type, and headers. */
public <B> T body(Class<B> expectedType, final BiFunction<B, Map<String, Object>, Object> function) {
    return delegate.expression(new ExpressionAdapter() {
        public Object evaluate(Exchange ex) {
            return function.apply(ex.getIn().getBody(expectedType), ex.getIn().getHeaders());
        }
    });
}

/** An expression of an inbound message header of the given name. */
public T header(String name) { return delegate.header(name); }

/** An expression of the inbound headers. */
public T headers() { return delegate.headers(); }

/** An expression of an exchange property of the given name. */
public T exchangeProperty(String name) { return delegate.exchangeProperty(name); }

/** An expression of the exchange properties. */
public T exchangeProperties() { return delegate.exchangeProperties(); }

// Languages
// -------------------------------------------------------------------------

/**
 * Evaluates an expression using the <a href="http://camel.apache.org/bean-language.html">bean language</a>:
 * the bean is invoked to determine the expression value.
 *
 * @param  bean the name of the bean looked up in the registry
 * @return      the builder to continue processing the DSL
 */
public T method(String bean) { return delegate.method(bean); }

/** Bean language: invokes the given bean instance to determine the expression value. */
public T method(Object instance) { return delegate.method(instance); }

/** Bean language: invokes a bean of the given type to determine the expression value. */
public T method(Class<?> beanType) { return delegate.method(beanType); }

/** Bean language: invokes the named method on the bean looked up in the registry. */
public T method(String bean, String method) { return delegate.method(bean, method); }

/** Bean language: invokes the bean looked up in the registry, using the given scope. */
public T method(String bean, BeanScope scope) { return delegate.method(bean, scope); }

/** Bean language: invokes the named method on the registry bean, using the given scope. */
public T method(String bean, String method, BeanScope scope) { return delegate.method(bean, method, scope); }

/** Bean language: invokes the named method on the given bean instance. */
public T method(Object instance, String method) { return delegate.method(instance, method); }

/** Bean language: invokes the named method on a bean of the given type. */
public T method(Class<?> beanType, String method) { return delegate.method(beanType, method); }

/** Bean language: invokes a bean of the given type, using the given scope. */
public T method(Class<?> beanType, BeanScope scope) { return delegate.method(beanType, scope); }

/** Bean language: invokes the named method on a bean of the given type, using the given scope. */
public T method(Class<?> beanType, String method, BeanScope scope) { return delegate.method(beanType, method, scope); }

/** Evaluates a <a href="http://camel.apache.org/groovy.html">Groovy expression</a>. */
public T groovy(String text) { return delegate.groovy(text); }

/** Returns a JOOR expression value builder. */
public T joor(String value) { return delegate.joor(value); }

/** Returns a JOOR expression value builder with the given result type. */
public T joor(String value, Class<?> resultType) { return delegate.joor(value, resultType); }

/** Evaluates a <a href="http://camel.apache.org/datasonnet.html">Datasonnet expression</a>. */
public T datasonnet(String text) { return delegate.datasonnet(text); }

/** Evaluates a <a href="http://camel.apache.org/jsonpath.html">Json Path expression</a>. */
public T jsonpath(String text) { return delegate.jsonpath(text); }

/**
 * Evaluates a Json Path expression.
 *
 * @param text               the expression to be evaluated
 * @param suppressExceptions whether to suppress exceptions such as PathNotFoundException
 */
public T jsonpath(String text, boolean suppressExceptions) { return delegate.jsonpath(text, suppressExceptions); }

/** Evaluates a Json Path expression with the given result type. */
public T jsonpath(String text, Class<?> resultType) { return delegate.jsonpath(text, resultType); }

/** Evaluates a Json Path expression with exception suppression and a result type. */
public T jsonpath(String text, boolean suppressExceptions, Class<?> resultType) {
    return delegate.jsonpath(text, suppressExceptions, resultType);
}

/**
 * Evaluates a Json Path expression against the named header with exception suppression and a result type.
 * The third delegate argument enables allow-simple mode.
 */
public T jsonpath(String text, boolean suppressExceptions, Class<?> resultType, String headerName) {
    return delegate.jsonpath(text, suppressExceptions, true, resultType, headerName);
}

/** Evaluates a Json Path expression with writeAsString enabled. */
public T jsonpathWriteAsString(String text) { return delegate.jsonpathWriteAsString(text); }

/** Evaluates a Json Path expression with writeAsString enabled and optional exception suppression. */
public T jsonpathWriteAsString(String text, boolean suppressExceptions) {
    return delegate.jsonpathWriteAsString(text, suppressExceptions);
}

/**
 * Evaluates a Json Path expression against the named header with writeAsString enabled.
 * The third delegate argument enables allow-simple mode.
 */
public T jsonpathWriteAsString(String text, boolean suppressExceptions, String headerName) {
    return delegate.jsonpathWriteAsString(text, suppressExceptions, true, headerName);
}

/** Evaluates an <a href="http://camel.apache.org/ognl.html">OGNL expression</a>. */
public T ognl(String text) { return delegate.ognl(text); }

/** Evaluates a <a href="http://camel.apache.org/mvel.html">MVEL expression</a>. */
public T mvel(String text) { return delegate.mvel(text); }

/** Evaluates a <a href="http://camel.apache.org/ref-language.html">Ref expression</a>. */
public T ref(String ref) { return delegate.ref(ref); }

/** Evaluates a <a href="http://camel.apache.org/spel.html">SpEL expression</a>. */
public T spel(String text) { return delegate.spel(text); }

/** Returns a compiled simple expression value builder. */
public T csimple(String value) { return delegate.csimple(value); }

/** Returns a compiled simple expression value builder with the given result type. */
public T csimple(String value, Class<?> resultType) { return delegate.csimple(value, resultType); }

/** Evaluates a <a href="http://camel.apache.org/simple.html">Simple expression</a>. */
public T simple(String text) { return delegate.simple(text); }

/** Evaluates a Simple expression with the given result type. */
public T simple(String text, Class<?> resultType) { return delegate.simple(text, resultType); }

/** Evaluates a token expression on the message body. */
public T tokenize(String token) { return delegate.tokenize(token); }

/** Evaluates a token expression on the message body; the token may be a regular expression. */
public T tokenize(String token, boolean regex) { return tokenize(token, regex, false); }

/** Evaluates a token expression on the message body, optionally skipping the first element. */
public T tokenize(String token, boolean regex, boolean skipFirst) {
    return delegate.tokenize(token, null, regex, skipFirst);
}

/** Evaluates a token expression on the message body, grouping by the given number. */
public T tokenize(String token, boolean regex, int group) { return tokenize(token, regex, group, false); }

/** Evaluates a token expression on the message body, grouping by the given (string) number. */
public T tokenize(String token, boolean regex, String group) { return tokenize(token, regex, group, false); }

/** Evaluates a token expression on the message body with grouping, optionally skipping the first element. */
public T tokenize(String token, boolean regex, int group, boolean skipFirst) {
    return delegate.tokenize(token, null, regex, group, skipFirst);
}

/** Evaluates a token expression on the message body with (string) grouping, optionally skipping the first element. */
public T tokenize(String token, boolean regex, String group, boolean skipFirst) {
    return delegate.tokenize(token, null, regex, group, skipFirst);
}

/** Evaluates a token expression on the message body with grouping and a group delimiter. */
public T tokenize(String token, boolean regex, int group, String groupDelimiter, boolean skipFirst) {
    return delegate.tokenize(token, null, regex, "" + group, groupDelimiter, skipFirst);
}

/** Evaluates a token expression on the message body, grouping by the given number. */
public T tokenize(String token, int group) { return delegate.tokenize(token, group); }
/** Evaluates a token expression on the message body with grouping, optionally skipping the first element. */
public T tokenize(String token, int group, boolean skipFirst) {
    return delegate.tokenize(token, group, skipFirst);
}

/** Evaluates a token expression on the named header. */
public T tokenize(String token, String headerName) { return delegate.tokenize(token, headerName); }

/** Evaluates a token expression on the named header; the token may be a regular expression. */
public T tokenize(String token, String headerName, boolean regex) {
    return delegate.tokenize(token, headerName, regex);
}

/**
 * Evaluates a token pair expression on the message body.
 * <p/>
 * Tokens are not included.
 */
public T tokenizePair(String startToken, String endToken) { return tokenizePair(startToken, endToken, false); }

/** Evaluates a token pair expression on the message body, optionally including the tokens. */
public T tokenizePair(String startToken, String endToken, boolean includeTokens) {
    return delegate.tokenizePair(startToken, endToken, includeTokens);
}

/** Evaluates an XML token expression on the message body with XML content. */
public T tokenizeXML(String tagName) { return tokenizeXML(tagName, null); }

/** Evaluates an XML token expression on the message body, grouping by the given number. */
public T tokenizeXML(String tagName, int group) { return tokenizeXML(tagName, null, group); }

/**
 * Evaluates an XML token pair expression on the message body.
 *
 * @param tagName                 the tag name of the child nodes to tokenize
 * @param inheritNamespaceTagName parent or root tag name that contains namespace(s) to inherit
 */
public T tokenizeXML(String tagName, String inheritNamespaceTagName) {
    return tokenizeXML(tagName, inheritNamespaceTagName, 0);
}

/** Evaluates an XML token pair expression on the message body with namespace inheritance and grouping. */
public T tokenizeXML(String tagName, String inheritNamespaceTagName, int group) {
    return delegate.tokenizeXMLPair(tagName, inheritNamespaceTagName, group);
}

/** Evaluates an XML StAX token expression on the given path (default mode 'i'). */
public T xtokenize(String path, Namespaces namespaces) { return xtokenize(path, 'i', namespaces); }

/** Evaluates an XML StAX token expression on the given path with the given extraction mode. */
public T xtokenize(String path, char mode, Namespaces namespaces) { return xtokenize(path, mode, namespaces, 0); }

/** Evaluates an XML StAX token expression with extraction mode and grouping. */
public T xtokenize(String path, char mode, Namespaces namespaces, int group) {
    return delegate.xtokenize(path, mode, namespaces, group);
}

/** Evaluates an <a href="http://camel.apache.org/xpath.html">XPath expression</a>. */
public T xpath(String text) { return delegate.xpath(text); }

/** Evaluates an XPath expression on the named header's contents. */
public T xpath(String text, String headerName) { return delegate.xpath(text, headerName); }

/** Evaluates an XPath expression with the given result type. */
public T xpath(String text, Class<?> resultType) { return delegate.xpath(text, resultType); }

/** Evaluates an XPath expression with a result type on the named header's contents. */
public T xpath(String text, Class<?> resultType, String headerName) {
    return delegate.xpath(text, resultType, headerName);
}

/** Evaluates an XPath expression with a result type and namespace prefixes/URIs. */
public T xpath(String text, Class<?> resultType, Namespaces namespaces) {
    return delegate.xpath(text, resultType, namespaces);
}

/** Evaluates an XPath expression with a result type and namespaces on the named header's contents. */
public T xpath(String text, Class<?> resultType, Namespaces namespaces, String headerName) {
    return delegate.xpath(text, resultType, namespaces, headerName);
}

/** Evaluates an XPath expression with a result type and a map of namespace prefixes to URIs. */
public T xpath(String text, Class<?> resultType, Map<String, String> namespaces) {
    return delegate.xpath(text, resultType, namespaces);
}

/** Evaluates an XPath expression with namespace prefixes/URIs. */
public T xpath(String text, Namespaces namespaces) { return delegate.xpath(text, namespaces); }

/** Evaluates an XPath expression with a map of namespace prefixes to URIs. */
public T xpath(String text, Map<String, String> namespaces) { return delegate.xpath(text, namespaces); }

/** Evaluates an <a href="http://camel.apache.org/xquery.html">XQuery expression</a>. */
public T xquery(String text) { return delegate.xquery(text); }

/** Evaluates an XQuery expression on the named header's contents. */
public T xquery(String text, String headerName) { return delegate.xquery(text, headerName); }

/** Evaluates an XQuery expression with the given result type. */
public T xquery(String text, Class<?> resultType) { return delegate.xquery(text, resultType); }

/** Evaluates an XQuery expression with a result type on the named header's contents. */
public T xquery(String text, Class<?> resultType, String headerName) {
    return delegate.xquery(text, resultType, headerName);
}

/** Evaluates an XQuery expression with a result type and namespace prefixes/URIs. */
public T xquery(String text, Class<?> resultType, Namespaces namespaces) {
    return delegate.xquery(text, resultType, namespaces);
}

/** Evaluates an XQuery expression with a result type and namespaces on the named header's contents. */
public T xquery(String text, Class<?> resultType, Namespaces namespaces, String headerName) {
    return delegate.xquery(text, resultType, namespaces, headerName);
}

/** Evaluates an XQuery expression with a result type and a map of namespace prefixes to URIs. */
public T xquery(String text, Class<?> resultType, Map<String, String> namespaces) {
    return delegate.xquery(text, resultType, namespaces);
}

/** Evaluates an XQuery expression with namespace prefixes/URIs. */
public T xquery(String text, Namespaces namespaces) { return delegate.xquery(text, namespaces); }

/** Evaluates an XQuery expression with a map of namespace prefixes to URIs. */
public T xquery(String text, Map<String, String> namespaces) { return delegate.xquery(text, namespaces); }

/**
 * Evaluates the given language name with the expression text.
 *
 * @param language   the name of the language
 * @param expression the expression in the given language
 */
public T language(String language, String expression) { return delegate.language(language, expression); }

// Properties
// -------------------------------------------------------------------------

public Expression getExpressionValue() { return delegate.getExpressionValue(); }

public ExpressionFactory getExpressionType() { return delegate.getExpressionType(); }

@Override
public void init(CamelContext context) {
    // Double-checked locking: create and initialize the expression at most once.
    if (expr == null) {
        synchronized (this) {
            if (expr == null) {
                Expression created = getExpressionValue();
                if (created == null) {
                    created = delegate.getExpressionType().createExpression(context);
                }
                created.init(context);
                expr = created;
            }
        }
    }
}

@Override
public void initPredicate(CamelContext context) {
    // Double-checked locking: create and initialize the predicate at most once.
    if (pred == null) {
        synchronized (this) {
            if (pred == null) {
                Expression value = getExpressionValue();
                if (value == null) {
                    pred = delegate.getPredicateType().createPredicate(context);
                } else {
                    pred = ExpressionToPredicateAdapter.toPredicate(value);
                }
                pred.initPredicate(context);
            }
        }
    }
}

@Override
public <T> T evaluate(Exchange exchange, Class<T> type) {
    init(exchange.getContext());
    return expr.evaluate(exchange, type);
}

@Override
public boolean matches(Exchange exchange) {
    initPredicate(exchange.getContext());
    return pred.matches(exchange);
}
}
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config.materials.dependency; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.thoughtworks.go.config.*; import com.thoughtworks.go.config.materials.AbstractMaterialConfig; import com.thoughtworks.go.config.materials.Filter; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.DependencyFilter; import static com.thoughtworks.go.util.ExceptionUtils.bombIfNull; @ConfigTag(value = "pipeline", label = "Pipeline") public class DependencyMaterialConfig extends AbstractMaterialConfig implements ParamsAttributeAware { public static final String PIPELINE_NAME = "pipelineName"; public static final String STAGE_NAME = "stageName"; public static final String PIPELINE_STAGE_NAME = "pipelineStageName"; public static final String IGNORE_FOR_SCHEDULING = "ignoreForScheduling"; public static final String TYPE = "DependencyMaterial"; private static final Pattern PIPELINE_STAGE_COMBINATION_PATTERN = Pattern.compile("^(.+) (\\[.+\\])$"); public static final String ORIGIN = "origin"; @ConfigAttribute(value = "pipelineName") private com.thoughtworks.go.config.CaseInsensitiveString pipelineName = new CaseInsensitiveString("Unknown"); @ConfigAttribute(value = "stageName") private CaseInsensitiveString stageName = new CaseInsensitiveString("Unknown"); 
@ConfigAttribute(value = "ignoreForScheduling") private boolean ignoreForScheduling = false; private String pipelineStageName; public DependencyMaterialConfig() { super(TYPE); } public DependencyMaterialConfig(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName) { this(null, pipelineName, stageName, false); } public DependencyMaterialConfig(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName, boolean ignoreForScheduling) { this(null, pipelineName, stageName, ignoreForScheduling); } public DependencyMaterialConfig(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName, final String serverAlias) { this(null, pipelineName, stageName, false); } public DependencyMaterialConfig(final CaseInsensitiveString name, final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName) { this(name, pipelineName, stageName, false); } public DependencyMaterialConfig(final CaseInsensitiveString name, final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName, final boolean ignoreForScheduling) { super(TYPE, name, new ConfigErrors()); bombIfNull(pipelineName, "null pipelineName"); bombIfNull(stageName, "null stageName"); this.pipelineName = pipelineName; this.stageName = stageName; this.ignoreForScheduling = ignoreForScheduling; } @Override public CaseInsensitiveString getName() { return super.getName() == null ? 
pipelineName : super.getName(); } public String getUserName() { return "cruise"; } @Override public String getLongDescription() { return getDescription(); } @Override public Filter filter() { return new DependencyFilter(); } @Override public boolean isInvertFilter() { return false; } @Override public boolean matches(String name, String regex) { return false; } @Override public String getDescription() { return CaseInsensitiveString.str(pipelineName); } @Override public String getTypeForDisplay() { return "Pipeline"; } @Override public boolean isAutoUpdate() { return true; } @Override public void setAutoUpdate(boolean autoUpdate) { } @Override protected void appendCriteria(Map<String, Object> parameters) { parameters.put("pipelineName", CaseInsensitiveString.str(pipelineName)); parameters.put("stageName", CaseInsensitiveString.str(stageName)); } @Override protected void appendAttributes(Map<String, Object> parameters) { appendCriteria(parameters); } public CaseInsensitiveString getPipelineName() { return pipelineName; } public void setPipelineName(CaseInsensitiveString pipelineName) { this.pipelineName = pipelineName; } public CaseInsensitiveString getStageName() { return stageName; } public void setStageName(CaseInsensitiveString stageName) { this.stageName = stageName; } public boolean ignoreForScheduling() { return ignoreForScheduling; } public void ignoreForScheduling(boolean ignoreForScheduling) { this.ignoreForScheduling = ignoreForScheduling; } @Override public String getFolder() { return null; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DependencyMaterialConfig that = (DependencyMaterialConfig) o; if (type != null ? !type.equals(that.type) : that.type != null) { return false; } if (pipelineName != null ? !pipelineName.equals(that.pipelineName) : that.pipelineName != null) { return false; } if (stageName != null ? 
!stageName.equals(that.stageName) : that.stageName != null) { return false; } return true; } @Override public int hashCode() { int result = (type != null ? type.hashCode() : 0); result = 31 * result + (pipelineName != null ? pipelineName.hashCode() : 0); result = 31 * result + (stageName != null ? stageName.hashCode() : 0); return result; } @Override public String toString() { return "DependencyMaterialConfig{" + "pipelineName='" + pipelineName + '\'' + ", stageName='" + stageName + '\'' + '}'; } @Override public String getDisplayName() { return CaseInsensitiveString.str(getName()); } @Override protected void validateConcreteMaterial(ValidationContext validationContext) { CaseInsensitiveString upstreamPipelineName = this.getPipelineName(); CaseInsensitiveString upstreamStageName = this.getStageName(); PipelineConfig upstreamPipeline = validationContext.getPipelineConfigByName(upstreamPipelineName); PipelineConfig pipeline = validationContext.getPipeline(); if (upstreamPipeline==null) { errors.add(DependencyMaterialConfig.PIPELINE_STAGE_NAME, String.format("Pipeline with name '%s' does not exist, it is defined as a dependency for pipeline '%s' (%s)", upstreamPipelineName, pipeline.name(), pipeline.getOriginDisplayName())); } else if (upstreamPipeline.findBy(upstreamStageName) == null) { errors.add(DependencyMaterialConfig.PIPELINE_STAGE_NAME, String.format("Stage with name '%s' does not exist on pipeline '%s', it is being referred to from pipeline '%s' (%s)", upstreamStageName, upstreamPipelineName, pipeline.name(), pipeline.getOriginDisplayName())); } } @Override public String getUriForDisplay() { return String.format("%s / %s", pipelineName, stageName); } public void validateUniqueness(Set<CaseInsensitiveString> dependencies) { CaseInsensitiveString upstreamPipelineName = pipelineName; if (dependencies.contains(upstreamPipelineName)) { String message = (String.format("A pipeline can depend on each upstream pipeline only once. 
Remove one of the occurrences of '%s' from the current pipeline dependencies.", upstreamPipelineName)); errors.add(PIPELINE_STAGE_NAME, message); } dependencies.add(pipelineName); } @Override public void setConfigAttributes(Object attributes) { resetCachedIdentityAttributes(); if (attributes == null) { return; } Map attributesMap = (Map) attributes; if (attributesMap.containsKey(MATERIAL_NAME)) { name = new CaseInsensitiveString((String) attributesMap.get(MATERIAL_NAME)); if (CaseInsensitiveString.isBlank(name)) { name = null; } } if (attributesMap.containsKey(PIPELINE_STAGE_NAME)) { pipelineStageName = (String) attributesMap.get(PIPELINE_STAGE_NAME); Matcher matcher = PIPELINE_STAGE_COMBINATION_PATTERN.matcher(pipelineStageName); if(matcher.matches()){ pipelineName = new CaseInsensitiveString(matcher.group(1)); String stageNameWithBrackets = matcher.group(2); stageName = new CaseInsensitiveString(stageNameWithBrackets.replace("[","").replace("]","")); } else { errors.add(PIPELINE_STAGE_NAME, String.format("'%s' should conform to the pattern 'pipeline [stage]'",pipelineStageName)); } } this.ignoreForScheduling = "true".equals(attributesMap.get(IGNORE_FOR_SCHEDULING)); } public String getPipelineStageName() { if (pipelineStageName != null) { return pipelineStageName; } if (CaseInsensitiveString.isBlank(pipelineName) || CaseInsensitiveString.isBlank(stageName)) { return null; } return String.format("%s [%s]", pipelineName, stageName); } @Override public Boolean isUsedInFetchArtifact(PipelineConfig pipelineConfig){ List<FetchTask> fetchTasks = pipelineConfig.getFetchTasks(); for (FetchTask fetchTask : fetchTasks) { if(pipelineName.equals(fetchTask.getDirectParentInAncestorPath())) return true; } return false; } @Override protected void appendPipelineUniqueCriteria(Map<String, Object> basicCriteria) { // Dependency materials are already unique within a pipeline } }
package baitaplon;

import java.awt.Color;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JColorChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JRadioButton;
import javax.swing.JTextField;

/**
 * Main window of a simple function-plotting application.
 *
 * The user types an expression for y = f(x) into a text field; "draw" adds it
 * to the list of plotted functions and "delete" erases the most recently added
 * one. A background thread continuously redraws the axes and all stored
 * functions onto a custom panel ({@code mypanel}, a project-local component
 * that exposes setcolor/drawline/drawstring and mouse coordinates).
 *
 * NOTE(review): expression evaluation is delegated to {@code Main.kq(m, s)} —
 * presumably "kết quả" (result) of evaluating expression s at x = m; confirm
 * against the Main class.
 */
public class myframe extends JFrame{
// Pixels per axis unit (x and y).
public int scalex=30;
public int scaley=30;
// Pixel coordinates of the origin inside the 600x500 drawing panel.
public int ox=300;
public int oy=300;
// Expressions currently plotted, in insertion order.
public static ArrayList<String> function;
// Text of the expression field, captured on focus loss.
public static String str;
public static mypanel mp;
public static JColorChooser clrc;
// NOTE(review): confirm, btntt and vt are declared but never created or used here.
public static JButton draw,cancle,confirm,btntt,vt;
public static ButtonGroup type;
// NOTE(review): b1/b2 are never initialized before type.add(...) below;
// ButtonGroup.add silently ignores null, so this looks like leftover code.
public static JRadioButton b1,b2;
public static JLabel ly,ltt,hc;
public static JTextField ta,tttx,ttty;
// Shared scratch variables; note the plot loop in ve() uses the STATIC x as
// its loop counter, so plotting is not reentrant.
public static int y,x,i,j;
public static float a,b,c;
// Set by the "delete" button; consumed by the render thread.
public static boolean del=false;
public static myframe myfr;
public Point po=new Point();

/** Converts a panel pixel column k to a mathematical x value. */
public float x(float k){
    return (k-ox)/scalex;
}

/** Evaluates expression s at x = m via the project-local parser. */
public float y(String s,float m){
    return Main.kq(m,s);
}

/** Converts a mathematical y value l to a panel pixel row (y axis points up). */
public int yp(float l){
    return (int)(-l*scaley+oy);
}

/**
 * Plots expression s across the full panel width in colour c by drawing a
 * short segment between each pair of adjacent pixel columns.
 */
public void ve(String s,mypanel p,Color c){
    p.setcolor(c);
    for(x=0;x<600;x++)
    {
        p.drawline(x,yp(y(s,x(x))),x+1,yp(y(s,x(x+1))));
    }
}

/** Draws the x/y axes, arrowheads, tick marks and tick labels. */
public void vetruc(){
    Color c=mp.getcolor(); // remember the current plot colour
    mp.setcolor(Color.black); // switch to black while drawing the axes
    mp.drawline(0, oy, 600, oy);
    // Arrowhead at the right end of the x axis.
    mp.drawline(595, oy-5, 600, oy);
    mp.drawline(595, oy+5, 600, oy);
    mp.drawline(ox, 0, ox, 500);
    // Arrowhead at the top of the y axis.
    mp.drawline(ox, 0, ox+5, 5);
    mp.drawline(ox, 0, ox-5, 5);
    mp.drawstring("x", 570, oy-30);
    mp.drawstring("y", ox+30, 30);
    // Tick marks and integer labels along the x axis.
    for(int x=0;x<570;x=x+scalex){
        if((x-ox)%scalex==0)
        {
            mp.drawline(x, oy-1, x, oy+1);
            mp.drawstring(String.valueOf(x/scalex-oy/scalex), x-10, oy+20);
        }
    }
    // Tick marks and integer labels along the y axis (origin label skipped).
    for(int x=0;x<500;x=x+30){
        if((x-oy)%30==0&&x!=oy)
        {
            mp.drawline(ox-1, x, ox+1, x);
            mp.drawstring(String.valueOf((oy-x)/scaley), ox+5, x);
        }
    }
    mp.setcolor(c); // restore the plot colour
}

/**
 * Builds the window: colour chooser, draw/delete buttons, expression field,
 * selected-point read-outs, and a background thread that creates the plot
 * panel and redraws axes plus all stored functions in an endless loop.
 *
 * @param s window title
 */
public myframe(String s){
    super(s);
    this.setSize(1000, 700);
    this.setLayout(null);
    this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    myfr=this;
    function=new ArrayList<>();
    clrc =new JColorChooser();
    clrc.setBounds(650, 30, 300, 300);
    this.add(clrc);
    clrc.setColor(Color.black);
    // The chosen colour is applied to the panel when the mouse leaves the chooser.
    clrc.addMouseListener(new MouseListener() {
        @Override
        public void mouseReleased(MouseEvent arg0) {
        }
        @Override
        public void mousePressed(MouseEvent arg0) {
        }
        @Override
        public void mouseExited(MouseEvent arg0) {
            mp.setcolor(clrc.getColor());
        }
        @Override
        public void mouseEntered(MouseEvent arg0) {
        }
        @Override
        public void mouseClicked(MouseEvent arg0) {
        }
    });
    draw=new JButton("draw");
    draw.setBounds(650, 400, 100, 40);
    this.add(draw);
    // "draw" appends the last committed expression; the render thread picks it up.
    draw.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent arg0) {
            function.add(str);
        }
    });
    cancle=new JButton("delete");
    cancle.setBounds(850, 400, 100, 40);
    this.add(cancle);
    // "delete" flags the most recent function for erasure by the render thread.
    cancle.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent arg0) {
            if(!function.isEmpty())
            {
                del=true;
            }
            vetruc();
        }
    });
    type=new ButtonGroup();
    // NOTE(review): b1 and b2 are still null here — these adds are no-ops.
    type.add(b1);
    type.add(b2);
    ly=new JLabel("y=");
    ly.setBounds(180, 600, 100, 20);
    this.add(ly);
    ta=new JTextField("0");
    ta.setBounds(200, 600, 400, 40);
    this.add(ta);
    // The expression is committed into str when the field loses focus.
    ta.addFocusListener(new FocusListener() {
        @Override
        public void focusLost(FocusEvent arg0) {
            str=ta.getText().toString();
        }
        @Override
        public void focusGained(FocusEvent arg0) {
        }
    });
    ltt=new JLabel("selected point (x:y)",(int) CENTER_ALIGNMENT);
    ltt.setBounds(730, 430, 150, 40);
    this.add(ltt);
    tttx=new JTextField();
    tttx.setBounds(640, 470, 150, 40);
    tttx.setText("0");
    this.add(tttx);
    ttty=new JTextField();
    ttty.setBounds(820, 470, 150, 40);
    ttty.setText("0");
    this.add(ttty);
    // Render loop runs off the EDT; it creates the plot panel, then forever
    // redraws axes and every stored function, erasing (repainting in white)
    // the newest function when the delete flag is set.
    Thread t=new Thread(new Runnable() {
        @Override
        public void run() {
            mp=new mypanel() {
                @Override
                public void mouseEntered(MouseEvent arg0) {
                }
                @Override
                public void mouseExited(MouseEvent arg0) {
                }
                @Override
                public void mousePressed(MouseEvent arg0) {
                }
                @Override
                public void mouseReleased(MouseEvent arg0) {
                    // Convert click pixel coordinates back to axis units
                    // (30 px per unit, origin at panel centre).
                    tttx.setText(String.valueOf(this.getx()/30-10));
                    ttty.setText(String.valueOf(-this.gety()/30+10));
                }
            };
            mp.setBounds(30, 30, 600, 500);
            mp.setBackground(Color.white);
            myfr.add(mp);
            mp.setcolor(Color.black); // default plot colour
            while (true) {
                vetruc();
                if(!function.isEmpty())
                {
                    for(int i=0;i<function.size();i++)
                    {
                        String s=function.get(i);
                        ve(s,mp, mp.getcolor());
                        try {
                            Thread.sleep(100);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                    if(del==true)
                    {
                        // Erase by re-plotting the newest function in the
                        // background colour, then drop it from the list.
                        String s=function.get(function.size()-1);
                        Color c=mp.getcolor();
                        ve(s,mp,Color.white);
                        function.remove(function.size()-1);
                        del=false;
                        mp.setcolor(c);
                    }
                }
            }
        }
    });
    t.start();
}
}
/*
 * Copyright (c) 2010-2012 LinkedIn, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package krati.retention.clock;

import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.Arrays;

/**
 * Clock - A vector clock of long values.
 *
 * @version 0.4.2
 * @author jwu
 *
 * <p>
 * 08/11, 2011 - Created <br/>
 * 09/27, 2011 - Updated compareTo to return Occurred <br/>
 */
public final class Clock implements Serializable {
    private static final long serialVersionUID = 1L;

    // The per-source counters of this vector clock; never modified after construction.
    private final long[] _values;

    /**
     * The smallest Clock.
     */
    public static final Clock ZERO = new Clock();

    /**
     * Constructs a new instance of Clock.
     *
     * @param values - a long array representing this Clock.
     */
    public Clock(long... values) {
        this._values = values;
    }

    /**
     * @return a long array representing this Clock.
     */
    public long[] values() {
        return _values;
    }

    /**
     * Parses a Clock value from its string representation.
     *
     * @param str - the string representation of Clock (colon-separated longs)
     * @return A Clock object.
     *         <code>Clock.ZERO</code> is returned upon null or zero-length string.
     */
    public static Clock parseClock(String str) {
        if (str == null || str.length() == 0) {
            return Clock.ZERO;
        }

        String[] parts = str.split(":");
        long[] values = new long[parts.length];
        for (int i = 0; i < values.length; i++) {
            values[i] = Long.parseLong(parts[i]);
        }
        return new Clock(values);
    }

    /**
     * Parses a Clock value from its raw bytes.
     *
     * @param raw - the raw bytes of Clock (big-endian longs)
     * @return a Clock object.
     *         <code>Clock.ZERO</code> is returned upon <code>null</code> or a byte
     *         array with the length less than 8. Trailing bytes beyond the last
     *         full 8-byte group are ignored.
     */
    public static Clock parseClock(byte[] raw) {
        if (raw == null || raw.length < 8) {
            return Clock.ZERO;
        }

        int cnt = raw.length >> 3;
        long[] values = new long[cnt];
        ByteBuffer bb = ByteBuffer.wrap(raw);
        for (int i = 0; i < values.length; i++) {
            values[i] = bb.getLong();
        }
        return new Clock(values);
    }

    /**
     * Gets the string representation of Clock in a colon separated list, e.g. <tt>16335:16912:15999</tt>.
     *
     * @return a string representation of Clock
     */
    @Override
    public String toString() {
        StringBuilder b = new StringBuilder();
        if (_values != null && 1 <= _values.length) {
            b.append(_values[0]);
            for (int i = 1; i < _values.length; i++) {
                b.append(':').append(_values[i]);
            }
        }
        return b.toString();
    }

    /**
     * @return a byte array representing this Clock (big-endian longs), or an
     *         empty array when this Clock carries no values.
     */
    public byte[] toByteArray() {
        if (_values != null) {
            byte[] byteArray = new byte[_values.length << 3];
            ByteBuffer byteBuffer = ByteBuffer.wrap(byteArray);
            for (int i = 0; i < _values.length; i++) {
                byteBuffer.putLong(_values[i]);
            }
            return byteArray;
        } else {
            return new byte[0];
        }
    }

    /**
     * Compares this clock with the specified clock for ordering.
     *
     * Two clocks are comparable only when they have the same number of values;
     * otherwise an IncomparableClocksException is thrown. The ZERO checks below
     * are identity checks (by design, ZERO is a shared singleton).
     *
     * FIX(review): element comparison previously used long subtraction
     * (<code>_values[i] - dst[i]</code>), which overflows for operands more than
     * Long.MAX_VALUE apart and could misclassify the ordering. Direct
     * <code>&lt;</code>/<code>&gt;</code> comparisons are overflow-free.
     *
     * @param c - a clock to compare.
     * @throws IncomparableClocksException if the two clocks have different dimensions
     */
    public Occurred compareTo(Clock c) {
        if (this == c) return Occurred.EQUICONCURRENTLY;
        if (ZERO == c) return Occurred.AFTER;
        if (this == ZERO) return Occurred.BEFORE;

        try {
            final long[] dst = c.values();
            final int len = dst.length;
            if (_values.length == len) {
                boolean anyLess = false;    // some component of this clock is behind c
                boolean anyGreater = false; // some component of this clock is ahead of c
                for (int i = 0; i < len; i++) {
                    if (_values[i] < dst[i]) {
                        anyLess = true;
                    } else if (_values[i] > dst[i]) {
                        anyGreater = true;
                    }
                }
                if (!anyLess && !anyGreater) {
                    return Occurred.EQUICONCURRENTLY;
                } else if (!anyGreater) {
                    // every component <= the other clock's
                    return Occurred.BEFORE;
                } else if (!anyLess) {
                    // every component >= the other clock's
                    return Occurred.AFTER;
                } else {
                    return Occurred.CONCURRENTLY;
                }
            }
        } catch (Exception e) {
            // Fall through: a null values array (or any other failure) makes
            // the clocks incomparable, same as a dimension mismatch.
        }

        throw new IncomparableClocksException(this, c);
    }

    /**
     * @return <code>true</code> if this Clock occurred before the specified Clock <code>c</code>.
     *         Otherwise, <code>false</code>.
     */
    public boolean before(Clock c) {
        return compareTo(c) == Occurred.BEFORE;
    }

    /**
     * @return <code>true</code> if this Clock occurred after the specified Clock <code>c</code>.
     *         Otherwise, <code>false</code>.
     */
    public boolean after(Clock c) {
        return compareTo(c) == Occurred.AFTER;
    }

    /**
     * @return <code>true</code> if this Clock is equal to or occurred before the specified Clock <code>c</code>.
     *         Otherwise, <code>false</code>.
     */
    public boolean beforeEqual(Clock c) {
        Occurred o = compareTo(c);
        return o == Occurred.BEFORE || o == Occurred.EQUICONCURRENTLY;
    }

    /**
     * @return <code>true</code> if this Clock is equal to or occurred after the specified Clock <code>c</code>.
     *         Otherwise, <code>false</code>.
     */
    public boolean afterEqual(Clock c) {
        Occurred o = compareTo(c);
        return o == Occurred.AFTER || o == Occurred.EQUICONCURRENTLY;
    }

    @Override
    public boolean equals(Object o) {
        if (null == o) return false;
        if (o.getClass() != this.getClass()) return false;

        Clock c = (Clock) o;
        return Arrays.equals(this._values, c._values);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(_values);
    }
}
/*******************************************************************************
 * Copyright 2013 SAP AG
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package com.sap.core.odata.processor.core.jpa.jpql;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import javax.persistence.Cache;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.PersistenceUnitUtil;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.metamodel.Metamodel;

import org.easymock.EasyMock;
import org.junit.Test;

import com.sap.core.odata.api.edm.EdmEntitySet;
import com.sap.core.odata.api.edm.EdmEntityType;
import com.sap.core.odata.api.edm.EdmException;
import com.sap.core.odata.api.edm.EdmMapping;
import com.sap.core.odata.api.edm.EdmProperty;
import com.sap.core.odata.api.edm.EdmSimpleType;
import com.sap.core.odata.api.exception.ODataException;
import com.sap.core.odata.api.uri.KeyPredicate;
import com.sap.core.odata.api.uri.NavigationSegment;
import com.sap.core.odata.api.uri.expression.OrderByExpression;
import com.sap.core.odata.api.uri.info.GetEntitySetUriInfo;
import com.sap.core.odata.api.uri.info.GetEntityUriInfo;
import com.sap.core.odata.processor.api.jpa.factory.JPAAccessFactory;
import com.sap.core.odata.processor.api.jpa.factory.ODataJPAAccessFactory;
import com.sap.core.odata.processor.api.jpa.jpql.JPQLContext;
import com.sap.core.odata.processor.api.jpa.jpql.JPQLContext.JPQLContextBuilder;
import com.sap.core.odata.processor.api.jpa.jpql.JPQLContextType;
import com.sap.core.odata.processor.api.jpa.jpql.JPQLStatement.JPQLStatementBuilder;
import com.sap.core.odata.processor.core.jpa.ODataJPAContextImpl;
import com.sap.core.odata.processor.core.jpa.access.data.JPAProcessorImplTest;
import com.sap.core.odata.processor.core.jpa.common.ODataJPATestConstants;
import com.sap.core.odata.processor.core.jpa.factory.ODataJPAFactoryImpl;
import com.sap.core.odata.processor.core.jpa.jpql.JPQLSelectContext.JPQLSelectContextBuilder;
import com.sap.core.odata.processor.core.jpa.jpql.JPQLSelectSingleContext.JPQLSelectSingleContextBuilder;

/**
 * Unit tests for {@code ODataJPAFactoryImpl}'s JPQL builder factory: verifies
 * that each {@link JPQLContextType} yields the matching statement-builder and
 * context-builder implementation, and that the JPA access factories produce
 * non-null processors/providers. URI-info inputs are EasyMock stubs.
 */
public class JPQLBuilderFactoryTest {

    /** SELECT context must map to JPQLSelectStatementBuilder. */
    @Test
    public void testGetStatementBuilderFactoryforSelect() throws ODataException {
        GetEntitySetUriInfo getEntitySetView = getUriInfo();

        // Build JPQL Context
        JPQLContext selectContext = JPQLContext.createBuilder(
                JPQLContextType.SELECT, getEntitySetView).build();
        JPQLStatementBuilder statementBuilder = new ODataJPAFactoryImpl()
                .getJPQLBuilderFactory().getStatementBuilder(selectContext);

        assertTrue(statementBuilder instanceof JPQLSelectStatementBuilder);
    }

    /** SELECT_SINGLE context must map to JPQLSelectSingleStatementBuilder. */
    @Test
    public void testGetStatementBuilderFactoryforSelectSingle()
            throws ODataException {
        GetEntityUriInfo getEntityView = getEntityUriInfo();

        // Build JPQL Context
        JPQLContext selectContext = JPQLContext.createBuilder(
                JPQLContextType.SELECT_SINGLE, getEntityView).build();
        JPQLStatementBuilder statementBuilder = new ODataJPAFactoryImpl()
                .getJPQLBuilderFactory().getStatementBuilder(selectContext);

        assertTrue(statementBuilder instanceof JPQLSelectSingleStatementBuilder);
    }

    /** JOIN context must map to JPQLJoinStatementBuilder. */
    @Test
    public void testGetStatementBuilderFactoryforJoinSelect()
            throws ODataException {
        GetEntitySetUriInfo getEntitySetView = getUriInfo();

        // Build JPQL Context
        JPQLContext selectContext = JPQLContext.createBuilder(
                JPQLContextType.JOIN, getEntitySetView).build();
        JPQLStatementBuilder statementBuilder = new ODataJPAFactoryImpl()
                .getJPQLBuilderFactory().getStatementBuilder(selectContext);

        assertTrue(statementBuilder instanceof JPQLJoinStatementBuilder);
    }

    /** JOIN_SINGLE context must map to JPQLJoinSelectSingleStatementBuilder. */
    @Test
    public void testGetStatementBuilderFactoryforJoinSelectSingle()
            throws ODataException {
        GetEntityUriInfo getEntityView = getEntityUriInfo();

        // Build JPQL Context
        JPQLContext selectContext = JPQLContext.createBuilder(
                JPQLContextType.JOIN_SINGLE, getEntityView).build();
        JPQLStatementBuilder statementBuilder = new ODataJPAFactoryImpl()
                .getJPQLBuilderFactory().getStatementBuilder(selectContext);

        assertTrue(statementBuilder instanceof JPQLJoinSelectSingleStatementBuilder);
    }

    /** DELETE is unsupported: the factory returns no context builder. */
    @Test
    public void testGetContextBuilderforDelete() throws ODataException {
        // Build JPQL ContextBuilder
        JPQLContextBuilder contextBuilder = new ODataJPAFactoryImpl()
                .getJPQLBuilderFactory().getContextBuilder(
                        JPQLContextType.DELETE);

        assertNull(contextBuilder);
    }

    /** SELECT context builder must be a JPQLSelectContextBuilder. */
    @Test
    public void testGetContextBuilderforSelect() throws ODataException {
        // Build JPQL ContextBuilder
        JPQLContextBuilder contextBuilder = new ODataJPAFactoryImpl()
                .getJPQLBuilderFactory().getContextBuilder(
                        JPQLContextType.SELECT);

        assertNotNull(contextBuilder);
        assertTrue(contextBuilder instanceof JPQLSelectContextBuilder);
    }

    /** SELECT_SINGLE context builder must be a JPQLSelectSingleContextBuilder. */
    @Test
    public void testGetContextBuilderforSelectSingle() throws ODataException {
        // Build JPQL ContextBuilder
        JPQLContextBuilder contextBuilder = new ODataJPAFactoryImpl()
                .getJPQLBuilderFactory().getContextBuilder(
                        JPQLContextType.SELECT_SINGLE);

        assertNotNull(contextBuilder);
        assertTrue(contextBuilder instanceof JPQLSelectSingleContextBuilder);
    }

    /**
     * Builds a stubbed GetEntitySetUriInfo for an "SOItem" target entity set
     * reached from an "SOHeader" start set, with one key predicate
     * (Field1 = '1') and no select/filter/navigation.
     * Note: mocks are replayed as soon as their stubbing is complete, so the
     * expect(...) ordering below matters.
     */
    private GetEntitySetUriInfo getUriInfo() throws EdmException {
        GetEntitySetUriInfo getEntitySetView = EasyMock
                .createMock(GetEntitySetUriInfo.class);
        EdmEntitySet edmEntitySet = EasyMock.createMock(EdmEntitySet.class);
        EdmEntityType edmEntityType = EasyMock.createMock(EdmEntityType.class);
        EasyMock.expect(edmEntityType.getMapping()).andStubReturn(null);
        EasyMock.expect(edmEntityType.getName()).andStubReturn("SOItem");
        EasyMock.replay(edmEntityType);
        OrderByExpression orderByExpression = EasyMock
                .createMock(OrderByExpression.class);
        EasyMock.expect(getEntitySetView.getTargetEntitySet()).andStubReturn(
                edmEntitySet);
        EdmEntitySet startEdmEntitySet = EasyMock.createMock(EdmEntitySet.class);
        EdmEntityType startEdmEntityType = EasyMock.createMock(EdmEntityType.class);
        EasyMock.expect(startEdmEntityType.getMapping()).andStubReturn(null);
        EasyMock.expect(startEdmEntityType.getName()).andStubReturn("SOHeader");
        EasyMock.expect(startEdmEntitySet.getEntityType()).andStubReturn(startEdmEntityType);
        EasyMock.expect(getEntitySetView.getStartEntitySet()).andStubReturn(
                startEdmEntitySet);
        EasyMock.replay(startEdmEntityType, startEdmEntitySet);
        EasyMock.expect(getEntitySetView.getOrderBy()).andStubReturn(
                orderByExpression);
        EasyMock.expect(getEntitySetView.getSelect()).andStubReturn(null);
        EasyMock.expect(getEntitySetView.getFilter()).andStubReturn(null);
        List<NavigationSegment> navigationSegments = new ArrayList<NavigationSegment>();
        EasyMock.expect(getEntitySetView.getNavigationSegments())
                .andStubReturn(navigationSegments);
        // Single key predicate: property "Field1" (internal name "Field1") = "1".
        KeyPredicate keyPredicate = EasyMock
                .createMock(KeyPredicate.class);
        EdmProperty kpProperty = EasyMock
                .createMock(EdmProperty.class);
        EdmSimpleType edmType = EasyMock
                .createMock(EdmSimpleType.class);
        EdmMapping edmMapping = EasyMock.createMock(EdmMapping.class);
        EasyMock.expect(edmMapping.getInternalName()).andStubReturn("Field1");
        EasyMock.expect(keyPredicate.getLiteral()).andStubReturn("1");
        try {
            EasyMock.expect(kpProperty.getName()).andStubReturn("Field1");
            EasyMock.expect(kpProperty.getType()).andStubReturn(edmType);
            EasyMock.expect(kpProperty.getMapping()).andStubReturn(edmMapping);
        } catch (EdmException e2) {
            fail("this should not happen");
        }
        EasyMock.expect(keyPredicate.getProperty()).andStubReturn(kpProperty);
        EasyMock.replay(edmMapping, edmType, kpProperty, keyPredicate);
        List<KeyPredicate> keyPredicates = new ArrayList<KeyPredicate>();
        keyPredicates.add(keyPredicate);
        EasyMock.expect(getEntitySetView.getKeyPredicates()).andStubReturn(keyPredicates);
        EasyMock.replay(getEntitySetView);
        EasyMock.expect(edmEntitySet.getEntityType()).andStubReturn(
                edmEntityType);
        EasyMock.replay(edmEntitySet);
        return getEntitySetView;
    }

    /**
     * Builds a stubbed GetEntityUriInfo (single-entity read) with an unnamed
     * target entity type, an "SOHeader" start set, and empty key predicates
     * and navigation segments.
     */
    private GetEntityUriInfo getEntityUriInfo() throws EdmException {
        GetEntityUriInfo getEntityView = EasyMock
                .createMock(GetEntityUriInfo.class);
        EdmEntitySet edmEntitySet = EasyMock.createMock(EdmEntitySet.class);
        EdmEntityType edmEntityType = EasyMock.createMock(EdmEntityType.class);
        EasyMock.expect(edmEntityType.getKeyProperties()).andStubReturn(
                new ArrayList<EdmProperty>());
        EasyMock.expect(edmEntityType.getMapping()).andStubReturn(null);
        EasyMock.expect(edmEntityType.getName()).andStubReturn("");
        EasyMock.expect(edmEntitySet.getEntityType()).andStubReturn(
                edmEntityType);
        EasyMock.expect(getEntityView.getSelect()).andStubReturn(null);
        EasyMock.expect(getEntityView.getTargetEntitySet()).andStubReturn(
                edmEntitySet);
        EdmEntitySet startEdmEntitySet = EasyMock.createMock(EdmEntitySet.class);
        EdmEntityType startEdmEntityType = EasyMock.createMock(EdmEntityType.class);
        EasyMock.expect(startEdmEntityType.getMapping()).andStubReturn(null);
        EasyMock.expect(startEdmEntityType.getName()).andStubReturn("SOHeader");
        EasyMock.expect(startEdmEntitySet.getEntityType()).andStubReturn(startEdmEntityType);
        EasyMock.expect(getEntityView.getStartEntitySet()).andStubReturn(
                startEdmEntitySet);
        EasyMock.replay(startEdmEntityType, startEdmEntitySet);
        EasyMock.replay(edmEntityType, edmEntitySet);
        EasyMock.expect(getEntityView.getKeyPredicates()).andStubReturn(
                new ArrayList<KeyPredicate>());
        List<NavigationSegment> navigationSegments = new ArrayList<NavigationSegment>();
        EasyMock.expect(getEntityView.getNavigationSegments()).andStubReturn(
                navigationSegments);
        EasyMock.replay(getEntityView);
        return getEntityView;
    }

    /**
     * JPAAccessFactory must hand out a processor and an EDM model view for a
     * context whose entity manager is injected reflectively (the private "em"
     * field) and whose EMF is a do-nothing stub.
     */
    @Test
    public void testJPAAccessFactory() {
        ODataJPAFactoryImpl oDataJPAFactoryImpl = new ODataJPAFactoryImpl();
        JPAAccessFactory jpaAccessFactory = oDataJPAFactoryImpl
                .getJPAAccessFactory();
        ODataJPAContextImpl oDataJPAContextImpl = new ODataJPAContextImpl();
        Class<?> clazz = oDataJPAContextImpl.getClass();
        try {
            // Inject a real local EntityManager into the private field,
            // bypassing the setter.
            Field field = clazz.getDeclaredField("em");
            field.setAccessible(true);
            field.set(oDataJPAContextImpl, new JPAProcessorImplTest().getLocalEntityManager());
        } catch (SecurityException e) {
            fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
                    + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
        } catch (NoSuchFieldException e) {
            fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
                    + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
        } catch (IllegalArgumentException e) {
            fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
                    + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
        } catch (IllegalAccessException e) {
            fail(ODataJPATestConstants.EXCEPTION_MSG_PART_1 + e.getMessage()
                    + ODataJPATestConstants.EXCEPTION_MSG_PART_2);
        }
        // Minimal stub EMF; every method returns a default value.
        EntityManagerFactory emf = new EntityManagerFactory() {

            @Override
            public boolean isOpen() {
                return false;
            }

            @Override
            public Map<String, Object> getProperties() {
                return null;
            }

            @Override
            public PersistenceUnitUtil getPersistenceUnitUtil() {
                return null;
            }

            @Override
            public Metamodel getMetamodel() {
                return null;
            }

            @Override
            public CriteriaBuilder getCriteriaBuilder() {
                return null;
            }

            @Override
            public Cache getCache() {
                return null;
            }

            @SuppressWarnings("rawtypes")
            @Override
            public EntityManager createEntityManager(final Map arg0) {
                return null;
            }

            @Override
            public EntityManager createEntityManager() {
                return null;
            }

            @Override
            public void close() {}
        };
        oDataJPAContextImpl.setEntityManagerFactory(emf);
        oDataJPAContextImpl.setPersistenceUnitName("pUnit");

        assertNotNull(jpaAccessFactory.getJPAProcessor(oDataJPAContextImpl));
        assertNotNull(jpaAccessFactory.getJPAEdmModelView(oDataJPAContextImpl));
    }

    /**
     * ODataJPAAccessFactory must produce a message service, context, EDM
     * provider and processor for a context backed by a do-nothing stub EMF.
     */
    @Test
    public void testOdataJpaAccessFactory() {
        ODataJPAFactoryImpl oDataJPAFactoryImpl = new ODataJPAFactoryImpl();
        ODataJPAAccessFactory jpaAccessFactory = oDataJPAFactoryImpl
                .getODataJPAAccessFactory();
        ODataJPAContextImpl oDataJPAContextImpl = new ODataJPAContextImpl();

        // Minimal stub EMF; every method returns a default value.
        EntityManagerFactory emf = new EntityManagerFactory() {

            @Override
            public boolean isOpen() {
                return false;
            }

            @Override
            public Map<String, Object> getProperties() {
                return null;
            }

            @Override
            public PersistenceUnitUtil getPersistenceUnitUtil() {
                return null;
            }

            @Override
            public Metamodel getMetamodel() {
                return null;
            }

            @Override
            public CriteriaBuilder getCriteriaBuilder() {
                return null;
            }

            @Override
            public Cache getCache() {
                return null;
            }

            @SuppressWarnings("rawtypes")
            @Override
            public EntityManager createEntityManager(final Map arg0) {
                return null;
            }

            @Override
            public EntityManager createEntityManager() {
                return null;
            }

            @Override
            public void close() {
            }
        };
        oDataJPAContextImpl.setEntityManagerFactory(emf);
        oDataJPAContextImpl.setPersistenceUnitName("pUnit");

        assertNotNull(jpaAccessFactory.getODataJPAMessageService(new Locale(
                "en")));
        assertNotNull(jpaAccessFactory.createODataJPAContext());
        assertNotNull(jpaAccessFactory
                .createJPAEdmProvider(oDataJPAContextImpl));
        assertNotNull(jpaAccessFactory
                .createODataProcessor(oDataJPAContextImpl));
    }
}
/**
 *
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package commonj.sdo.impl;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.security.AccessController;
import java.security.PrivilegedAction;

import commonj.sdo.helper.CopyHelper;
import commonj.sdo.helper.DataFactory;
import commonj.sdo.helper.DataHelper;
import commonj.sdo.helper.EqualityHelper;
import commonj.sdo.helper.HelperContext;
import commonj.sdo.helper.TypeHelper;
import commonj.sdo.helper.XMLHelper;
import commonj.sdo.helper.XSDHelper;

/**
 * A HelperProvider obtains specific default helpers and other
 * implementation-specific objects used by a Java implementation of SDO.
 *
 * Discovery follows the JAR service-provider convention: the implementation
 * class name is taken from the "commonj.sdo.impl.HelperProvider" system
 * property if readable, otherwise from the first non-comment line of the
 * META-INF/services resource on the relevant classloader.
 *
 * @version $Rev$ $Date$
 */
public abstract class HelperProvider {
    /**
     * The default HelperProvider INSTANCE. This is located using the ClassLoader used
     * to load the HelperProvider class itself and if no default implementation is available
     * this field will be set to null.
     */
    public static HelperProvider INSTANCE;

    /**
     * The name of the resource that is used for service location.
     */
    public static final String SERVICE_RESOURCE_NAME = "META-INF/services/commonj.sdo.impl.HelperProvider";

    /**
     * The name of the system property that will be checked for an implementation name.
     */
    public static final String PROPERTY_NAME = "commonj.sdo.impl.HelperProvider";

    static {
        // initialize the default instance using this class's classloader
        // set to null if none could be located (implies no default implementation)
        HelperProvider provider;
        try {
            provider = getInstance(HelperProvider.class.getClassLoader());
        } catch (NoHelperProviderException e) {
            // deliberate: absence of a provider is a supported configuration
            provider = null;
        }
        INSTANCE = provider;
    }

    /**
     * Sets the default INSTANCE from the given classloader, but only if no
     * default was found by the static initializer. Failures are silently
     * ignored (INSTANCE stays null).
     */
    public static synchronized void setDefaultInstance(ClassLoader cl)
    {
        if (INSTANCE == null)
        {
            try {
                INSTANCE = getInstance(cl);
            } catch (NoHelperProviderException e) {
            }
        }
    }

    /**
     * Locate and instantiate a HelperProvider.
     * <p/>
     * Attempt to locate a HelperProvider using first the Thread's current context classloader and then,
     * if that is not set, not readable, or does not provide an implementation, using the classloader
     * used to load the HelperProvider class itself.
     * <p/>
     * A new instance is returned for each sucessful invocation.
     *
     * @return an implementation of HelperProvider
     * @throws NoHelperProviderException if no provider implementation was defined or it could not be instantiated
     */
    public static HelperProvider getInstance() throws NoHelperProviderException {
        String implName = getImplementationName();
        // First choice: the thread context classloader (may be null or unreadable).
        ClassLoader cl = getContextClassLoader();
        if (cl != null) {
            HelperProvider provider = loadImplementation(cl, implName);
            if (provider != null) {
                return provider;
            }
        }

        // Fallback: the classloader that loaded this class.
        cl = HelperProvider.class.getClassLoader();
        HelperProvider provider = loadImplementation(cl, implName);
        if (provider != null) {
            return provider;
        }

        throw new NoHelperProviderException(implName);
    }

    /**
     * Locate and instantiate a HelperProvider using the supplied ClassLoader.
     * <p/>
     * The name of the implementation to use is determined by the value of the "commonj.sdo.impl.HelperProvider"
     * system property. If this is not set or this code does not have permission to read it then the name
     * will be retrieved from the META-INF/services/commonj.sdo.impl.HelperProvider resource as returned
     * by the supplied classloader as described in the
     * <a href="http://java.sun.com/j2se/1.5.0/docs/guide/jar/jar.html#Service%20Provider">JAR file specification</a>.
     * <p/>
     * A new instance is returned for each sucessful invocation.
     *
     * @param cl the classloader to use to locate and instantiate the implementation
     * @return the specified implementation of HelperProvider
     * @throws NoHelperProviderException if no provider implementation was defined or it could not be instantiated
     */
    public static HelperProvider getInstance(ClassLoader cl) throws NoHelperProviderException {
        String implName = getImplementationName();
        HelperProvider provider = loadImplementation(cl, implName);
        if (provider == null) {
            throw new NoHelperProviderException(implName);
        }
        return provider;
    }

    /**
     * Returns the thread context classloader, or null if the security manager
     * denies access to it.
     */
    private static ClassLoader getContextClassLoader() {
        try {
            // NOTE(review): raw PrivilegedAction (pre-generics style); kept as-is.
            return (ClassLoader)AccessController.doPrivileged(new PrivilegedAction() {
                public Object run() {
                    return Thread.currentThread().getContextClassLoader();
                }
            });
        } catch (SecurityException e) {
            return null;
        }
    }

    /**
     * Instantiates the named implementation via the given classloader.
     * When implName is null the name is resolved from the service resource on
     * that classloader; returns null when no name can be determined at all.
     *
     * @throws NoHelperProviderException if the class cannot be loaded,
     *         instantiated, or accessed
     */
    private static HelperProvider loadImplementation(ClassLoader cl, String implName)
            throws NoHelperProviderException {

        // if no name is requested, locate using the supplied classloader
        if (implName == null) {
            implName = getImplementationName(cl);
        }

        // no implementation to try, return null
        if (implName == null) {
            return null;
        }

        // try an instantiate the implementation
        try {
            return (HelperProvider) cl.loadClass(implName).newInstance();
        } catch (InstantiationException e) {
            throw new NoHelperProviderException(implName, e);
        } catch (IllegalAccessException e) {
            throw new NoHelperProviderException(implName, e);
        } catch (ClassNotFoundException e) {
            throw new NoHelperProviderException(implName, e);
        }
    }

    /**
     * Reads the implementation class name from the system property, or null
     * if it is unset or unreadable under the security manager.
     */
    private static String getImplementationName() {
        try {
            return (String)AccessController.doPrivileged(new PrivilegedAction() {
                public Object run() {
                    return System.getProperty(PROPERTY_NAME);
                }
            });
        } catch (SecurityException e) {
            return null;
        }
    }

    /**
     * Reads the implementation class name from the classloader's service
     * resource: the first non-blank line after stripping '#' comments.
     * Returns null when the resource is missing or contains no usable line.
     */
    private static String getImplementationName(ClassLoader cl) {
        InputStream is = cl.getResourceAsStream(SERVICE_RESOURCE_NAME);
        if (is == null) {
            return null;
        }

        InputStreamReader in;
        try {
            // Service files are UTF-8 per the JAR service-provider convention.
            in = new InputStreamReader(is, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new AssertionError("UTF-8 encoding not available");
        }

        try {
            BufferedReader reader = new BufferedReader(in, 128);
            try {
                String line;
                while ((line = reader.readLine()) != null) {
                    int i = line.indexOf('#');
                    if (i != -1) {
                        line = line.substring(0, i);
                    }
                    line = line.trim();
                    if (line.length() > 0) {
                        return line;
                    }
                }
                return null;
            } finally {
                reader.close();
            }
        } catch (IOException e) {
            throw new NoHelperProviderException(e);
        }
    }

///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// implementation specific methods for users that don't want to use the default implementation

    /**
     * Returns a CopyHelper obtained from this implementation.
     *
     * @return a CopyHelper obtained from this implementation
     */
    public abstract CopyHelper copyHelper();

    /**
     * Returns a DataFactory obtained from this implementation.
     *
     * @return a DataFactory obtained from this implementation
     */
    public abstract DataFactory dataFactory();

    /**
     * Returns a DataHelper obtained from this implementation.
     *
     * @return a DataHelper obtained from this implementation
     */
    public abstract DataHelper dataHelper();

    /**
     * Returns a EqualityHelper obtained from this implementation.
     *
     * @return a EqualityHelper obtained from this implementation
     */
    public abstract EqualityHelper equalityHelper();

    /**
     * Returns a TypeHelper obtained from this implementation.
     *
     * @return a TypeHelper obtained from this implementation
     */
    public abstract TypeHelper typeHelper();

    /**
     * Returns a XMLHelper obtained from this implementation.
     *
     * @return a XMLHelper obtained from this implementation
     */
    public abstract XMLHelper xmlHelper();

    /**
     * Returns a XSDHelper obtained from this implementation.
     *
     * @return a XSDHelper obtained from this implementation
     */
    public abstract XSDHelper xsdHelper();

    /**
     * Create a Resolvable using this implementation
     *
     * @return a Resolvable created using this implementation
     */
    public abstract ExternalizableDelegator.Resolvable resolvable();

    /**
     * Create a Resolvable using this implementation
     *
     * @param target the object to be resolved
     * @return a Resolvable created using this implementation
     */
    public abstract ExternalizableDelegator.Resolvable resolvable(Object target);

///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// static helper methods required by the specification
// NOTE(review): all of these dereference INSTANCE and will throw
// NullPointerException when no default provider was located.

    /**
     * Returns a CopyHelper obtained from the default HelperProvider.
     *
     * @return a CopyHelper obtained from the default HelperProvider
     */
    public static CopyHelper getCopyHelper() {
        return INSTANCE.copyHelper();
    }

    /**
     * Returns a DataFactory obtained from the default HelperProvider.
     *
     * @return a DataFactory obtained from the default HelperProvider
     */
    public static DataFactory getDataFactory() {
        return INSTANCE.dataFactory();
    }

    /**
     * Returns a DataHelper obtained from the default HelperProvider.
     *
     * @return a DataHelper obtained from the default HelperProvider
     */
    public static DataHelper getDataHelper() {
        return INSTANCE.dataHelper();
    }

    /**
     * Returns a EqualityHelper obtained from the default HelperProvider.
     *
     * @return a EqualityHelper obtained from the default HelperProvider
     */
    public static EqualityHelper getEqualityHelper() {
        return INSTANCE.equalityHelper();
    }

    /**
     * Returns a TypeHelper obtained from the default HelperProvider.
     *
     * @return a TypeHelper obtained from the default HelperProvider
     */
    public static TypeHelper getTypeHelper() {
        return INSTANCE.typeHelper();
    }

    /**
     * Returns a XMLHelper obtained from the default HelperProvider.
     *
     * @return a XMLHelper obtained from the default HelperProvider
     */
    public static XMLHelper getXMLHelper() {
        return INSTANCE.xmlHelper();
    }

    /**
     * Returns a XSDHelper obtained from the default HelperProvider.
     *
     * @return a XSDHelper obtained from the default HelperProvider
     */
    public static XSDHelper getXSDHelper() {
        return INSTANCE.xsdHelper();
    }

    /**
     * Create a Resolvable using the default HelperProvider
     *
     * @return a Resolvable created using the default HelperProvider
     */
    public static ExternalizableDelegator.Resolvable createResolvable() {
        return INSTANCE.resolvable();
    }

    /**
     * Create a Resolvable using the default HelperProvider
     *
     * @param target the object to be resolved
     * @return a Resolvable created using the default HelperProvider
     */
    public static ExternalizableDelegator.Resolvable createResolvable(Object target) {
        return INSTANCE.resolvable(target);
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////
    // New in SDO 2.1
    ////////////////////////////////////////////////////////////////////////////////////////////////////

    /**
     * Gets the default HelperContext
     * @return a HelperContext object
     */
    public static HelperContext getDefaultContext()
    {
      return INSTANCE.helperContext();
    }

    // NOTE(review): defaultContext is never assigned in this class, so this
    // returns null unless a subclass/implementation populates the field.
    HelperContext helperContext() { return defaultContext; }

    // static HelperContext defaultContext = new DefaultHelperContext();
    // TODO: Tuscany SDO implementation specific to create a special implementation of HelperContext
    protected static HelperContext defaultContext;

    /** HelperContext that simply delegates every accessor to INSTANCE. */
    static class DefaultHelperContext implements HelperContext
    {
      public CopyHelper getCopyHelper() { return INSTANCE.copyHelper(); }
      public DataFactory getDataFactory() { return INSTANCE.dataFactory(); }
      public DataHelper getDataHelper() { return INSTANCE.dataHelper(); }
      public EqualityHelper getEqualityHelper() { return INSTANCE.equalityHelper(); }
      public TypeHelper getTypeHelper() { return INSTANCE.typeHelper(); }
      public XMLHelper getXMLHelper() { return INSTANCE.xmlHelper(); }
      public XSDHelper getXSDHelper() { return INSTANCE.xsdHelper(); }
    }
}
package weibo4j.org.json; /* Copyright (c) 2002 JSON.org Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. The Software shall be used for Good, not Evil. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ import java.io.IOException; import java.io.Writer; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.Map; /** * A JSONArray is an ordered sequence of values. Its external text form is a * string wrapped in square brackets with commas separating the values. The * internal form is an object having <code>get</code> and <code>opt</code> * methods for accessing the values by index, and <code>put</code> methods for * adding or replacing values. The values can be any of these types: * <code>Boolean</code>, <code>JSONArray</code>, <code>JSONObject</code>, * <code>Number</code>, <code>String</code>, or the * <code>JSONObject.NULL object</code>. * <p/> * The constructor can convert a JSON text into a Java object. The * <code>toString</code> method converts to JSON text. 
 * <p/>
 * A <code>get</code> method returns a value if one can be found, and throws an
 * exception if one cannot be found. An <code>opt</code> method returns a
 * default value instead of throwing an exception, and so is useful for
 * obtaining optional values.
 * <p/>
 * The generic <code>get()</code> and <code>opt()</code> methods return an
 * object which you can cast or query for type. There are also typed
 * <code>get</code> and <code>opt</code> methods that do type checking and type
 * coercion for you.
 * <p/>
 * The texts produced by the <code>toString</code> methods strictly conform to
 * JSON syntax rules. The constructors are more forgiving in the texts they will
 * accept:
 * <ul>
 * <li>An extra <code>,</code>&nbsp;<small>(comma)</small> may appear just
 * before the closing bracket.</li>
 * <li>The <code>null</code> value will be inserted when there
 * is <code>,</code>&nbsp;<small>(comma)</small> elision.</li>
 * <li>Strings may be quoted with <code>'</code>&nbsp;<small>(single
 * quote)</small>.</li>
 * <li>Strings do not need to be quoted at all if they do not begin with a quote
 * or single quote, and if they do not contain leading or trailing spaces,
 * and if they do not contain any of these characters:
 * <code>{ } [ ] / \ : , = ; #</code> and if they do not look like numbers
 * and if they are not the reserved words <code>true</code>,
 * <code>false</code>, or <code>null</code>.</li>
 * <li>Values can be separated by <code>;</code> <small>(semicolon)</small> as
 * well as by <code>,</code> <small>(comma)</small>.</li>
 * <li>Numbers may have the <code>0-</code> <small>(octal)</small> or
 * <code>0x-</code> <small>(hex)</small> prefix.</li>
 * </ul>
 *
 * @author JSON.org
 * @version 2008-09-18
 */
public class JSONArray {

    /**
     * The arrayList where the JSONArray's properties are kept.
     * Elements may be any JSON-compatible value, JSONObject.NULL, or null
     * (null marks an elided value and is treated as "no value" by get/opt).
     */
    private ArrayList myArrayList;

    /**
     * Construct an empty JSONArray.
     */
    public JSONArray() {
        this.myArrayList = new ArrayList();
    }

    /**
     * Construct a JSONArray from a JSONTokener.
     *
     * @param x A JSONTokener
     * @throws JSONException If there is a syntax error.
     */
    public JSONArray(JSONTokener x) throws JSONException {
        this();
        char c = x.nextClean();
        char q;
        // Accept either '[' or '(' as the opener and remember the matching closer.
        if (c == '[') {
            q = ']';
        } else if (c == '(') {
            q = ')';
        } else {
            throw x.syntaxError("A JSONArray text must start with '['");
        }
        if (x.nextClean() == ']') {
            // Empty array.
            return;
        }
        x.back();
        for (; ; ) {
            if (x.nextClean() == ',') {
                // Comma elision: record a null placeholder for the missing value.
                x.back();
                this.myArrayList.add(null);
            } else {
                x.back();
                this.myArrayList.add(x.nextValue());
            }
            c = x.nextClean();
            switch (c) {
                case ';':
                case ',':
                    // Separator; a ']' directly after it is a tolerated trailing comma.
                    if (x.nextClean() == ']') {
                        return;
                    }
                    x.back();
                    break;
                case ']':
                case ')':
                    // Closer must match the opener ('[' pairs with ']', '(' with ')').
                    if (q != c) {
                        throw x.syntaxError("Expected a '" + new Character(q) + "'");
                    }
                    return;
                default:
                    throw x.syntaxError("Expected a ',' or ']'");
            }
        }
    }

    /**
     * Construct a JSONArray from a source JSON text.
     *
     * @param source A string that begins with
     *               <code>[</code>&nbsp;<small>(left bracket)</small>
     *               and ends with <code>]</code>&nbsp;<small>(right bracket)</small>.
     * @throws JSONException If there is a syntax error.
     */
    public JSONArray(String source) throws JSONException {
        this(new JSONTokener(source));
    }

    /**
     * Construct a JSONArray from a Collection.
     *
     * @param collection A Collection. May be null, yielding an empty array.
     */
    public JSONArray(Collection collection) {
        this.myArrayList = (collection == null)
                ? new ArrayList()
                : new ArrayList(collection);
    }

    /**
     * Construct a JSONArray from a collection of beans.
     * The collection should have Java Beans.
     * NOTE(review): despite the historical doc comment below, this constructor
     * declares no checked exception; each element is wrapped via the
     * JSONObject bean constructor.
     *
     * @throws JSONException If not an array.
     */
    public JSONArray(Collection collection, boolean includeSuperClass) {
        this.myArrayList = new ArrayList();
        if (collection != null) {
            for (Iterator iter = collection.iterator(); iter.hasNext(); ) {
                this.myArrayList.add(new JSONObject(iter.next(), includeSuperClass));
            }
        }
    }

    /**
     * Construct a JSONArray from an array
     *
     * @throws JSONException If not an array.
     */
    public JSONArray(Object array) throws JSONException {
        this();
        // java.lang.reflect.Array handles both Object[] and primitive arrays.
        if (array.getClass().isArray()) {
            int length = Array.getLength(array);
            for (int i = 0; i < length; i += 1) {
                this.put(Array.get(array, i));
            }
        } else {
            throw new JSONException("JSONArray initial value should be a string or collection or array.");
        }
    }

    /**
     * Construct a JSONArray from an array with a bean.
     * The array should have Java Beans.
     *
     * @throws JSONException If not an array.
     */
    public JSONArray(Object array, boolean includeSuperClass) throws JSONException {
        this();
        if (array.getClass().isArray()) {
            int length = Array.getLength(array);
            for (int i = 0; i < length; i += 1) {
                // Each element is reflected into a JSONObject as a bean.
                this.put(new JSONObject(Array.get(array, i), includeSuperClass));
            }
        } else {
            throw new JSONException("JSONArray initial value should be a string or collection or array.");
        }
    }

    /**
     * Get the object value associated with an index.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return An object value.
     * @throws JSONException If there is no value for the index.
     */
    public Object get(int index) throws JSONException {
        Object o = opt(index);
        if (o == null) {
            throw new JSONException("JSONArray[" + index + "] not found.");
        }
        return o;
    }

    /**
     * Get the boolean value associated with an index.
     * The string values "true" and "false" are converted to boolean
     * (case insensitive).
     *
     * @param index The index must be between 0 and length() - 1.
     * @return The truth.
     * @throws JSONException If there is no value for the index or if the
     *                      value is not convertable to boolean.
     */
    public boolean getBoolean(int index) throws JSONException {
        Object o = get(index);
        if (o.equals(Boolean.FALSE) ||
                (o instanceof String &&
                        ((String) o).equalsIgnoreCase("false"))) {
            return false;
        } else if (o.equals(Boolean.TRUE) ||
                (o instanceof String &&
                        ((String) o).equalsIgnoreCase("true"))) {
            return true;
        }
        throw new JSONException("JSONArray[" + index + "] is not a Boolean.");
    }

    /**
     * Get the double value associated with an index.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return The value.
     * @throws JSONException If the key is not found or if the value cannot
     *                      be converted to a number.
     */
    public double getDouble(int index) throws JSONException {
        Object o = get(index);
        try {
            // Numbers convert directly; anything else is cast to String and
            // parsed - a failed cast or parse becomes a JSONException below.
            return o instanceof Number ?
                    ((Number) o).doubleValue() :
                    Double.valueOf((String) o).doubleValue();
        } catch (Exception e) {
            throw new JSONException("JSONArray[" + index + "] is not a number.");
        }
    }

    /**
     * Get the int value associated with an index.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return The value.
     * @throws JSONException If the key is not found or if the value cannot
     *                      be converted to a number.
     *                      if the value cannot be converted to a number.
     */
    public int getInt(int index) throws JSONException {
        Object o = get(index);
        // Non-numbers go through the double path and are truncated.
        return o instanceof Number ?
                ((Number) o).intValue() :
                (int) getDouble(index);
    }

    /**
     * Get the JSONArray associated with an index.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return A JSONArray value.
     * @throws JSONException If there is no value for the index. or if the
     *                      value is not a JSONArray
     */
    public JSONArray getJSONArray(int index) throws JSONException {
        Object o = get(index);
        if (o instanceof JSONArray) {
            return (JSONArray) o;
        }
        throw new JSONException("JSONArray[" + index + "] is not a JSONArray.");
    }

    /**
     * Get the JSONObject associated with an index.
     *
     * @param index subscript
     * @return A JSONObject value.
     * @throws JSONException If there is no value for the index or if the
     *                      value is not a JSONObject
     */
    public JSONObject getJSONObject(int index) throws JSONException {
        Object o = get(index);
        if (o instanceof JSONObject) {
            return (JSONObject) o;
        }
        throw new JSONException("JSONArray[" + index + "] is not a JSONObject.");
    }

    /**
     * Get the long value associated with an index.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return The value.
* @throws JSONException If the key is not found or if the value cannot * be converted to a number. */ public long getLong(int index) throws JSONException { Object o = get(index); return o instanceof Number ? ((Number) o).longValue() : (long) getDouble(index); } /** * Get the string associated with an index. * * @param index The index must be between 0 and length() - 1. * @return A string value. * @throws JSONException If there is no value for the index. */ public String getString(int index) throws JSONException { return get(index).toString(); } /** * Determine if the value is null. * * @param index The index must be between 0 and length() - 1. * @return true if the value at the index is null, or if there is no value. */ public boolean isNull(int index) { return JSONObject.NULL.equals(opt(index)); } /** * Make a string from the contents of this JSONArray. The * <code>separator</code> string is inserted between each element. * Warning: This method assumes that the data structure is acyclical. * * @param separator A string that will be inserted between the elements. * @return a string. * @throws JSONException If the array contains an invalid number. */ public String join(String separator) throws JSONException { int len = length(); StringBuffer sb = new StringBuffer(); for (int i = 0; i < len; i += 1) { if (i > 0) { sb.append(separator); } sb.append(JSONObject.valueToString(this.myArrayList.get(i))); } return sb.toString(); } /** * Get the number of elements in the JSONArray, included nulls. * * @return The length (or size). */ public int length() { return this.myArrayList.size(); } /** * Get the optional object value associated with an index. * * @param index The index must be between 0 and length() - 1. * @return An object value, or null if there is no * object at that index. */ public Object opt(int index) { return (index < 0 || index >= length()) ? null : this.myArrayList.get(index); } /** * Get the optional boolean value associated with an index. 
     * It returns false if there is no value at that index,
     * or if the value is not Boolean.TRUE or the String "true".
     *
     * @param index The index must be between 0 and length() - 1.
     * @return The truth.
     */
    public boolean optBoolean(int index) {
        return optBoolean(index, false);
    }

    /**
     * Get the optional boolean value associated with an index.
     * It returns the defaultValue if there is no value at that index or if
     * it is not a Boolean or the String "true" or "false" (case insensitive).
     *
     * @param index        The index must be between 0 and length() - 1.
     * @param defaultValue A boolean default.
     * @return The truth.
     */
    public boolean optBoolean(int index, boolean defaultValue) {
        try {
            return getBoolean(index);
        } catch (Exception e) {
            // Deliberate best-effort: any conversion failure yields the default.
            return defaultValue;
        }
    }

    /**
     * Get the optional double value associated with an index.
     * NaN is returned if there is no value for the index,
     * or if the value is not a number and cannot be converted to a number.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return The value.
     */
    public double optDouble(int index) {
        return optDouble(index, Double.NaN);
    }

    /**
     * Get the optional double value associated with an index.
     * The defaultValue is returned if there is no value for the index,
     * or if the value is not a number and cannot be converted to a number.
     *
     * @param index        subscript
     * @param defaultValue The default value.
     * @return The value.
     */
    public double optDouble(int index, double defaultValue) {
        try {
            return getDouble(index);
        } catch (Exception e) {
            return defaultValue;
        }
    }

    /**
     * Get the optional int value associated with an index.
     * Zero is returned if there is no value for the index,
     * or if the value is not a number and cannot be converted to a number.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return The value.
     */
    public int optInt(int index) {
        return optInt(index, 0);
    }

    /**
     * Get the optional int value associated with an index.
     * The defaultValue is returned if there is no value for the index,
     * or if the value is not a number and cannot be converted to a number.
     *
     * @param index        The index must be between 0 and length() - 1.
     * @param defaultValue The default value.
     * @return The value.
     */
    public int optInt(int index, int defaultValue) {
        try {
            return getInt(index);
        } catch (Exception e) {
            // Deliberate best-effort: conversion failures fall back to the default.
            return defaultValue;
        }
    }

    /**
     * Get the optional JSONArray associated with an index.
     *
     * @param index subscript
     * @return A JSONArray value, or null if the index has no value,
     *         or if the value is not a JSONArray.
     */
    public JSONArray optJSONArray(int index) {
        Object o = opt(index);
        return o instanceof JSONArray ? (JSONArray) o : null;
    }

    /**
     * Get the optional JSONObject associated with an index.
     * Null is returned if the key is not found, or null if the index has
     * no value, or if the value is not a JSONObject.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return A JSONObject value.
     */
    public JSONObject optJSONObject(int index) {
        Object o = opt(index);
        return o instanceof JSONObject ? (JSONObject) o : null;
    }

    /**
     * Get the optional long value associated with an index.
     * Zero is returned if there is no value for the index,
     * or if the value is not a number and cannot be converted to a number.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return The value.
     */
    public long optLong(int index) {
        return optLong(index, 0);
    }

    /**
     * Get the optional long value associated with an index.
     * The defaultValue is returned if there is no value for the index,
     * or if the value is not a number and cannot be converted to a number.
     *
     * @param index        The index must be between 0 and length() - 1.
     * @param defaultValue The default value.
     * @return The value.
     */
    public long optLong(int index, long defaultValue) {
        try {
            return getLong(index);
        } catch (Exception e) {
            return defaultValue;
        }
    }

    /**
     * Get the optional string value associated with an index.
     * It returns an empty string if there is no value at that index. If the
     * value is not a string and is not null, then it is converted to a string.
     *
     * @param index The index must be between 0 and length() - 1.
     * @return A String value.
     */
    public String optString(int index) {
        return optString(index, "");
    }

    /**
     * Get the optional string associated with an index.
     * The defaultValue is returned if the key is not found.
     *
     * @param index        The index must be between 0 and length() - 1.
     * @param defaultValue The default value.
     * @return A String value.
     */
    public String optString(int index, String defaultValue) {
        Object o = opt(index);
        // NOTE(review): JSONObject.NULL is non-null, so it stringifies to "null"
        // here rather than yielding the default - long-standing JSON.org behavior.
        return o != null ? o.toString() : defaultValue;
    }

    /**
     * Append a boolean value. This increases the array's length by one.
     *
     * @param value A boolean value.
     * @return this.
     */
    public JSONArray put(boolean value) {
        put(value ? Boolean.TRUE : Boolean.FALSE);
        return this;
    }

    /**
     * Put a value in the JSONArray, where the value will be a
     * JSONArray which is produced from a Collection.
     *
     * @param value A Collection value.
     * @return this.
     */
    public JSONArray put(Collection value) {
        put(new JSONArray(value));
        return this;
    }

    /**
     * Append a double value. This increases the array's length by one.
     *
     * @param value A double value.
     * @return this.
     * @throws JSONException if the value is not finite.
     */
    public JSONArray put(double value) throws JSONException {
        Double d = new Double(value);
        // Reject NaN/Infinity before storing - they are not representable in JSON.
        JSONObject.testValidity(d);
        put(d);
        return this;
    }

    /**
     * Append an int value. This increases the array's length by one.
     *
     * @param value An int value.
     * @return this.
     */
    public JSONArray put(int value) {
        put(new Integer(value));
        return this;
    }

    /**
     * Append an long value. This increases the array's length by one.
     *
     * @param value A long value.
     * @return this.
     */
    public JSONArray put(long value) {
        put(new Long(value));
        return this;
    }

    /**
     * Put a value in the JSONArray, where the value will be a
     * JSONObject which is produced from a Map.
     *
     * @param value A Map value.
     * @return this.
     */
    public JSONArray put(Map value) {
        put(new JSONObject(value));
        return this;
    }

    /**
     * Append an object value. This increases the array's length by one.
     *
     * @param value An object value. The value should be a
     *              Boolean, Double, Integer, JSONArray, JSONObject, Long, or String, or the
     *              JSONObject.NULL object.
     * @return this.
     */
    public JSONArray put(Object value) {
        this.myArrayList.add(value);
        return this;
    }

    /**
     * Put or replace a boolean value in the JSONArray. If the index is greater
     * than the length of the JSONArray, then null elements will be added as
     * necessary to pad it out.
     *
     * @param index The subscript.
     * @param value A boolean value.
     * @return this.
     * @throws JSONException If the index is negative.
     */
    public JSONArray put(int index, boolean value) throws JSONException {
        put(index, value ? Boolean.TRUE : Boolean.FALSE);
        return this;
    }

    /**
     * Put a value in the JSONArray, where the value will be a
     * JSONArray which is produced from a Collection.
     *
     * @param index The subscript.
     * @param value A Collection value.
     * @return this.
     * @throws JSONException If the index is negative or if the value is
     *                      not finite.
     */
    public JSONArray put(int index, Collection value) throws JSONException {
        put(index, new JSONArray(value));
        return this;
    }

    /**
     * Put or replace a double value. If the index is greater than the length of
     * the JSONArray, then null elements will be added as necessary to pad
     * it out.
     *
     * @param index The subscript.
     * @param value A double value.
     * @return this.
     * @throws JSONException If the index is negative or if the value is
     *                      not finite.
     */
    public JSONArray put(int index, double value) throws JSONException {
        // Finiteness is checked by put(int, Object) via JSONObject.testValidity.
        put(index, new Double(value));
        return this;
    }

    /**
     * Put or replace an int value. If the index is greater than the length of
     * the JSONArray, then null elements will be added as necessary to pad
     * it out.
     *
     * @param index The subscript.
     * @param value An int value.
     * @return this.
     * @throws JSONException If the index is negative.
     */
    public JSONArray put(int index, int value) throws JSONException {
        put(index, new Integer(value));
        return this;
    }

    /**
     * Put or replace a long value. If the index is greater than the length of
     * the JSONArray, then null elements will be added as necessary to pad
     * it out.
     *
     * @param index The subscript.
     * @param value A long value.
     * @return this.
     * @throws JSONException If the index is negative.
     */
    public JSONArray put(int index, long value) throws JSONException {
        put(index, new Long(value));
        return this;
    }

    /**
     * Put a value in the JSONArray, where the value will be a
     * JSONObject which is produced from a Map.
     *
     * @param index The subscript.
     * @param value The Map value.
     * @return this.
     * @throws JSONException If the index is negative or if the the value is
     *                      an invalid number.
     */
    public JSONArray put(int index, Map value) throws JSONException {
        put(index, new JSONObject(value));
        return this;
    }

    /**
     * Put or replace an object value in the JSONArray. If the index is greater
     * than the length of the JSONArray, then null elements will be added as
     * necessary to pad it out.
     *
     * @param index The subscript.
     * @param value The value to put into the array. The value should be a
     *              Boolean, Double, Integer, JSONArray, JSONObject, Long, or String, or the
     *              JSONObject.NULL object.
     * @return this.
     * @throws JSONException If the index is negative or if the the value is
     *                      an invalid number.
     */
    public JSONArray put(int index, Object value) throws JSONException {
        JSONObject.testValidity(value);
        if (index < 0) {
            throw new JSONException("JSONArray[" + index + "] not found.");
        }
        if (index < length()) {
            this.myArrayList.set(index, value);
        } else {
            // Pad with JSONObject.NULL up to the target index, then append.
            while (index != length()) {
                put(JSONObject.NULL);
            }
            put(value);
        }
        return this;
    }

    /**
     * Produce a JSONObject by combining a JSONArray of names with the values
     * of this JSONArray.
     *
     * @param names A JSONArray containing a list of key strings. These will be
     *              paired with the values.
* @return A JSONObject, or null if there are no names or if this JSONArray * has no values. * @throws JSONException If any of the names are null. */ public JSONObject toJSONObject(JSONArray names) throws JSONException { if (names == null || names.length() == 0 || length() == 0) { return null; } JSONObject jo = new JSONObject(); for (int i = 0; i < names.length(); i += 1) { jo.put(names.getString(i), this.opt(i)); } return jo; } /** * Make a JSON text of this JSONArray. For compactness, no * unnecessary whitespace is added. If it is not possible to produce a * syntactically correct JSON text then null will be returned instead. This * could occur if the array contains an invalid number. * <p/> * Warning: This method assumes that the data structure is acyclical. * * @return a printable, displayable, transmittable * representation of the array. */ public String toString() { try { return '[' + join(",") + ']'; } catch (Exception e) { return null; } } /** * Make a prettyprinted JSON text of this JSONArray. * Warning: This method assumes that the data structure is acyclical. * * @param indentFactor The number of spaces to add to each level of * indentation. * @return a printable, displayable, transmittable * representation of the object, beginning * with <code>[</code>&nbsp;<small>(left bracket)</small> and ending * with <code>]</code>&nbsp;<small>(right bracket)</small>. * @throws JSONException */ public String toString(int indentFactor) throws JSONException { return toString(indentFactor, 0); } /** * Make a prettyprinted JSON text of this JSONArray. * Warning: This method assumes that the data structure is acyclical. * * @param indentFactor The number of spaces to add to each level of * indentation. * @param indent The indention of the top level. * @return a printable, displayable, transmittable * representation of the array. 
     * @throws JSONException
     */
    String toString(int indentFactor, int indent) throws JSONException {
        int len = length();
        if (len == 0) {
            return "[]";
        }
        int i;
        StringBuffer sb = new StringBuffer("[");
        if (len == 1) {
            // Single element: keep it on one line.
            sb.append(JSONObject.valueToString(this.myArrayList.get(0),
                    indentFactor, indent));
        } else {
            int newindent = indent + indentFactor;
            sb.append('\n');
            for (i = 0; i < len; i += 1) {
                if (i > 0) {
                    sb.append(",\n");
                }
                for (int j = 0; j < newindent; j += 1) {
                    sb.append(' ');
                }
                sb.append(JSONObject.valueToString(this.myArrayList.get(i),
                        indentFactor, newindent));
            }
            sb.append('\n');
            // Closing bracket is indented at the enclosing level.
            for (i = 0; i < indent; i += 1) {
                sb.append(' ');
            }
        }
        sb.append(']');
        return sb.toString();
    }

    /**
     * Write the contents of the JSONArray as JSON text to a writer.
     * For compactness, no whitespace is added.
     * <p/>
     * Warning: This method assumes that the data structure is acyclical.
     *
     * @return The writer.
     * @throws JSONException wrapping any IOException from the writer.
     */
    public Writer write(Writer writer) throws JSONException {
        try {
            boolean b = false;
            int len = length();
            writer.write('[');
            for (int i = 0; i < len; i += 1) {
                if (b) {
                    writer.write(',');
                }
                Object v = this.myArrayList.get(i);
                // Nested containers stream themselves; everything else is serialized.
                if (v instanceof JSONObject) {
                    ((JSONObject) v).write(writer);
                } else if (v instanceof JSONArray) {
                    ((JSONArray) v).write(writer);
                } else {
                    writer.write(JSONObject.valueToString(v));
                }
                b = true;
            }
            writer.write(']');
            return writer;
        } catch (IOException e) {
            throw new JSONException(e);
        }
    }
}
/*
 * dgMaster: A versatile, open source data generator.
 * (c) 2007 M. Michalakopoulos, mmichalak@gmail.com
 */
package generator.gui;

import generator.engine.ProgressUpdateObserver;
import generator.engine.file.Generator;
import generator.extenders.RandomiserInstance;
import generator.extenders.RandomiserPanel;
import generator.misc.ApplicationContext;
import generator.misc.Constants;
import generator.misc.DataFileDefinition;
import generator.misc.DataFileItem;
import generator.misc.FileOutDataSaver;
import generator.misc.RandomiserType;
import generator.misc.Utils;
import java.net.URL;
import java.util.Vector;
import javax.swing.DefaultListModel;
import javax.swing.ImageIcon;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import org.apache.log4j.Logger;

/**
 * Swing panel for configuring and running file-based data generation:
 * the user picks randomiser types/instances, arranges output columns,
 * chooses delimiter/alignment options and a target file, then generates.
 * Receives generation progress callbacks via ProgressUpdateObserver.
 */
public class FileOutputPanel extends javax.swing.JPanel implements ProgressUpdateObserver {
    /**
     *
     */
    private static final long serialVersionUID = 1L;
    private Logger logger = Logger.getLogger(FileOutputPanel.class);
    // Shared application data, loaded from the ApplicationContext singleton.
    private Vector<RandomiserType> vRandomiserTypes;
    private Vector<RandomiserInstance> vRandomiserInstances;
    private Vector<DataFileDefinition> vDFDs;
    // List models backing the output-data, randomiser-type and
    // randomiser-instance JLists respectively.
    private DefaultListModel modelOutData, modelRT, modelRI;
    private ViewPropertiesForm frmProperties;
    private MainForm frmMain;
    private ProgressForm frmProgress;

    /** Creates new form FileOutputPanel */
    public FileOutputPanel() {
        initComponents();
        loadButtonImages();
        ApplicationContext context = ApplicationContext.getInstance();
        this.vRandomiserTypes = context.getRandomiserTypes();
        this.vRandomiserInstances = context.getRandomiserInstances();
        this.vDFDs = context.getDFD();
        modelOutData = new DefaultListModel();
        modelRT = new DefaultListModel();
        modelRI = new DefaultListModel();
        populateRandomiserTypes();
        // Defaults: left alignment, comma delimiter.
        radLeft.setSelected(true);
        radComma.setSelected(true);
        lstOutData.setModel(modelOutData);
        lstRandomiserTypes.setModel(modelRT);
        lstRandomiserInstances.setModel(modelRI);
        lstRandomiserTypes.setSelectedIndex(0);
    }

    /**
     * Sets up the buttons.
     * NOTE(review): getResource returns null if an image is missing from the
     * classpath, which would make new ImageIcon(url) throw NPE - verify the
     * images ship with the jar.
     */
    private void loadButtonImages() {
        URL urlAdd = this.getClass().getClassLoader().getResource("generator/images/list-add-small.png");
        URL urlUp = this.getClass().getClassLoader().getResource("generator/images/go-up.png");
        URL urlDown = this.getClass().getClassLoader().getResource("generator/images/go-down.png");
        URL urlRemFormat = this.getClass().getClassLoader().getResource("generator/images/clear-format.gif");
        URL urlRemSelect = this.getClass().getClassLoader().getResource("generator/images/list-remove-small.png");
        URL urlBrowse = this.getClass().getClassLoader().getResource("generator/images/document-open-small.png");
        URL urlGenerate = this.getClass().getClassLoader().getResource("generator/images/generate-data-small.png");
        URL urlSave = this.getClass().getClassLoader().getResource("generator/images/document-save-small.png");
        btnAdd.setIcon(new ImageIcon(urlAdd));
        btnUp.setIcon(new ImageIcon(urlUp));
        btnDown.setIcon(new ImageIcon(urlDown));
        btnRemoveFormat.setIcon(new ImageIcon(urlRemFormat));
        btnRemove.setIcon(new ImageIcon(urlRemSelect));
        btnBrowseSave.setIcon(new ImageIcon(urlBrowse));
        btnGenerate.setIcon(new ImageIcon(urlGenerate));
        btnSave.setIcon(new ImageIcon(urlSave));
    }

    /**
     * Loads randomiser types in the RandomiserTypes list box.
     * On the top of the list "All user defined types" appears.
     * When the user selects this, all randomiserInstances are shown,
     * in the listbox next to the RandomiserTypes listbox
     *
     * <p>Preconditions: a) vRT should not be null,
     * b) vRI should not be null,
     * c) vDFDs should not be null
     *
     * <p>Post-effects : the panel is constructed, and the listbox of
     * RandomiserTypes is filled with RandomiserTypes
     *
     */
    private void populateRandomiserTypes() {
        logger.debug("Loading list of randomiser types");
        RandomiserType ri;
        // Pseudo-entry selecting every user defined instance.
        // NOTE(review): the javadoc above says "All user defined types" but the
        // actual list entry reads "cases" - confirm which wording other code
        // (e.g. selection handlers) matches before changing either.
        modelRT.addElement("All user defined cases");
        for (int i = 0; i < vRandomiserTypes.size(); i++) {
            ri = vRandomiserTypes.elementAt(i);
            modelRT.addElement(ri.getName());
        }
        logger.debug("Loading list of randomiser types: Done");
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc=" Generated Code ">//GEN-BEGIN:initComponents
    private void initComponents() {
        // NOTE(review): these two ButtonGroups are created but never kept or
        // populated - no radio button is add()ed to them, so the alignment
        // (radLeft/radCenter/radRight) and delimiter (radComma/radTab/radOther)
        // radios are not mutually exclusive. Looks unintentional; fix in the
        // Form Editor since this method is generated.
        new javax.swing.ButtonGroup();
        new javax.swing.ButtonGroup();
        jScrollPane1 = new javax.swing.JScrollPane();
        jPanel1 = new javax.swing.JPanel();
        jPanel2 = new javax.swing.JPanel();
        jPanel3 = new javax.swing.JPanel();
        lstRandomiserTypes = new javax.swing.JList();
        lstRandomiserInstances = new javax.swing.JList();
        jLabel9 = new javax.swing.JLabel();
        jLabel13 = new javax.swing.JLabel();
        radCenter = new javax.swing.JRadioButton();
        radLeft = new javax.swing.JRadioButton();
        radRight = new javax.swing.JRadioButton();
        jLabel6 = new javax.swing.JLabel();
        txtWidth = new javax.swing.JTextField();
        jLabel7 = new javax.swing.JLabel();
        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        btnAdd = new javax.swing.JButton();
        btnViewRI = new javax.swing.JButton();
        jLabel11 = new javax.swing.JLabel();
        txtChar = new javax.swing.JTextField();
        txtOrder = new javax.swing.JTextField();
        jPanel4 = new javax.swing.JPanel();
        jLabel3 = new javax.swing.JLabel();
        btnRemove = new javax.swing.JButton();
btnRemoveFormat = new javax.swing.JButton(); btnUp = new javax.swing.JButton(); btnDown = new javax.swing.JButton(); jScrollPane2 = new javax.swing.JScrollPane(); lstOutData = new javax.swing.JList(); jPanel5 = new javax.swing.JPanel(); jLabel4 = new javax.swing.JLabel(); txtFilename = new javax.swing.JTextField(); btnBrowseSave = new javax.swing.JButton(); jLabel5 = new javax.swing.JLabel(); radComma = new javax.swing.JRadioButton(); radTab = new javax.swing.JRadioButton(); radOther = new javax.swing.JRadioButton(); txtDelim = new javax.swing.JTextField(); jLabel8 = new javax.swing.JLabel(); txtNumOfRecs = new javax.swing.JTextField(); txtNone = new javax.swing.JRadioButton(); jPanel6 = new javax.swing.JPanel(); jLabel10 = new javax.swing.JLabel(); txtDefinition = new javax.swing.JTextField(); jLabel12 = new javax.swing.JLabel(); jScrollPane3 = new javax.swing.JScrollPane(); txtDescription = new javax.swing.JTextArea(); btnGenerate = new javax.swing.JButton(); btnSave = new javax.swing.JButton(); jPanel2.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Generated data", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.ABOVE_TOP)); lstRandomiserTypes.setBorder(javax.swing.BorderFactory.createEtchedBorder()); lstRandomiserTypes.addPropertyChangeListener(new java.beans.PropertyChangeListener() { public void propertyChange(java.beans.PropertyChangeEvent evt) { lstRandomiserTypesPropertyChange(evt); } }); lstRandomiserTypes.addListSelectionListener(new javax.swing.event.ListSelectionListener() { public void valueChanged(javax.swing.event.ListSelectionEvent evt) { lstRandomiserTypesValueChanged(evt); } }); lstRandomiserInstances.setBorder(javax.swing.BorderFactory.createEtchedBorder()); lstRandomiserInstances.setModel(new javax.swing.AbstractListModel() { /** * */ private static final long serialVersionUID = 1L; String[] strings = { "Item 1", "Item 2", "Item 3", "Item 4", "Item 5" }; public int getSize() { return 
strings.length; } public Object getElementAt(int i) { return strings[i]; } }); jLabel9.setText("Optional formatting"); radCenter.setText("Center"); radCenter.setMargin(new java.awt.Insets(0, 0, 0, 0)); radLeft.setText("Left"); radLeft.setMargin(new java.awt.Insets(0, 0, 0, 0)); radRight.setText("Right"); radRight.setMargin(new java.awt.Insets(0, 0, 0, 0)); jLabel6.setText("Width:"); jLabel13.setText("Order:"); txtWidth.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { txtWidthActionPerformed(evt); } }); jLabel7.setText("Alignment:"); jLabel1.setText("Available types"); jLabel2.setText("User defined types"); btnAdd.setText("Add >>"); btnAdd.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnAddActionPerformed(evt); } }); btnViewRI.setIcon(new javax.swing.ImageIcon("C:\\javaprojects\\GenGUI\\images\\document-properties.png")); btnViewRI.setText("View type properties"); btnViewRI.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnViewRIActionPerformed(evt); } }); jLabel11.setText("Encl. 
char.:"); org.jdesktop.layout.GroupLayout jPanel3Layout = new org.jdesktop.layout.GroupLayout(jPanel3); jPanel3.setLayout(jPanel3Layout); jPanel3Layout.setHorizontalGroup( jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(org.jdesktop.layout.GroupLayout.TRAILING, jPanel3Layout.createSequentialGroup() .addContainerGap() .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jLabel1) .add(lstRandomiserTypes, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 170, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING) .add(org.jdesktop.layout.GroupLayout.LEADING, btnViewRI, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 181, Short.MAX_VALUE) .add(org.jdesktop.layout.GroupLayout.LEADING, jLabel2) .add(org.jdesktop.layout.GroupLayout.LEADING, lstRandomiserInstances, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 181, Short.MAX_VALUE)) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING, false) .add(jPanel3Layout.createSequentialGroup() .add(10, 10, 10) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING) .add(jPanel3Layout.createSequentialGroup() .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(org.jdesktop.layout.GroupLayout.TRAILING, jLabel13) .add(org.jdesktop.layout.GroupLayout.TRAILING, jLabel6) .add(org.jdesktop.layout.GroupLayout.TRAILING, jLabel11) .add(org.jdesktop.layout.GroupLayout.TRAILING, jLabel7)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(txtOrder, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 53, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(txtChar, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 53, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(txtWidth, 
org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 53, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(radCenter, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 69, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(radRight) .add(radLeft)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)) .add(jPanel3Layout.createSequentialGroup() .add(jLabel9) .add(34, 34, 34))) .add(10, 10, 10)) .add(jPanel3Layout.createSequentialGroup() .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(btnAdd, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addContainerGap()))) ); jPanel3Layout.setVerticalGroup( jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel3Layout.createSequentialGroup() .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(org.jdesktop.layout.GroupLayout.TRAILING, jLabel1) .add(org.jdesktop.layout.GroupLayout.TRAILING, jLabel2) .add(org.jdesktop.layout.GroupLayout.TRAILING, jLabel9, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 14, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel3Layout.createSequentialGroup() .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel3Layout.createSequentialGroup() .add(7, 7, 7) .add(txtWidth, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .add(jPanel3Layout.createSequentialGroup() .add(9, 9, 9) .add(jLabel6))) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(jLabel11) .add(txtChar, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) 
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(jLabel13) .add(txtOrder, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(radLeft) .add(jLabel7)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(radCenter) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(radRight) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(btnAdd)) .add(jPanel3Layout.createSequentialGroup() .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel3Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(lstRandomiserInstances, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 256, Short.MAX_VALUE) .add(lstRandomiserTypes, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 256, Short.MAX_VALUE)))) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(btnViewRI) .addContainerGap()) ); jLabel3.setText("Selected data for output"); btnRemove.setIcon(new javax.swing.ImageIcon("C:\\javaprojects\\GenGUI\\images\\list-remove-small.png")); btnRemove.setText("Remove selected"); btnRemove.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnRemoveActionPerformed(evt); } }); btnRemoveFormat.setIcon(new javax.swing.ImageIcon("C:\\javaprojects\\GenGUI\\images\\clear-format.gif")); btnRemoveFormat.setText("Remove formats"); btnRemoveFormat.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnRemoveFormatActionPerformed(evt); } }); btnUp.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnUpActionPerformed(evt); 
} }); btnDown.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnDownActionPerformed(evt); } }); lstOutData.setModel(new javax.swing.AbstractListModel() { /** * */ private static final long serialVersionUID = 1L; String[] strings = { "Item 1", "Item 2", "Item 3", "Item 4", "Item 5" }; public int getSize() { return strings.length; } public Object getElementAt(int i) { return strings[i]; } }); jScrollPane2.setViewportView(lstOutData); org.jdesktop.layout.GroupLayout jPanel4Layout = new org.jdesktop.layout.GroupLayout(jPanel4); jPanel4.setLayout(jPanel4Layout); jPanel4Layout.setHorizontalGroup( jPanel4Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel4Layout.createSequentialGroup() .addContainerGap() .add(jPanel4Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(btnUp, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 33, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(btnDown, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 33, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel4Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING) .add(jScrollPane2, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 325, Short.MAX_VALUE) .add(org.jdesktop.layout.GroupLayout.LEADING, jPanel4Layout.createSequentialGroup() .add(btnRemoveFormat, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 157, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .add(btnRemove, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 162, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .add(org.jdesktop.layout.GroupLayout.LEADING, jPanel4Layout.createSequentialGroup() .add(jLabel3) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 207, Short.MAX_VALUE))) .addContainerGap()) ); 
jPanel4Layout.setVerticalGroup( jPanel4Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel4Layout.createSequentialGroup() .add(jLabel3) .add(jPanel4Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel4Layout.createSequentialGroup() .add(94, 94, 94) .add(btnUp, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 24, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(btnDown, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 24, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .add(jPanel4Layout.createSequentialGroup() .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jScrollPane2, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 256, Short.MAX_VALUE))) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel4Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(btnRemoveFormat) .add(btnRemove)) .addContainerGap()) ); org.jdesktop.layout.GroupLayout jPanel2Layout = new org.jdesktop.layout.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel2Layout.createSequentialGroup() .addContainerGap() .add(jPanel3, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(15, 15, 15) .add(jPanel4, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addContainerGap()) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel2Layout.createSequentialGroup() .add(jPanel2Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING, false) .add(org.jdesktop.layout.GroupLayout.LEADING, jPanel4, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) 
.add(org.jdesktop.layout.GroupLayout.LEADING, jPanel3, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap(org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); jPanel5.setBorder(javax.swing.BorderFactory.createTitledBorder("File properties")); jLabel4.setText("Output file:"); btnBrowseSave.setIcon(new javax.swing.ImageIcon("C:\\javaprojects\\GenGUI\\images\\document-open-small.png")); btnBrowseSave.setText("Browse..."); btnBrowseSave.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnBrowseSaveActionPerformed(evt); } }); jLabel5.setText("Delimiters:"); radComma.setText("Comma"); radComma.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0)); radComma.setMargin(new java.awt.Insets(0, 0, 0, 0)); radTab.setText("Tab"); radTab.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0)); radTab.setMargin(new java.awt.Insets(0, 0, 0, 0)); radOther.setText("Other:"); radOther.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0)); radOther.setMargin(new java.awt.Insets(0, 0, 0, 0)); jLabel8.setText("Number of recs.:"); txtNone.setText("None"); txtNone.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0)); txtNone.setMargin(new java.awt.Insets(0, 0, 0, 0)); org.jdesktop.layout.GroupLayout jPanel5Layout = new org.jdesktop.layout.GroupLayout(jPanel5); jPanel5.setLayout(jPanel5Layout); jPanel5Layout.setHorizontalGroup( jPanel5Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel5Layout.createSequentialGroup() .add(jPanel5Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel5Layout.createSequentialGroup() .add(49, 49, 49) .add(jPanel5Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING) .add(jLabel5) .add(jLabel4)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) 
.add(jPanel5Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING, false) .add(jPanel5Layout.createSequentialGroup() .add(radComma) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(radTab) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(radOther) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(txtDelim, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 34, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(txtNone)) .add(txtFilename)) .add(6, 6, 6) .add(btnBrowseSave)) .add(jPanel5Layout.createSequentialGroup() .add(25, 25, 25) .add(jLabel8) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(txtNumOfRecs, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 69, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))) .addContainerGap(502, Short.MAX_VALUE)) ); jPanel5Layout.setVerticalGroup( jPanel5Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel5Layout.createSequentialGroup() .add(jPanel5Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(jLabel4) .add(txtFilename, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(btnBrowseSave)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel5Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(jLabel5) .add(radComma) .add(radTab) .add(radOther) .add(txtDelim, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(txtNone)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel5Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(jLabel8) .add(txtNumOfRecs, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))) ); 
jPanel6.setBorder(javax.swing.BorderFactory.createTitledBorder("Definition Properties")); jLabel10.setText("Name:"); jLabel12.setText("Description:"); txtDescription.setColumns(20); txtDescription.setRows(5); jScrollPane3.setViewportView(txtDescription); org.jdesktop.layout.GroupLayout jPanel6Layout = new org.jdesktop.layout.GroupLayout(jPanel6); jPanel6.setLayout(jPanel6Layout); jPanel6Layout.setHorizontalGroup( jPanel6Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel6Layout.createSequentialGroup() .addContainerGap() .add(jPanel6Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING) .add(jLabel12) .add(jLabel10)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel6Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(txtDefinition, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 165, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(jScrollPane3, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 852, Short.MAX_VALUE)) .addContainerGap()) ); jPanel6Layout.setVerticalGroup( jPanel6Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel6Layout.createSequentialGroup() .add(jPanel6Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(jLabel10) .add(txtDefinition, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel6Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jLabel12) .add(jScrollPane3, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 79, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .addContainerGap(org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); btnGenerate.setIcon(new javax.swing.ImageIcon("C:\\javaprojects\\GenGUI\\images\\generate-data-small.png")); btnGenerate.setText("Generate"); btnGenerate.addActionListener(new 
java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnGenerateActionPerformed(evt); } }); btnSave.setIcon(new javax.swing.ImageIcon("C:\\javaprojects\\GenGUI\\images\\document-save-small.png")); btnSave.setText("Save"); btnSave.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnSaveActionPerformed(evt); } }); org.jdesktop.layout.GroupLayout jPanel1Layout = new org.jdesktop.layout.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel1Layout.createSequentialGroup() .add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING) .add(jPanel1Layout.createSequentialGroup() .add(btnGenerate, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 125, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(btnSave, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 111, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING, false) .add(jPanel6, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .add(jPanel2, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 949, Short.MAX_VALUE) .add(jPanel5, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) .addContainerGap(39, Short.MAX_VALUE)) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(jPanel1Layout.createSequentialGroup() .add(jPanel2, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(jPanel6, 
            org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
            .add(jPanel5, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
            .add(jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
                .add(btnSave)
                .add(btnGenerate))
            .add(47, 47, 47))
    );

    jScrollPane1.setViewportView(jPanel1);

    org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(this);
    this.setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
        .add(jScrollPane1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 978, Short.MAX_VALUE)
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
        .add(jScrollPane1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 680, Short.MAX_VALUE)
    );
}// </editor-fold>//GEN-END:initComponents

/**
 * Validates the panel fields, builds a DataFileDefinition from the encoded
 * entries of the output list, stores it in vDFDs (add or replace by name)
 * and persists all definitions to XML, refreshing the main tree on success.
 */
private void btnSaveActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnSaveActionPerformed
{//GEN-HEADEREND:event_btnSaveActionPerformed
    DataFileDefinition dataFile = new DataFileDefinition();
    DataFileItem dataFileItem;
    Vector<DataFileItem> vDataFileItems;
    String item, riName, options,encloseChar;
    String tokens[];
    int idx, align, width, order;
    long nRecs;
    String filename, name, numOfRecs, delimiter, description;
    //filename is mandatory
    filename = txtFilename.getText();
    if(filename==null || filename.trim().length()==0) {
        JOptionPane.showMessageDialog(this,"Please enter a filename","Error",JOptionPane.ERROR_MESSAGE);
        return;
    }
    //delimiter: the "Other" radio requires a non-empty custom value
    if(radOther.isSelected()) {
        delimiter = txtDelim.getText();
        if(delimiter==null || delimiter.trim().length()==0) {
            JOptionPane.showMessageDialog(this,"Please enter a value for the delimiter","Error",JOptionPane.ERROR_MESSAGE);
            return;
        }
    }
    else
    if(radComma.isSelected())
        delimiter = ",";
    else
        delimiter = "\t";
    // NOTE(review): the "None" delimiter radio (txtNone) is never consulted
    // here — selecting it falls through to the tab delimiter; confirm intent.
    //number of records: must parse as a positive integer
    numOfRecs = txtNumOfRecs.getText();
    try {
        nRecs = Integer.parseInt(numOfRecs);
        if(nRecs<=0)
            nRecs=-1;                   // non-positive counts rejected below
    } catch(Exception e) {
        nRecs = -1;                     // non-numeric input rejected below
    }
    if(nRecs==-1) {
        JOptionPane.showMessageDialog(this,"Number of records should be an integer positive","Error",JOptionPane.ERROR_MESSAGE);
        return;
    }
    //name of the definition is mandatory
    name = txtDefinition.getText();
    if(name==null || name.trim().length()==0) {
        JOptionPane.showMessageDialog(this,"Please enter a name for the saved data","Error",JOptionPane.ERROR_MESSAGE);
        return;
    }
    description = txtDescription.getText();
    if(description==null)
        description="";
    //data items: parse every entry of the output list model. Each entry has
    //the form <riName><LEFT_MARK>width,encloseChar,align,order<RIGHT_MARK>.
    vDataFileItems = new Vector<DataFileItem>();
    for(int i=0; i<modelOutData.size(); i++) {
        item = (String)modelOutData.elementAt(i);
        idx = item.indexOf(Constants.LEFT_MARK);
        riName = item.substring(0,idx);
        options= item.substring(idx+1, item.length()-1 ); //xx,yy,zz
        //do the parsing of the options substring
        tokens = options.split(",");
        // tokens[0]: width ("Default" or a number)
        if(tokens[0].equalsIgnoreCase("default"))
            width = Constants.DEFAULT_WIDTH;
        else {
            try {
                width = Integer.parseInt(tokens[0]);
            } catch(Exception e) {
                logger.error("Problem converting string to integer: " +tokens[0]);
                width=-1;               // NOTE(review): stored as-is, not rejected
            }
        }
        // tokens[1]: enclosing character ("None"/empty -> none, else first char)
        if(tokens[1]==null || tokens[1].length()==0 || tokens[1].equalsIgnoreCase("none"))
            encloseChar="";
        else
            encloseChar=tokens[1].substring(0,1);
        // tokens[2]: alignment keyword (anything else falls back to Right)
        if(tokens[2].equalsIgnoreCase("left"))
            align = Constants.ALIGN_LEFT;
        else if(tokens[2].equalsIgnoreCase("center"))
            align = Constants.ALIGN_CENTER;
        else
            align = Constants.ALIGN_RIGHT;
        // tokens[3]: explicit ordering ("None" -> default ordering)
        if(tokens[3].equalsIgnoreCase("None"))
            order = Constants.ORDER_DEFAULT;
        else {
            try {
                order = Integer.parseInt(tokens[3]);
            } catch(Exception e) {
                logger.error("Problem converting string to integer: " +tokens[3]);
                order=-1;
            }
        }
        dataFileItem = new DataFileItem();
        dataFileItem.setRandomiserInstanceName(riName);
        dataFileItem.setWidth(width);
        dataFileItem.setEncloseChar(encloseChar);
        dataFileItem.setAlignment(align);
        dataFileItem.setOrder(order);
        vDataFileItems.add(dataFileItem);
    }
    // populate the definition object and persist it
    dataFile.setName(name);
    dataFile.setDescription(description);
    dataFile.setDelimiter(delimiter);
    dataFile.setNumOfRecs(nRecs);
    dataFile.setOutFilename(txtFilename.getText());
    dataFile.setOutDataItems(vDataFileItems);
    //this either:
    // a) adds the dataFile to the vector,
    // b) replaces an existing dataFile
    // c) does not add anything
    boolean toSave = updateDFDVector(vDFDs,dataFile);
    if(!toSave)
        return;
    try {
        //data file definition: save to disk, then refresh application state
        FileOutDataSaver fileOutSaver = new FileOutDataSaver();
        fileOutSaver.saveData(vDFDs, FileOutputPanel.this);
        frmMain.refreshTree();
        ApplicationContext.getInstance().setDFD(vDFDs);
    } catch(Exception e) {
        JOptionPane.showMessageDialog(this,"There was an error while saving the data/refreshing the tree.","Error",JOptionPane.ERROR_MESSAGE);
        logger.error("Text data not saved", e);
        return;
    }
    JOptionPane.showMessageDialog(this,"File definition saved.","Message",JOptionPane.INFORMATION_MESSAGE);
}//GEN-LAST:event_btnSaveActionPerformed

/**
 * Starts data generation on a background SwingWorker thread for the file
 * definition named in txtDefinition; progress is reported back through
 * this panel (the registered ProgressUpdateObserver).
 */
private void btnGenerateActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnGenerateActionPerformed
{//GEN-HEADEREND:event_btnGenerateActionPerformed
    final Generator generator = new Generator();
    final ProgressUpdateObserver observer = this;
    final JPanel pnl = this;
    SwingWorker worker = new SwingWorker() {
        public Object construct() {
            boolean found;
            generator.registerObserver(observer);
            generator.setEngineData(vRandomiserTypes,vRandomiserInstances,vDFDs);
            found = generator.setFileDefinitionOutput(txtDefinition.getText());
            if(!found) {
                JOptionPane.showMessageDialog(pnl,"File definition name not found, will do nothing.","Error",JOptionPane.ERROR_MESSAGE);
                return null;
            }
            generator.generate();
            return null;
        }
    };
    worker.start();
}//GEN-LAST:event_btnGenerateActionPerformed

// Shows a save-file chooser and copies the chosen path into txtFilename.
private void btnBrowseSaveActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnBrowseSaveActionPerformed
{//GEN-HEADEREND:event_btnBrowseSaveActionPerformed
String inputFile; JFileChooser chooser = new JFileChooser(); int returnVal = chooser.showSaveDialog(this); if(returnVal == JFileChooser.APPROVE_OPTION) { inputFile = chooser.getSelectedFile().getAbsolutePath(); txtFilename.setText(inputFile); } }//GEN-LAST:event_btnBrowseSaveActionPerformed private void btnDownActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnDownActionPerformed {//GEN-HEADEREND:event_btnDownActionPerformed int idx = lstOutData.getSelectedIndex(); if(idx==-1 || idx==modelOutData.getSize()-1) return; String current = (String) modelOutData.getElementAt(idx); String next = (String) modelOutData.getElementAt(idx+1); modelOutData.setElementAt(current, idx+1); modelOutData.setElementAt(next, idx); lstOutData.setSelectedIndex(idx+1); }//GEN-LAST:event_btnDownActionPerformed private void btnUpActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnUpActionPerformed {//GEN-HEADEREND:event_btnUpActionPerformed int idx = lstOutData.getSelectedIndex(); if(idx==-1 || idx==0) return; String current = (String) modelOutData.getElementAt(idx); String previous= (String) modelOutData.getElementAt(idx-1); modelOutData.setElementAt(current, idx-1); modelOutData.setElementAt(previous, idx); lstOutData.setSelectedIndex(idx-1); }//GEN-LAST:event_btnUpActionPerformed private void btnRemoveFormatActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnRemoveFormatActionPerformed {//GEN-HEADEREND:event_btnRemoveFormatActionPerformed int row = lstOutData.getSelectedIndex(); if(row==-1) return; String selItem = (String)modelOutData.get(row); int idxLeft = selItem.indexOf(Constants.LEFT_MARK); String genInstance = selItem.substring(0,idxLeft); genInstance = genInstance + "(Default,None,Left)"; modelOutData.set(row,genInstance); }//GEN-LAST:event_btnRemoveFormatActionPerformed private void btnRemoveActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnRemoveActionPerformed {//GEN-HEADEREND:event_btnRemoveActionPerformed int 
row = lstOutData.getSelectedIndex(); if(row==-1) return; modelOutData.remove(row); if(row<modelOutData.size()) lstOutData.setSelectedIndex(row); else if(modelOutData.size()>0) lstOutData.setSelectedIndex(modelOutData.size()-1); }//GEN-LAST:event_btnRemoveActionPerformed private void btnViewRIActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnViewRIActionPerformed {//GEN-HEADEREND:event_btnViewRIActionPerformed RandomiserType rt=null; RandomiserInstance ri=null; String className, type; RandomiserPanel pnlRandomTypeUserPanel; Utils utils = new Utils(); //check that something is selected int row = lstRandomiserInstances.getSelectedIndex(); if(row==-1) return; String selRI = (String)modelRI.getElementAt(row); boolean found =false; int i=0; while( i<vRandomiserInstances.size() && !found) { ri = vRandomiserInstances.elementAt(i); type = ri.getRandomiserType(); if(ri.getName().equalsIgnoreCase(selRI)) { for(int k=0; k<vRandomiserTypes.size(); k++) { rt = vRandomiserTypes.elementAt(k); if(rt.getName().equalsIgnoreCase(type)) { className =rt.getGenerator(); found = true; break; } } } i++; } logger.debug("Panel found:" + found); if(found) { className = rt.getPanel(); pnlRandomTypeUserPanel = (RandomiserPanel) utils.createObject(className); if(pnlRandomTypeUserPanel==null) { logger.warn("error loading panel:"+className); return; } pnlRandomTypeUserPanel.initialise(ri); //create the hosting panel and pass the vector of existing data frmProperties = new ViewPropertiesForm(); frmProperties.setTitle("View properties: "+selRI); frmProperties.setPropertiesPanel(pnlRandomTypeUserPanel); frmProperties.setVisible(true); frmProperties.pack(); } }//GEN-LAST:event_btnViewRIActionPerformed private void btnAddActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btnAddActionPerformed {//GEN-HEADEREND:event_btnAddActionPerformed int width; int order = -1; String sWidth = txtWidth.getText(); String enclosingChar = txtChar.getText(); String sOrder = 
    txtOrder.getText();
    // optional explicit order: empty field means "None"; -1 also means "None"
    if(sOrder.trim().length()!=0) {
        try {
            // anything from -1 to infinite is ok
            order = Integer.parseInt(sOrder);
        } catch(Exception e) {
            order=-2;                   // sentinel: not a number
        }
        if(order<=-2) {
            JOptionPane.showMessageDialog(this,"Order should be numerical greater than 0","Error",JOptionPane.ERROR_MESSAGE);
            return;
        }
    }
    // optional width: empty means default (0), otherwise a positive integer
    width = 0;
    if(sWidth.trim().length()!=0) {
        try {
            width = Integer.parseInt(sWidth);
            if(width<=0)
                width=-1;
        } catch(Exception e) {
            width=-1;
        }
        if(width==-1) {
            JOptionPane.showMessageDialog(this,"Width should be numerical greater than 0","Error",JOptionPane.ERROR_MESSAGE);
            return;
        }
    }
    int idx = lstRandomiserInstances.getSelectedIndex();
    if(idx==-1) {
        JOptionPane.showMessageDialog(this,"Please select a generator first!","Error",JOptionPane.ERROR_MESSAGE);
        return;
    }
    String name = (String)modelRI.getElementAt(idx);
    // encode the options; each piece carries its own leading comma
    String align;
    if(radLeft.isSelected())
        align=",Left";
    else if(radCenter.isSelected())
        align=",Center";
    else
        align=",Right";
    if(width==0)
        sWidth = "Default";
    else
        sWidth = "" + width;
    if(enclosingChar==null || enclosingChar.trim().length()==0)
        enclosingChar = ",None";
    else
        enclosingChar = "," + enclosingChar.substring(0,1); // only first char used
    if(order == -1)
        sOrder = ",None";
    else
        sOrder = "," + order;
    // item format: name + LEFT_MARK + width,encloseChar,align,order + RIGHT_MARK
    String toAdd = name + Constants.LEFT_MARK + sWidth + enclosingChar+ align + sOrder + Constants.RIGHT_MARK;
    modelOutData.addElement(toAdd);
}//GEN-LAST:event_btnAddActionPerformed

// Auto-generated stub for the width text field's action event (unused).
private void txtWidthActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_txtWidthActionPerformed
{//GEN-HEADEREND:event_txtWidthActionPerformed
}//GEN-LAST:event_txtWidthActionPerformed

// When a randomiser type is selected, repopulates the instances list for it
// and selects the first instance, if any.
private void lstRandomiserTypesValueChanged(javax.swing.event.ListSelectionEvent evt)//GEN-FIRST:event_lstRandomiserTypesValueChanged
{//GEN-HEADEREND:event_lstRandomiserTypesValueChanged
    if(evt.getValueIsAdjusting() )
        return;
    int idx = lstRandomiserTypes.getSelectedIndex();
    if(idx==-1)
        return;
    populateRandomiserInstances( idx );
    if(lstRandomiserInstances.getModel().getSize()>0)
        lstRandomiserInstances.setSelectedIndex(0);
}//GEN-LAST:event_lstRandomiserTypesValueChanged

// Auto-generated stub for the types list's property-change event (unused).
private void lstRandomiserTypesPropertyChange(java.beans.PropertyChangeEvent evt)//GEN-FIRST:event_lstRandomiserTypesPropertyChange
{//GEN-HEADEREND:event_lstRandomiserTypesPropertyChange
}//GEN-LAST:event_lstRandomiserTypesPropertyChange

/**
 * Starts the data generation process, according to the data file definition name
 * that is currently populated in the txtDefinition field.
 * Uses a SwingWorker thread. The generator class displays a form
 * with a progress bar. Notice, that there is a pending bug here:
 * it would be good if the rest of the GUI was aware of such a possibly
 * lengthy process; currently, it is not. At least this method should not
 * be re-entrant. This remains to be fixed [*].
 */
public void dataGenStarted()
{
    // create the progress form and position it over this panel
    // (horizontally centred, vertically in the upper third)
    frmProgress = new ProgressForm();
    int width, height, posX, posY;
    width = this.getWidth();
    height = this.getHeight();
    frmProgress.setTitle("Progress...");
    posX = this.getX() + (width-frmProgress.getWidth())/2;
    posY = this.getY() + (height-frmProgress.getHeight())/3;
    frmProgress.setLocation(posX,posY);
    frmProgress.setResizable(false);
    frmProgress.setVisible(true);
}

// Observer callback: fixes the progress bar's maximum before generation starts.
public void dataGenMaxProgressValue(int maxProgress)
{
    frmProgress.setProgressMaxValue(maxProgress);
}

// Observer callback: pushes progress/message to the form on the EDT and
// reports back whether the user asked to interrupt the generation.
// NOTE(review): presumably called from the worker thread; frmProgress is
// dereferenced without a null check on the return path while dataGenEnd
// sets it to null — confirm the two cannot race.
public boolean dataGenProgressContinue(String msg, int progress)
{
    final int v=progress;
    final String s = msg;
    SwingUtilities.invokeLater( new Runnable() {
        public void run() {
            if(frmProgress!=null) {
                frmProgress.setProgressValue(v);
                frmProgress.setLabelMessage(s);
            }
        }
    });
    return frmProgress.isInterrupted();
}

// Observer callback: hides the progress form and tells the user whether the
// run completed or was interrupted by them.
public void dataGenEnd()
{
    frmProgress.setVisible(false);
    if(frmProgress.isInterrupted())
        JOptionPane.showMessageDialog(this,"Data generation process was interrupted by user.","Warning",JOptionPane.WARNING_MESSAGE);
    else
        JOptionPane.showMessageDialog(this,"Data generation done.","Information",JOptionPane.INFORMATION_MESSAGE);
    frmProgress=null;
}

// Observer callback: reports a generation error and discards the progress form.
public void datageGenError(String msg)
{ frmProgress.setVisible(false); frmProgress=null; JOptionPane.showMessageDialog(this,msg,"Information",JOptionPane.ERROR_MESSAGE); } /** * Loads the file definition populating the various * components. * * <p>Preconditions: vDFDs should not be null. * * <p>Post-effects : fields are loaded to the various components. * * @param fileDefinition - the file definition name to load. */ public void loadExistingDefinition(String fileDefinition) { logger.debug("Loading existing definition: " +fileDefinition); DataFileDefinition dfd; Vector<DataFileItem> vDataItems; String align, width, encloseChar, order; for(int i=0; i<vDFDs.size(); i++) { dfd = vDFDs.elementAt(i); if(dfd.getName().equalsIgnoreCase(fileDefinition)) { vDataItems = dfd.getOutDataItems(); for(int j=0; j<vDataItems.size(); j++) { DataFileItem dataItem = vDataItems.elementAt(j); if(dataItem.getWidth()==Constants.DEFAULT_WIDTH) width = "Default"; else width = ""+dataItem.getWidth(); if(dataItem.getAlignment()==Constants.ALIGN_LEFT) align = "Left"; else if(dataItem.getAlignment()==Constants.ALIGN_CENTER) align = "Center"; else align="Right"; if(dataItem.getEncloseChar()==null || dataItem.getEncloseChar().length()==0) encloseChar="None"; else encloseChar = dataItem.getEncloseChar(); if(dataItem.getOrder() == Constants.ORDER_DEFAULT) order = "None"; else{ order = ""+dataItem.getOrder(); } modelOutData.addElement(dataItem.getRandomiserInstanceName() + Constants.LEFT_MARK + width + ","+ encloseChar + ","+ align + "," + order + Constants.RIGHT_MARK); }//for txtDefinition.setText( dfd.getName() ); txtDescription.setText(dfd.getDescription()); txtFilename.setText(dfd.getOutFilename()); txtNumOfRecs.setText(""+dfd.getNumOfRecs()); break; } } logger.debug("Loading existing definition: Done"); } /** * Saves the existing data definition. * This means parsing the items in the output fields list, * and storing everything in xml. 
     *
     */

    /*
     * Checks the vector of the data file definitions, so as to see if the
     * data file definition currently on the panel already exists in the vDFDs
     * vector. If it does, the existing entry is overwritten in place;
     * otherwise the new definition is appended.
     *
     * @param vDataFileDefinitions the vector to update (modified in place).
     * @param dfdToAdd the definition to insert or overwrite (matched by
     *                 case-insensitive name).
     * @return always true.
     */
    public boolean updateDFDVector(Vector<DataFileDefinition> vDataFileDefinitions, DataFileDefinition dfdToAdd)
    {
        int existPos;//, answer;
        DataFileDefinition dfd;
        //check that this randomiser instance exists
        existPos=-1;
        //answer=0;
        for(int i=0; i<vDataFileDefinitions.size(); i++)
        {
            dfd = vDataFileDefinitions.elementAt(i);
            if(dfd.getName().equalsIgnoreCase(dfdToAdd.getName()))
                existPos=i;
        }
        if(existPos!=-1)
        {
            // Name already present: replace the existing entry in place.
            vDataFileDefinitions.setElementAt(dfdToAdd,existPos);
            logger.debug("Overwritten existing element at position:"+existPos);
        }
        else
        {
            vDataFileDefinitions.add(dfdToAdd);
            logger.debug("Added one element...");
        }
        return true;
    }

    //displays a FileChooser dialog for selecting a directory, where the output
    //file will be saved.

    //displays a form where the currently selected randomiser instance,
    // is displayed. This allows the user to check what are the properties
    // of an existing randomiser instance.

    //removes the selected data file item from the listbox of the fields
    // that will be in the output of the text files.

    //removes the format of the selected data file item from the listbox of the fields
    // that will be in the output of the text files.
    // (This is the rightmost panel)
    // The format is made up of the following info:
    // width, enclosing character alignment

    //moves the selected data file output item downwards in the list
    // (This is the rightmost panel)

    //moves the selected data file output item upwards in the list
    // (This is the rightmost panel)

    //adds a randomiser instance(2nd list box) to the
    // data file output items (3rd box from the left - rightmost panel)

    //for every randomiser type that is clicked (1st panel on the left)
    // load the randomiser instances that belong to it (2nd panel)

    //populates the 2nd list box with the randomiser instances
    // idx defines which randomiser type to load
    // notice that idx=0 points to the item "All user-defined cases"
    // (so idx-1 indexes into vRandomiserTypes for any other selection)
    private void populateRandomiserInstances(int idx)
    {
        RandomiserInstance ri;
        RandomiserType rt;
        modelRI.removeAllElements();
        if(idx==0)
        {
            // "All user-defined cases": show every instance regardless of type.
            for(int i=0; i<vRandomiserInstances.size(); i++)
            {
                ri = vRandomiserInstances.elementAt(i);
                modelRI.addElement( ri.getName() );
            }
        }
        else
        {
            // Show only instances whose type matches the selected randomiser type.
            rt=vRandomiserTypes.elementAt(idx-1);
            for(int i=0; i<vRandomiserInstances.size(); i++)
            {
                ri = vRandomiserInstances.elementAt(i);
                if(ri.getRandomiserType().equalsIgnoreCase(rt.getName()))
                {
                    modelRI.addElement( ri.getName() );
                }
            }
        }
    }

    // Stores a back-reference to the owning main window.
    void setMainForm(MainForm mainForm)
    {
        this.frmMain = mainForm;
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton btnAdd;
    private javax.swing.JButton btnBrowseSave;
    private javax.swing.JButton btnDown;
    private javax.swing.JButton btnGenerate;
    private javax.swing.JButton btnRemove;
    private javax.swing.JButton btnRemoveFormat;
    private javax.swing.JButton btnSave;
    private javax.swing.JButton btnUp;
    private javax.swing.JButton btnViewRI;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel10;
    private javax.swing.JLabel jLabel11;
    private javax.swing.JLabel jLabel12;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JLabel jLabel8;
    private javax.swing.JLabel jLabel9;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JPanel jPanel3;
    private javax.swing.JPanel jPanel4;
    private javax.swing.JPanel jPanel5;
    private javax.swing.JPanel jPanel6;
    private javax.swing.JLabel jLabel13;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JScrollPane jScrollPane3;
    private javax.swing.JList lstOutData;
    private javax.swing.JList lstRandomiserInstances;
    private javax.swing.JList lstRandomiserTypes;
    private javax.swing.JRadioButton radCenter;
    private javax.swing.JRadioButton radComma;
    private javax.swing.JRadioButton radLeft;
    private javax.swing.JRadioButton radOther;
    private javax.swing.JRadioButton radRight;
    private javax.swing.JRadioButton radTab;
    private javax.swing.JTextField txtChar;
    private javax.swing.JTextField txtOrder;
    private javax.swing.JTextField txtDefinition;
    private javax.swing.JTextField txtDelim;
    private javax.swing.JTextArea txtDescription;
    private javax.swing.JTextField txtFilename;
    private javax.swing.JRadioButton txtNone;
    private javax.swing.JTextField txtNumOfRecs;
    private javax.swing.JTextField txtWidth;
    // End of variables declaration//GEN-END:variables
}
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.more.asm;
/**
 * A constant pool item. Constant pool items can be created with the 'newXXX'
 * methods in the {@link ClassWriter} class.
 *
 * @author Eric Bruneton
 */
final class Item {
    /**
     * Index of this item in the constant pool.
     */
    int    index;
    /**
     * Type of this constant pool item. A single class is used to represent all
     * constant pool item types, in order to minimize the bytecode size of this
     * package. The value of this field is one of {@link ClassWriter#INT},
     * {@link ClassWriter#LONG}, {@link ClassWriter#FLOAT},
     * {@link ClassWriter#DOUBLE}, {@link ClassWriter#UTF8},
     * {@link ClassWriter#STR}, {@link ClassWriter#CLASS},
     * {@link ClassWriter#NAME_TYPE}, {@link ClassWriter#FIELD},
     * {@link ClassWriter#METH}, {@link ClassWriter#IMETH},
     * {@link ClassWriter#MTYPE}, {@link ClassWriter#INDY}.
     *
     * MethodHandle constant 9 variations are stored using a range of 9 values
     * from {@link ClassWriter#HANDLE_BASE} + 1 to
     * {@link ClassWriter#HANDLE_BASE} + 9.
     *
     * Special Item types are used for Items that are stored in the ClassWriter
     * {@link ClassWriter#typeTable}, instead of the constant pool, in order to
     * avoid clashes with normal constant pool items in the ClassWriter constant
     * pool's hash table. These special item types are
     * {@link ClassWriter#TYPE_NORMAL}, {@link ClassWriter#TYPE_UNINIT} and
     * {@link ClassWriter#TYPE_MERGED}.
     */
    int    type;
    /**
     * Value of this item, for an integer item.
     */
    int    intVal;
    /**
     * Value of this item, for a long item.
     */
    long   longVal;
    /**
     * First part of the value of this item, for items that do not hold a
     * primitive value.
     */
    String strVal1;
    /**
     * Second part of the value of this item, for items that do not hold a
     * primitive value.
     */
    String strVal2;
    /**
     * Third part of the value of this item, for items that do not hold a
     * primitive value.
     */
    String strVal3;
    /**
     * The hash code value of this constant pool item.
     */
    int    hashCode;
    /**
     * Link to another constant pool item, used for collision lists in the
     * constant pool's hash table.
     */
    Item   next;
    /**
     * Constructs an uninitialized {@link Item}.
     */
    Item() {
    }
    /**
     * Constructs an uninitialized {@link Item} for constant pool element at
     * given position.
     *
     * @param index
     *            index of the item to be constructed.
     */
    Item(final int index) {
        this.index = index;
    }
    /**
     * Constructs a copy of the given item.
     *
     * @param index
     *            index of the item to be constructed.
     * @param i
     *            the item that must be copied into the item to be constructed.
     */
    Item(final int index, final Item i) {
        this.index = index;
        type = i.type;
        intVal = i.intVal;
        longVal = i.longVal;
        strVal1 = i.strVal1;
        strVal2 = i.strVal2;
        strVal3 = i.strVal3;
        hashCode = i.hashCode;
    }
    /**
     * Sets this item to an integer item.
     *
     * @param intVal
     *            the value of this item.
     */
    void set(final int intVal) {
        this.type = ClassWriter.INT;
        this.intVal = intVal;
        // Mask with 0x7FFFFFFF so the hash is always non-negative.
        this.hashCode = 0x7FFFFFFF & (type + intVal);
    }
    /**
     * Sets this item to a long item.
     *
     * @param longVal
     *            the value of this item.
     */
    void set(final long longVal) {
        this.type = ClassWriter.LONG;
        this.longVal = longVal;
        this.hashCode = 0x7FFFFFFF & (type + (int) longVal);
    }
    /**
     * Sets this item to a float item.
     *
     * @param floatVal
     *            the value of this item.
     */
    void set(final float floatVal) {
        this.type = ClassWriter.FLOAT;
        this.intVal = Float.floatToRawIntBits(floatVal);
        this.hashCode = 0x7FFFFFFF & (type + (int) floatVal);
    }
    /**
     * Sets this item to a double item.
     *
     * @param doubleVal
     *            the value of this item.
     */
    void set(final double doubleVal) {
        this.type = ClassWriter.DOUBLE;
        this.longVal = Double.doubleToRawLongBits(doubleVal);
        this.hashCode = 0x7FFFFFFF & (type + (int) doubleVal);
    }
    /**
     * Sets this item to an item that do not hold a primitive value.
     *
     * @param type
     *            the type of this item.
     * @param strVal1
     *            first part of the value of this item.
     * @param strVal2
     *            second part of the value of this item.
     * @param strVal3
     *            third part of the value of this item.
     */
    @SuppressWarnings("fallthrough")
    void set(final int type, final String strVal1, final String strVal2, final String strVal3) {
        this.type = type;
        this.strVal1 = strVal1;
        this.strVal2 = strVal2;
        this.strVal3 = strVal3;
        switch (type) {
            case ClassWriter.CLASS:
                this.intVal = 0; // intVal of a class must be zero, see visitInnerClass
                // intentional fallthrough: CLASS shares the single-string hash below
            case ClassWriter.UTF8:
            case ClassWriter.STR:
            case ClassWriter.MTYPE:
            case ClassWriter.TYPE_NORMAL:
                hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
                return;
            case ClassWriter.NAME_TYPE: {
                hashCode = 0x7FFFFFFF & (type + strVal1.hashCode() * strVal2.hashCode());
                return;
            }
            // ClassWriter.FIELD:
            // ClassWriter.METH:
            // ClassWriter.IMETH:
            // ClassWriter.HANDLE_BASE + 1..9
            default:
                hashCode = 0x7FFFFFFF & (type + strVal1.hashCode() * strVal2.hashCode() * strVal3.hashCode());
        }
    }
    /**
     * Sets the item to an InvokeDynamic item.
     *
     * @param name
     *            invokedynamic's name.
     * @param desc
     *            invokedynamic's desc.
     * @param bsmIndex
     *            zero based index into the class attribute BootstrapMethods.
     */
    void set(String name, String desc, int bsmIndex) {
        this.type = ClassWriter.INDY;
        this.longVal = bsmIndex;
        this.strVal1 = name;
        this.strVal2 = desc;
        this.hashCode = 0x7FFFFFFF & (ClassWriter.INDY + bsmIndex * strVal1.hashCode() * strVal2.hashCode());
    }
    /**
     * Sets the item to a BootstrapMethod item.
     *
     * @param position
     *            position in byte in the class attribute BootstrapMethods.
     * @param hashCode
     *            hashcode of the item. This hashcode is processed from the
     *            hashcode of the bootstrap method and the hashcode of all
     *            bootstrap arguments.
     */
    void set(int position, int hashCode) {
        this.type = ClassWriter.BSM;
        this.intVal = position;
        this.hashCode = hashCode;
    }
    /**
     * Indicates if the given item is equal to this one. <i>This method assumes
     * that the two items have the same {@link #type}</i>.
     *
     * @param i
     *            the item to be compared to this one. Both items must have the
     *            same {@link #type}.
     * @return <tt>true</tt> if the given item is equal to this one,
     *         <tt>false</tt> otherwise.
     */
    boolean isEqualTo(final Item i) {
        switch (type) {
            case ClassWriter.UTF8:
            case ClassWriter.STR:
            case ClassWriter.CLASS:
            case ClassWriter.MTYPE:
            case ClassWriter.TYPE_NORMAL:
                return i.strVal1.equals(strVal1);
            case ClassWriter.TYPE_MERGED:
            case ClassWriter.LONG:
            case ClassWriter.DOUBLE:
                return i.longVal == longVal;
            case ClassWriter.INT:
            case ClassWriter.FLOAT:
                return i.intVal == intVal;
            case ClassWriter.TYPE_UNINIT:
                return i.intVal == intVal && i.strVal1.equals(strVal1);
            case ClassWriter.NAME_TYPE:
                return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
            case ClassWriter.INDY: {
                return i.longVal == longVal && i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
            }
            // case ClassWriter.FIELD:
            // case ClassWriter.METH:
            // case ClassWriter.IMETH:
            // case ClassWriter.HANDLE_BASE + 1..9
            default:
                return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2) && i.strVal3.equals(strVal3);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.types;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.BinaryType;
import org.apache.flink.table.types.logical.BooleanType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.FloatType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LocalZonedTimestampType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.NullType;
import org.apache.flink.table.types.logical.RawType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.RowType.RowField;
import org.apache.flink.table.types.logical.SmallIntType;
import org.apache.flink.table.types.logical.StructuredType;
import org.apache.flink.table.types.logical.StructuredType.StructuredAttribute;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.logical.TinyIntType;
import org.apache.flink.table.types.logical.TypeInformationRawType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.logical.YearMonthIntervalType;
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts;

import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.Arrays;
import java.util.stream.Stream;

import static org.assertj.core.api.Assertions.assertThat;

/** Tests for {@link LogicalTypeCasts}. */
@Execution(ExecutionMode.CONCURRENT)
class LogicalTypeCastsTest {

    // Each case is (sourceType, targetType, supportsImplicitCast, supportsExplicitCast).
    public static Stream<Arguments> testData() {
        return Stream.of(
                Arguments.of(new SmallIntType(), new BigIntType(), true, true),

                // nullability does not match
                Arguments.of(new SmallIntType(false), new SmallIntType(), true, true),
                Arguments.of(new SmallIntType(), new SmallIntType(false), false, true),
                Arguments.of(
                        new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.YEAR),
                        new SmallIntType(),
                        true,
                        true),

                // not an interval with single field
                Arguments.of(
                        new YearMonthIntervalType(
                                YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH),
                        new SmallIntType(),
                        false,
                        false),
                Arguments.of(new IntType(), new DecimalType(5, 5), true, true),

                // loss of precision
                Arguments.of(new FloatType(), new IntType(), false, true),
                Arguments.of(new VarCharType(Integer.MAX_VALUE), new FloatType(), false, true),
                Arguments.of(new FloatType(), new VarCharType(Integer.MAX_VALUE), false, true),
                Arguments.of(
                        new DecimalType(3, 2), new VarCharType(Integer.MAX_VALUE), false, true),
                Arguments.of(
                        new TypeInformationRawType<>(Types.GENERIC(LogicalTypesTest.class)),
                        new TypeInformationRawType<>(Types.GENERIC(LogicalTypesTest.class)),
                        true,
                        true),
                // raw types with different underlying classes are incompatible
                Arguments.of(
                        new TypeInformationRawType<>(Types.GENERIC(LogicalTypesTest.class)),
                        new TypeInformationRawType<>(Types.GENERIC(Object.class)),
                        false,
                        false),
                Arguments.of(new NullType(), new IntType(), true, true),
                Arguments.of(
                        new NullType(),
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new IntType()),
                                        new RowField("f2", new IntType()))),
                        true,
                        true),
                Arguments.of(
                        new ArrayType(new IntType()), new ArrayType(new BigIntType()), true, true),
                Arguments.of(
                        new ArrayType(new IntType()),
                        new ArrayType(new VarCharType(Integer.MAX_VALUE)),
                        false,
                        true),
                Arguments.of(
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new IntType()),
                                        new RowField("f2", new IntType()))),
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new IntType()),
                                        new RowField("f2", new BigIntType()))),
                        true,
                        true),
                // field descriptions do not affect castability
                Arguments.of(
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new IntType(), "description"),
                                        new RowField("f2", new IntType()))),
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new IntType()),
                                        new RowField("f2", new BigIntType()))),
                        true,
                        true),
                Arguments.of(
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new IntType()),
                                        new RowField("f2", new IntType()))),
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new IntType()),
                                        new RowField("f2", new BooleanType()))),
                        false,
                        true),
                Arguments.of(
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new IntType()),
                                        new RowField("f2", new IntType()))),
                        new VarCharType(Integer.MAX_VALUE),
                        false,
                        true),

                // timestamp type and timestamp_ltz type
                Arguments.of(new TimestampType(9), new TimestampType(9), true, true),
                Arguments.of(
                        new LocalZonedTimestampType(9), new LocalZonedTimestampType(9), true, true),
                Arguments.of(new TimestampType(3), new LocalZonedTimestampType(3), true, true),
                Arguments.of(new LocalZonedTimestampType(3), new TimestampType(3), true, true),
                Arguments.of(new TimestampType(3), new LocalZonedTimestampType(6), true, true),
                Arguments.of(new LocalZonedTimestampType(3), new TimestampType(6), true, true),
                Arguments.of(
                        new TimestampType(false, 3), new LocalZonedTimestampType(6), true, true),
                Arguments.of(
                        new LocalZonedTimestampType(false, 3), new TimestampType(6), true, true),
                Arguments.of(new TimestampType(6), new LocalZonedTimestampType(3), true, true),
                Arguments.of(new LocalZonedTimestampType(6), new TimestampType(3), true, true),

                // row and structured type
                Arguments.of(
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new TimestampType()),
                                        new RowField("f2", new IntType()))),
                        StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "User"))
                                .attributes(
                                        Arrays.asList(
                                                new StructuredAttribute("f1", new TimestampType()),
                                                new StructuredAttribute("f2", new IntType())))
                                .build(),
                        true,
                        true),
                // attribute names may differ as long as types are compatible
                Arguments.of(
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new TimestampType()),
                                        new RowField("f2", new IntType()))),
                        StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "User"))
                                .attributes(
                                        Arrays.asList(
                                                new StructuredAttribute("f1", new TimestampType()),
                                                new StructuredAttribute("diff", new IntType())))
                                .build(),
                        true,
                        true),
                Arguments.of(
                        new RowType(
                                Arrays.asList(
                                        new RowField("f1", new TimestampType()),
                                        new RowField("f2", new IntType()))),
                        StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "User"))
                                .attributes(
                                        Arrays.asList(
                                                new StructuredAttribute("f1", new TimestampType()),
                                                new StructuredAttribute(
                                                        "diff", new TinyIntType())))
                                .build(),
                        false,
                        true),

                // test slightly different children of anonymous structured types
                Arguments.of(
                        StructuredType.newBuilder(Void.class)
                                .attributes(
                                        Arrays.asList(
                                                new StructuredAttribute("f1", new TimestampType()),
                                                new StructuredAttribute(
                                                        "diff", new TinyIntType(false))))
                                .build(),
                        StructuredType.newBuilder(Void.class)
                                .attributes(
                                        Arrays.asList(
                                                new StructuredAttribute("f1", new TimestampType()),
                                                new StructuredAttribute(
                                                        "diff", new TinyIntType(true))))
                                .build(),
                        true,
                        true),
                Arguments.of(
                        StructuredType.newBuilder(Void.class)
                                .attributes(
                                        Arrays.asList(
                                                new StructuredAttribute("f1", new TimestampType()),
                                                new StructuredAttribute("diff", new IntType())))
                                .build(),
                        StructuredType.newBuilder(Void.class)
                                .attributes(
                                        Arrays.asList(
                                                new StructuredAttribute("f1", new TimestampType()),
                                                new StructuredAttribute(
                                                        "diff", new TinyIntType())))
                                .build(),
                        false,
                        true),

                // raw to binary
                Arguments.of(
                        new RawType<>(Integer.class, IntSerializer.INSTANCE),
                        new BinaryType(),
                        false,
                        true),

                // raw to string
                Arguments.of(
                        new RawType<>(Integer.class, IntSerializer.INSTANCE),
                        VarCharType.STRING_TYPE,
                        false,
                        true));
    }

    // Checks both cast directions (implicit and explicit) for one (source, target) pair.
    @ParameterizedTest(name = "{index}: [From: {0}, To: {1}, Implicit: {2}, Explicit: {3}]")
    @MethodSource("testData")
    void test(
            LogicalType sourceType,
            LogicalType targetType,
            boolean supportsImplicit,
            boolean supportsExplicit) {
        assertThat(LogicalTypeCasts.supportsImplicitCast(sourceType, targetType))
                .as("Supports implicit casting")
                .isEqualTo(supportsImplicit);
        assertThat(LogicalTypeCasts.supportsExplicitCast(sourceType, targetType))
                .as("Supports explicit casting")
                .isEqualTo(supportsExplicit);
    }
}
/*
 * Copyright 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.javasrc.codedeps;

import com.google.template.soy.data.SoyData;
import com.google.template.soy.data.SoyMapData;
import com.google.template.soy.data.internal.AugmentedSoyMapData;
import com.google.template.soy.data.restricted.BooleanData;
import com.google.template.soy.data.restricted.CollectionData;
import com.google.template.soy.data.restricted.FloatData;
import com.google.template.soy.data.restricted.IntegerData;
import com.google.template.soy.data.restricted.NullData;
import com.google.template.soy.data.restricted.NumberData;
import com.google.template.soy.data.restricted.StringData;
import com.google.template.soy.data.restricted.UndefinedData;
import com.google.template.soy.shared.restricted.Sanitizers;

import java.util.Map;
import java.util.regex.Pattern;

import javax.annotation.Nullable;

/**
 * Library of utilities needed by the generated code. Do not use these from hand-written code.
 *
 * @author Kai Huang
 */
public class SoyUtils {

    // Matches any single line break (CRLF, CR, or LF); compiled once and reused.
    private static final Pattern NEWLINE_PATTERN = Pattern.compile("(\\r\\n|\\r|\\n)");

    // -----------------------------------------------------------------------------------------------
    // Basics.

    // Looks up keyStr in a collection and substitutes UndefinedData for a missing value.
    public static SoyData $$getData(SoyData collectionData, String keyStr) {
        SoyData value = ((CollectionData) collectionData).get(keyStr);
        return (value != null) ? value : UndefinedData.INSTANCE;
    }

    // TODO: Use this in generated Java code instead of $$getData(), whenever possible.
    public static SoyData $$getDataSingle(SoyData collectionData, String key) {
        SoyData value = ((CollectionData) collectionData).getSingle(key);
        return (value != null) ? value : UndefinedData.INSTANCE;
    }

    // Layers additionalData on top of baseData without mutating baseData.
    public static SoyMapData $$augmentData(SoyMapData baseData, SoyMapData additionalData) {
        AugmentedSoyMapData augmentedData = new AugmentedSoyMapData(baseData);
        for (Map.Entry<String, SoyData> entry : additionalData.asMap().entrySet()) {
            augmentedData.putSingle(entry.getKey(), entry.getValue());
        }
        return augmentedData;
    }

    // -----------------------------------------------------------------------------------------------
    // Print directives.

    // See BasicDirectivesModule for details of how the escaping directives end up invoking these
    // concrete java implementations.

    public static String $$escapeHtml(SoyData value) {
        return Sanitizers.escapeHtml(value);
    }

    public static String $$escapeHtmlRcdata(SoyData value) {
        return Sanitizers.escapeHtmlRcdata(value);
    }

    public static String $$normalizeHtml(SoyData value) {
        return Sanitizers.normalizeHtml(value);
    }

    public static String $$normalizeHtmlNospace(SoyData value) {
        return Sanitizers.normalizeHtmlNospace(value);
    }

    public static String $$escapeHtmlAttribute(SoyData value) {
        return Sanitizers.escapeHtmlAttribute(value);
    }

    public static String $$escapeHtmlAttributeNospace(SoyData value) {
        return Sanitizers.escapeHtmlAttributeNospace(value);
    }

    public static String $$escapeJsString(SoyData value) {
        return Sanitizers.escapeJsString(value);
    }

    public static String $$escapeJsValue(SoyData value) {
        return Sanitizers.escapeJsValue(value);
    }

    public static String $$escapeJsRegex(SoyData value) {
        return Sanitizers.escapeJsRegex(value);
    }

    public static String $$escapeCssString(SoyData value) {
        return Sanitizers.escapeCssString(value);
    }

    public static String $$filterCssValue(SoyData value) {
        return Sanitizers.filterCssValue(value);
    }

    public static String $$escapeUri(SoyData value) {
        return Sanitizers.escapeUri(value);
    }

    public static String $$normalizeUri(SoyData value) {
        return Sanitizers.normalizeUri(value);
    }

    public static String $$filterNormalizeUri(SoyData value) {
        return Sanitizers.filterNormalizeUri(value);
    }

    public static String $$filterHtmlAttributes(SoyData value) {
        return Sanitizers.filterHtmlAttributes(value);
    }

    public static String $$filterHtmlElementName(SoyData value) {
        return Sanitizers.filterHtmlElementName(value);
    }

    // Replaces every line break with "<br>".
    public static String $$changeNewlineToBr(String value) {
        return NEWLINE_PATTERN.matcher(value).replaceAll("<br>");
    }

    /**
     * Inserts a {@code <wbr>} after every run of maxCharsBetweenWordBreaks non-space
     * characters, skipping over HTML tags and counting HTML entities as one character.
     * Implemented as a small state machine over code points.
     */
    public static String $$insertWordBreaks(String value, int maxCharsBetweenWordBreaks) {

        StringBuilder result = new StringBuilder();

        // These variables keep track of important state while looping through the string below.
        boolean isInTag = false;  // whether we're inside an HTML tag
        boolean isMaybeInEntity = false;  // whether we might be inside an HTML entity
        int numCharsWithoutBreak = 0;  // number of characters since the last word break

        for (int codePoint, i = 0; i < value.length(); i += Character.charCount(codePoint)) {
            codePoint = value.codePointAt(i);

            // If hit maxCharsBetweenWordBreaks, and next char is not a space, then add <wbr>.
            if (numCharsWithoutBreak >= maxCharsBetweenWordBreaks && codePoint != ' ') {
                result.append("<wbr>");
                numCharsWithoutBreak = 0;
            }

            if (isInTag) {
                // If inside an HTML tag and we see '>', it's the end of the tag.
                if (codePoint == '>') {
                    isInTag = false;
                }

            } else if (isMaybeInEntity) {
                switch (codePoint) {
                    // If maybe inside an entity and we see ';', it's the end of the entity. The entity
                    // that just ended counts as one char, so increment numCharsWithoutBreak.
                    case ';':
                        isMaybeInEntity = false;
                        ++numCharsWithoutBreak;
                        break;
                    // If maybe inside an entity and we see '<', we weren't actually in an entity. But
                    // now we're inside an HTML tag.
                    case '<':
                        isMaybeInEntity = false;
                        isInTag = true;
                        break;
                    // If maybe inside an entity and we see ' ', we weren't actually in an entity. Just
                    // correct the state and reset the numCharsWithoutBreak since we just saw a space.
                    case ' ':
                        isMaybeInEntity = false;
                        numCharsWithoutBreak = 0;
                        break;
                }

            } else {  // !isInTag && !isInEntity
                switch (codePoint) {
                    // When not within a tag or an entity and we see '<', we're now inside an HTML tag.
                    case '<':
                        isInTag = true;
                        break;
                    // When not within a tag or an entity and we see '&', we might be inside an entity.
                    case '&':
                        isMaybeInEntity = true;
                        break;
                    // When we see a space, reset the numCharsWithoutBreak count.
                    case ' ':
                        numCharsWithoutBreak = 0;
                        break;
                    // When we see a non-space, increment the numCharsWithoutBreak.
                    default:
                        ++numCharsWithoutBreak;
                        break;
                }
            }

            // In addition to adding <wbr>s, we still have to add the original characters.
            result.appendCodePoint(codePoint);
        }

        return result.toString();
    }

    /**
     * Truncates str to at most maxLen characters, optionally appending "...".
     * Takes care not to split a UTF-16 surrogate pair at the cut point.
     */
    public static String $$truncate(String str, int maxLen, boolean doAddEllipsis) {

        if (str.length() <= maxLen) {
            return str;  // no need to truncate
        }

        // If doAddEllipsis, either reduce maxLen to compensate, or else if maxLen is too small, just
        // turn off doAddEllipsis.
        if (doAddEllipsis) {
            if (maxLen > 3) {
                maxLen -= 3;
            } else {
                doAddEllipsis = false;
            }
        }

        // Make sure truncating at maxLen doesn't cut up a unicode surrogate pair.
        if (Character.isHighSurrogate(str.charAt(maxLen - 1)) &&
            Character.isLowSurrogate(str.charAt(maxLen))) {
            maxLen -= 1;
        }

        // Truncate.
        str = str.substring(0, maxLen);

        // Add ellipsis.
        if (doAddEllipsis) {
            str += "...";
        }

        return str;
    }

    // -----------------------------------------------------------------------------------------------
    // Operators.
public static NumberData $$negative(NumberData operand) { if (operand instanceof IntegerData) { return IntegerData.forValue( - operand.integerValue() ); } else { return FloatData.forValue( - operand.floatValue() ); } } public static NumberData $$times(NumberData operand0, NumberData operand1) { if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) { return IntegerData.forValue(operand0.integerValue() * operand1.integerValue()); } else { return FloatData.forValue(operand0.numberValue() * operand1.numberValue()); } } public static SoyData $$plus(SoyData operand0, SoyData operand1) { if (operand0 instanceof NumberData && operand1 instanceof NumberData) { if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) { return IntegerData.forValue(operand0.integerValue() + operand1.integerValue()); } else { return FloatData.forValue(operand0.numberValue() + operand1.numberValue()); } } else { // String concatenation. Note we're calling toString() instead of stringValue() in case one // of the operands needs to be coerced to a string. 
return StringData.forValue(operand0.toString() + operand1.toString()); } } public static NumberData $$minus(NumberData operand0, NumberData operand1) { if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) { return IntegerData.forValue(operand0.integerValue() - operand1.integerValue()); } else { return FloatData.forValue(operand0.numberValue() - operand1.numberValue()); } } public static BooleanData $$lessThan(NumberData operand0, NumberData operand1) { if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) { return BooleanData.forValue(operand0.integerValue() < operand1.integerValue()); } else { return BooleanData.forValue(operand0.numberValue() < operand1.numberValue()); } } public static BooleanData $$greaterThan(NumberData operand0, NumberData operand1) { if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) { return BooleanData.forValue(operand0.integerValue() > operand1.integerValue()); } else { return BooleanData.forValue(operand0.numberValue() > operand1.numberValue()); } } public static BooleanData $$lessThanOrEqual(NumberData operand0, NumberData operand1) { if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) { return BooleanData.forValue(operand0.integerValue() <= operand1.integerValue()); } else { return BooleanData.forValue(operand0.numberValue() <= operand1.numberValue()); } } public static BooleanData $$greaterThanOrEqual(NumberData operand0, NumberData operand1) { if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) { return BooleanData.forValue(operand0.integerValue() >= operand1.integerValue()); } else { return BooleanData.forValue(operand0.numberValue() >= operand1.numberValue()); } } // ----------------------------------------------------------------------------------------------- // Functions. public static BooleanData $$isNonnull(SoyData value) { return BooleanData.forValue(! 
(value instanceof UndefinedData || value instanceof NullData)); } public static NumberData $$round( NumberData valueData, @Nullable IntegerData numDigitsAfterPtData) { int numDigitsAfterPt = (numDigitsAfterPtData != null) ? numDigitsAfterPtData.integerValue() : 0 /* default */; if (numDigitsAfterPt == 0) { if (valueData instanceof IntegerData) { return valueData; } else { return IntegerData.forValue((int) Math.round(valueData.numberValue())); } } else if (numDigitsAfterPt > 0) { double value = valueData.numberValue(); double shift = Math.pow(10, numDigitsAfterPt); return FloatData.forValue(Math.round(value * shift) / shift); } else { double value = valueData.numberValue(); double shift = Math.pow(10, -numDigitsAfterPt); return IntegerData.forValue((int) (Math.round(value / shift) * shift)); } } public static NumberData $$min(NumberData arg0, NumberData arg1) { if (arg0 instanceof IntegerData && arg1 instanceof IntegerData) { return IntegerData.forValue(Math.min(arg0.integerValue(), arg1.integerValue())); } else { return FloatData.forValue(Math.min(arg0.numberValue(), arg1.numberValue())); } } public static NumberData $$max(NumberData arg0, NumberData arg1) { if (arg0 instanceof IntegerData && arg1 instanceof IntegerData) { return IntegerData.forValue(Math.max(arg0.integerValue(), arg1.integerValue())); } else { return FloatData.forValue(Math.max(arg0.numberValue(), arg1.numberValue())); } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.significant;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.TestSearchContext;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms;

/**
 * Unit tests for significant-terms significance heuristics: stream serialization round-trips,
 * shard-result reduction, builder/parser symmetry, and score precondition/edge-case behavior.
 */
public class SignificanceHeuristicTests extends ESTestCase {

    /** Minimal single-shard SearchContext stand-in so parsers can run without a real shard. */
    static class SignificantTermsTestSearchContext extends TestSearchContext {

        public SignificantTermsTestSearchContext() {
            super(null);
        }

        @Override
        public int numberOfShards() {
            return 1;
        }

        @Override
        public SearchShardTarget shardTarget() {
            return new SearchShardTarget("no node, this is a unit test",
                    new Index("no index, this is a unit test", "_na_"), 0);
        }
    }

    // test that stream output can actually be read - does not replace bwc test
    public void testStreamResponse() throws Exception {
        Version version = randomVersion(random());
        InternalSignificantTerms[] sigTerms = getRandomSignificantTerms(getRandomSignificanceheuristic());

        // write
        ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
        OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
        out.setVersion(version);
        sigTerms[0].writeTo(out);

        // read back into the empty sibling instance using the same wire version
        ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
        InputStreamStreamInput in = new InputStreamStreamInput(inBuffer);
        in.setVersion(version);
        sigTerms[1].readFrom(in);

        // heuristic and the single bucket must survive the round-trip unchanged
        assertTrue(sigTerms[1].significanceHeuristic.equals(sigTerms[0].significanceHeuristic));
        InternalSignificantTerms.Bucket originalBucket = (InternalSignificantTerms.Bucket) sigTerms[0].buckets.get(0);
        InternalSignificantTerms.Bucket streamedBucket = (InternalSignificantTerms.Bucket) sigTerms[1].buckets.get(0);
        assertThat(originalBucket.getKeyAsString(), equalTo(streamedBucket.getKeyAsString()));
        assertThat(originalBucket.getSupersetDf(), equalTo(streamedBucket.getSupersetDf()));
        assertThat(originalBucket.getSubsetDf(), equalTo(streamedBucket.getSubsetDf()));
        assertThat(streamedBucket.getSubsetSize(), equalTo(10L));
        assertThat(streamedBucket.getSupersetSize(), equalTo(20L));
    }

    /**
     * Returns a pair: [0] a populated long- or string-keyed significant-terms aggregation
     * (one bucket, subsetSize 10, supersetSize 20), [1] an empty instance of the same type
     * to deserialize into.
     */
    InternalSignificantTerms[] getRandomSignificantTerms(SignificanceHeuristic heuristic) {
        InternalSignificantTerms[] sTerms = new InternalSignificantTerms[2];
        ArrayList<InternalSignificantTerms.Bucket> buckets = new ArrayList<>();
        if (randomBoolean()) {
            buckets.add(new SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, null));
            sTerms[0] = new SignificantLongTerms(10, 20, "some_name", null, 1, 1, heuristic, buckets,
                    Collections.emptyList(), null);
            sTerms[1] = new SignificantLongTerms();
        } else {
            BytesRef term = new BytesRef("someterm");
            buckets.add(new SignificantStringTerms.Bucket(term, 1, 2, 3, 4,
InternalAggregations.EMPTY)); sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, Collections.emptyList(), null); sTerms[1] = new SignificantStringTerms(); } return sTerms; } SignificanceHeuristic getRandomSignificanceheuristic() { List<SignificanceHeuristic> heuristics = new ArrayList<>(); heuristics.add(JLHScore.PROTOTYPE); heuristics.add(new MutualInformation(randomBoolean(), randomBoolean())); heuristics.add(new GND(randomBoolean())); heuristics.add(new ChiSquare(randomBoolean(), randomBoolean())); return heuristics.get(randomInt(3)); } public void testReduce() { List<InternalAggregation> aggs = createInternalAggregations(); SignificantTerms reducedAgg = (SignificantTerms) aggs.get(0).doReduce(aggs, null); assertThat(reducedAgg.getBuckets().size(), equalTo(2)); assertThat(reducedAgg.getBuckets().get(0).getSubsetDf(), equalTo(8L)); assertThat(reducedAgg.getBuckets().get(0).getSubsetSize(), equalTo(16L)); assertThat(reducedAgg.getBuckets().get(0).getSupersetDf(), equalTo(10L)); assertThat(reducedAgg.getBuckets().get(0).getSupersetSize(), equalTo(30L)); assertThat(reducedAgg.getBuckets().get(1).getSubsetDf(), equalTo(8L)); assertThat(reducedAgg.getBuckets().get(1).getSubsetSize(), equalTo(16L)); assertThat(reducedAgg.getBuckets().get(1).getSupersetDf(), equalTo(10L)); assertThat(reducedAgg.getBuckets().get(1).getSupersetSize(), equalTo(30L)); } // Create aggregations as they might come from three different shards and return as list. private List<InternalAggregation> createInternalAggregations() { String type = randomBoolean() ? 
"long" : "string"; SignificanceHeuristic significanceHeuristic = getRandomSignificanceheuristic(); List<InternalAggregation> aggs = new ArrayList<>(); List<InternalSignificantTerms.Bucket> terms0Buckets = new ArrayList<>(); terms0Buckets.add(createBucket(type, 4, 4, 5, 10, 0)); aggs.add(createAggregation(type, significanceHeuristic, terms0Buckets, 4, 10)); List<InternalSignificantTerms.Bucket> terms1Buckets = new ArrayList<>(); terms0Buckets.add(createBucket(type, 4, 4, 5, 10, 1)); aggs.add(createAggregation(type, significanceHeuristic, terms1Buckets, 4, 10)); List<InternalSignificantTerms.Bucket> terms01Buckets = new ArrayList<>(); terms0Buckets.add(createBucket(type, 4, 8, 5, 10, 0)); terms0Buckets.add(createBucket(type, 4, 8, 5, 10, 1)); aggs.add(createAggregation(type, significanceHeuristic, terms01Buckets, 8, 10)); return aggs; } private InternalSignificantTerms createAggregation(String type, SignificanceHeuristic significanceHeuristic, List<InternalSignificantTerms.Bucket> buckets, long subsetSize, long supersetSize) { if (type.equals("string")) { return new SignificantStringTerms(subsetSize, supersetSize, "sig_terms", 2, -1, significanceHeuristic, buckets, new ArrayList<PipelineAggregator>(), new HashMap<String, Object>()); } else { return new SignificantLongTerms(subsetSize, supersetSize, "sig_terms", ValueFormatter.RAW, 2, -1, significanceHeuristic, buckets, new ArrayList<PipelineAggregator>(), new HashMap<String, Object>()); } } private InternalSignificantTerms.Bucket createBucket(String type, long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label) { if (type.equals("string")) { return new SignificantStringTerms.Bucket(new BytesRef(Long.toString(label).getBytes(StandardCharsets.UTF_8)), subsetDF, subsetSize, supersetDF, supersetSize, InternalAggregations.EMPTY); } else { return new SignificantLongTerms.Bucket(subsetDF, subsetSize, supersetDF, supersetSize, label, InternalAggregations.EMPTY, ValueFormatter.RAW); } } // test that // 
// 1. The output of the builders can actually be parsed
    // 2. The parser does not swallow parameters after a significance heuristic was defined
    public void testBuilderAndParser() throws Exception {
        Set<SignificanceHeuristicParser> parsers = new HashSet<>();
        SignificanceHeuristicParserMapper heuristicParserMapper = new SignificanceHeuristicParserMapper(parsers);
        SearchContext searchContext = new SignificantTermsTestSearchContext();

        // test jlh with string
        assertTrue(parseFromString(heuristicParserMapper, searchContext, "\"jlh\":{}") instanceof JLHScore);
        // test gnd with string
        assertTrue(parseFromString(heuristicParserMapper, searchContext, "\"gnd\":{}") instanceof GND);
        // test mutual information with string
        boolean includeNegatives = randomBoolean();
        boolean backgroundIsSuperset = randomBoolean();
        assertThat(parseFromString(heuristicParserMapper, searchContext,
                "\"mutual_information\":{\"include_negatives\": " + includeNegatives
                        + ", \"background_is_superset\":" + backgroundIsSuperset + "}"),
                equalTo((SignificanceHeuristic) (new MutualInformation(includeNegatives, backgroundIsSuperset))));
        assertThat(parseFromString(heuristicParserMapper, searchContext,
                "\"chi_square\":{\"include_negatives\": " + includeNegatives
                        + ", \"background_is_superset\":" + backgroundIsSuperset + "}"),
                equalTo((SignificanceHeuristic) (new ChiSquare(includeNegatives, backgroundIsSuperset))));

        // test with builders
        assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new JLHScore()) instanceof JLHScore);
        assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new GND(backgroundIsSuperset)) instanceof GND);
        assertThat(parseFromBuilder(heuristicParserMapper, searchContext,
                new MutualInformation(includeNegatives, backgroundIsSuperset)),
                equalTo((SignificanceHeuristic) new MutualInformation(includeNegatives, backgroundIsSuperset)));
        assertThat(parseFromBuilder(heuristicParserMapper, searchContext,
                new ChiSquare(includeNegatives, backgroundIsSuperset)),
                equalTo((SignificanceHeuristic) new ChiSquare(includeNegatives, backgroundIsSuperset)));

        // test exceptions
        String faultyHeuristicdefinition = "\"mutual_information\":{\"include_negatives\": false, \"some_unknown_field\": false}";
        String expectedError = "unknown field [some_unknown_field]";
        checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

        faultyHeuristicdefinition = "\"chi_square\":{\"unknown_field\": true}";
        expectedError = "unknown field [unknown_field]";
        checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

        faultyHeuristicdefinition = "\"jlh\":{\"unknown_field\": true}";
        expectedError = "expected an empty object, but found ";
        checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

        faultyHeuristicdefinition = "\"gnd\":{\"unknown_field\": true}";
        expectedError = "unknown field [unknown_field]";
        checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);
    }

    /**
     * Parses a significant_terms aggregation containing the given faulty heuristic JSON under
     * strict field matching and asserts the parser fails with the expected message fragment.
     */
    protected void checkParseException(SignificanceHeuristicParserMapper heuristicParserMapper,
            SearchContext searchContext, String faultyHeuristicDefinition, String expectedError) throws IOException {

        IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, new HashMap<String, QueryParser<?>>());
        try {
            XContentParser stParser = JsonXContent.jsonXContent.createParser(
                    "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}");
            QueryParseContext parseContext = new QueryParseContext(registry);
            parseContext.reset(stParser);
            parseContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
            stParser.nextToken();
            new SignificantTermsParser(heuristicParserMapper, registry).parse("testagg", stParser, parseContext);
            fail();
        } catch (ElasticsearchParseException e) {
            assertTrue(e.getMessage().contains(expectedError));
        }
    }

    /**
     * Serializes the heuristic through the aggregation builder's XContent output, then parses it
     * back (builder -> JSON -> parser round-trip).
     */
    protected SignificanceHeuristic parseFromBuilder(SignificanceHeuristicParserMapper heuristicParserMapper,
            SearchContext searchContext, SignificanceHeuristic significanceHeuristic) throws IOException {
        SignificantTermsAggregatorBuilder stBuilder = significantTerms("testagg");
        stBuilder.significanceHeuristic(significanceHeuristic).field("text").minDocCount(200);
        XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder();
        stBuilder.internalXContent(stXContentBuilder, null);
        XContentParser stParser = JsonXContent.jsonXContent.createParser(stXContentBuilder.string());
        return parseSignificanceHeuristic(heuristicParserMapper, searchContext, stParser);
    }

    /**
     * Runs the significant_terms parser on the given content and returns the parsed heuristic;
     * also verifies min_doc_count survived (parser did not swallow trailing parameters) and the
     * parser consumed the whole input.
     */
    private SignificanceHeuristic parseSignificanceHeuristic(SignificanceHeuristicParserMapper heuristicParserMapper,
            SearchContext searchContext, XContentParser stParser) throws IOException {
        IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, new HashMap<String, QueryParser<?>>());
        QueryParseContext parseContext = new QueryParseContext(registry);
        parseContext.reset(stParser);
        parseContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
        stParser.nextToken();
        SignificantTermsAggregatorBuilder aggregatorFactory = (SignificantTermsAggregatorBuilder) new SignificantTermsParser(
                heuristicParserMapper, registry).parse("testagg", stParser, parseContext);
        stParser.nextToken();
        assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L));
        assertThat(stParser.currentToken(), equalTo(null));
        stParser.close();
        return aggregatorFactory.significanceHeuristic();
    }

    /** Parses a heuristic given directly as a JSON fragment inside a significant_terms body. */
    protected SignificanceHeuristic parseFromString(SignificanceHeuristicParserMapper heuristicParserMapper,
            SearchContext searchContext, String heuristicString) throws IOException {
        XContentParser stParser = JsonXContent.jsonXContent.createParser(
                "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}");
        return parseSignificanceHeuristic(heuristicParserMapper, searchContext, stParser);
    }

    // Checks precondition enforcement of getScore() for a heuristic pair: one configured with
    // background-is-superset semantics (extra subset-vs-superset constraints) and one without.
    void testBackgroundAssertions(SignificanceHeuristic heuristicIsSuperset, SignificanceHeuristic
heuristicNotSuperset) {
        // Each call below violates one documented precondition of getScore(subsetFreq, subsetSize,
        // supersetFreq, supersetSize); the thrown message must name the violated constraint.
        try {
            heuristicIsSuperset.getScore(2, 3, 1, 4);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > supersetFreq"));
        }
        try {
            heuristicIsSuperset.getScore(1, 4, 2, 3);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("subsetSize > supersetSize"));
        }
        try {
            heuristicIsSuperset.getScore(2, 1, 3, 4);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize"));
        }
        try {
            heuristicIsSuperset.getScore(1, 2, 4, 3);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize"));
        }
        try {
            heuristicIsSuperset.getScore(1, 3, 4, 4);
            fail();
        } catch (IllegalArgumentException assertionError) {
            assertNotNull(assertionError.getMessage());
            assertTrue(assertionError.getMessage().contains("supersetFreq - subsetFreq > supersetSize - subsetSize"));
        }
        try {
            // negate one of the four arguments at random; all frequencies/sizes must be positive
            int idx = randomInt(3);
            long[] values = {1, 2, 3, 4};
            values[idx] *= -1;
            heuristicIsSuperset.getScore(values[0], values[1], values[2], values[3]);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive"));
        }
        try {
            heuristicNotSuperset.getScore(2, 1, 3, 4);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize"));
        }
        try {
            heuristicNotSuperset.getScore(1, 2, 4, 3);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize"));
        }
        try {
            int idx = randomInt(3);
            long[] values = {1, 2, 3, 4};
            values[idx] *= -1;
            heuristicNotSuperset.getScore(values[0], values[1], values[2], values[3]);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive"));
        }
    }

    // Precondition checks that apply regardless of the background_is_superset setting.
    void testAssertions(SignificanceHeuristic heuristic) {
        try {
            int idx = randomInt(3);
            long[] values = {1, 2, 3, 4};
            values[idx] *= -1;
            heuristic.getScore(values[0], values[1], values[2], values[3]);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive"));
        }
        try {
            heuristic.getScore(1, 2, 4, 3);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize"));
        }
        try {
            heuristic.getScore(2, 1, 3, 4);
            fail();
        } catch (IllegalArgumentException illegalArgumentException) {
            assertNotNull(illegalArgumentException.getMessage());
            assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize"));
        }
    }

    public void testAssertions() throws Exception {
        testBackgroundAssertions(new MutualInformation(true, true), new MutualInformation(true, false));
        testBackgroundAssertions(new ChiSquare(true, true), new ChiSquare(true, false));
        testBackgroundAssertions(new GND(true), new GND(false));
        testAssertions(PercentageScore.PROTOTYPE);
        testAssertions(JLHScore.PROTOTYPE);
    }

    public void testBasicScoreProperties() {
        basicScoreProperties(JLHScore.PROTOTYPE, true);
        basicScoreProperties(new GND(true), true);
        basicScoreProperties(PercentageScore.PROTOTYPE, true);
        basicScoreProperties(new MutualInformation(true, true), false);
        basicScoreProperties(new ChiSquare(true, true), false);
    }

    // Generic sanity properties: positive score for an over-represented term, monotonicity in the
    // superset frequency, non-negative scores; test0 additionally requires score 0 at subsetFreq 0.
    public void basicScoreProperties(SignificanceHeuristic heuristic, boolean test0) {
        assertThat(heuristic.getScore(1, 1, 1, 3), greaterThan(0.0));
        assertThat(heuristic.getScore(1, 1, 2, 3), lessThan(heuristic.getScore(1, 1, 1, 3)));
        assertThat(heuristic.getScore(1, 1, 3, 4), lessThan(heuristic.getScore(1, 1, 2, 4)));
        if (test0) {
            assertThat(heuristic.getScore(0, 1, 2, 3), equalTo(0.0));
        }

        double score = 0.0;
        try {
            long a = randomLong();
            long b = randomLong();
            long c = randomLong();
            long d = randomLong();
            score = heuristic.getScore(a, b, c, d);
        } catch (IllegalArgumentException e) {
            // random values may violate preconditions; keep the default score of 0.0 then
        }
        assertThat(score, greaterThanOrEqualTo(0.0));
    }

    public void testScoreMutual() throws Exception {
        SignificanceHeuristic heuristic = new MutualInformation(true, true);
        assertThat(heuristic.getScore(1, 1, 1, 3), greaterThan(0.0));
        assertThat(heuristic.getScore(1, 1, 2, 3), lessThan(heuristic.getScore(1, 1, 1, 3)));
        // fully dependent term distributions score 1, independent distributions score 0
        assertThat(heuristic.getScore(2, 2, 2, 4), equalTo(1.0));
        assertThat(heuristic.getScore(0, 2, 2, 4), equalTo(1.0));
        assertThat(heuristic.getScore(2, 2, 4, 4), equalTo(0.0));
        assertThat(heuristic.getScore(1, 2, 2, 4), equalTo(0.0));
        assertThat(heuristic.getScore(3, 6, 9, 18), equalTo(0.0));

        double score = 0.0;
        try {
            long a = randomLong();
            long b = randomLong();
            long c = randomLong();
            long d = randomLong();
            score = heuristic.getScore(a, b, c, d);
        } catch (IllegalArgumentException e) {
            // random values may violate preconditions; keep the default score of 0.0 then
        }
        assertThat(score, lessThanOrEqualTo(1.0));
        assertThat(score, greaterThanOrEqualTo(0.0));
        heuristic = new MutualInformation(false, true);
        assertThat(heuristic.getScore(0, 1, 2, 3), equalTo(Double.NEGATIVE_INFINITY));
        heuristic = new MutualInformation(true, false);
        score = heuristic.getScore(2, 3, 1, 4);
        assertThat(score, greaterThanOrEqualTo(0.0));
        assertThat(score, lessThanOrEqualTo(1.0));
        score = heuristic.getScore(1, 4, 2, 3);
        assertThat(score, greaterThanOrEqualTo(0.0));
        assertThat(score, lessThanOrEqualTo(1.0));
        score = heuristic.getScore(1, 3, 4, 4);
        assertThat(score, greaterThanOrEqualTo(0.0));
        assertThat(score, lessThanOrEqualTo(1.0));
    }

    public void testGNDCornerCases() throws Exception {
        GND gnd = new GND(true);
        //term is only in the subset, not at all in the other set but that is because the other set is empty.
        // this should actually not happen because only terms that are in the subset are considered now,
        // however, in this case the score should be 0 because a term that does not exist cannot be relevant...
        assertThat(gnd.getScore(0, randomIntBetween(1, 2), 0, randomIntBetween(2, 3)), equalTo(0.0));
        // the terms do not co-occur at all - should be 0
        assertThat(gnd.getScore(0, randomIntBetween(1, 2), randomIntBetween(2, 3), randomIntBetween(5, 6)), equalTo(0.0));
        // comparison between two terms that do not exist - probably not relevant
        assertThat(gnd.getScore(0, 0, 0, randomIntBetween(1, 2)), equalTo(0.0));
        // terms co-occur perfectly - should be 1
        assertThat(gnd.getScore(1, 1, 1, 1), equalTo(1.0));
        gnd = new GND(false);
        assertThat(gnd.getScore(0, 0, 0, 0), equalTo(0.0));
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.application.options.colors;

import com.intellij.application.options.ImportSourceChooserDialog;
import com.intellij.application.options.SaveSchemeDialog;
import com.intellij.application.options.SkipSelfSearchComponent;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.impl.*;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.options.SchemeImportException;
import com.intellij.openapi.options.SchemeImportUtil;
import com.intellij.openapi.options.SchemeImporter;
import com.intellij.openapi.options.SchemeImporterEP;
import com.intellij.openapi.project.DefaultProjectFactory;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.EventDispatcher;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBInsets;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;

/**
 * Settings-page panel with the editor color scheme selector and its Save As / Delete / Reset /
 * Import buttons. Selection changes are forwarded to {@link ColorAndFontOptions} and broadcast
 * to {@link ColorAndFontSettingsListener}s.
 */
public class SchemesPanel extends JPanel implements SkipSelfSearchComponent {
  private final ColorAndFontOptions myOptions;

  private ComboBox<MySchemeItem> mySchemeComboBox;
  private JButton myDeleteButton;
  private JButton myResetButton;
  private JButton myImportButton;
  // shown when the selected scheme is read-only and therefore cannot be edited
  private JLabel myHintLabel;

  private final EventDispatcher<ColorAndFontSettingsListener> myDispatcher =
    EventDispatcher.create(ColorAndFontSettingsListener.class);

  public SchemesPanel(ColorAndFontOptions options) {
    super(new BorderLayout());
    myOptions = options;
    JPanel schemesGroup = new JPanel(new BorderLayout());
    JPanel panel = new JPanel(new BorderLayout());
    schemesGroup.add(createSchemePanel(), BorderLayout.NORTH);
    schemesGroup.add(panel, BorderLayout.CENTER);
    add(schemesGroup, BorderLayout.CENTER);
    // Keep button states and the read-only hint in sync with the combo box selection.
    mySchemeComboBox.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@NotNull ActionEvent e) {
        String selectedName = getSelectedSchemeName();
        if (selectedName != null) {
          EditorColorsScheme selected = myOptions.selectScheme(selectedName);
          final boolean readOnly = ColorAndFontOptions.isReadOnly(selected);
          myDeleteButton.setEnabled(!readOnly && ColorAndFontOptions.canBeDeleted(selected));
          myHintLabel.setVisible(readOnly);
          if (areSchemesLoaded()) {
            myDispatcher.getMulticaster().schemeChanged(SchemesPanel.this);
          }
          // "Reset" applies only to an editable copy of a bundled (read-only) scheme.
          AbstractColorsScheme originalScheme =
            selected instanceof AbstractColorsScheme ? ((AbstractColorsScheme)selected).getOriginal() : null;
          myResetButton.setEnabled(!readOnly &&
                                   selectedName.startsWith(DefaultColorsScheme.EDITABLE_COPY_PREFIX) &&
                                   originalScheme instanceof ReadOnlyColorsScheme);
        }
      }
    });
  }

  // true once resetSchemesCombo() has finished repopulating the combo box; guards listeners
  // against change events fired during repopulation
  private boolean myListLoaded = false;

  public boolean areSchemesLoaded() {
    return myListLoaded;
  }

  /** Builds the single-row toolbar: label, scheme combo, action buttons, and read-only hint. */
  private JPanel createSchemePanel() {
    JPanel panel = new JPanel(new GridBagLayout());
    int gridx = 0;
    panel.add(new JLabel(ApplicationBundle.message("editbox.scheme.name")),
              new GridBagConstraints(gridx++, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE,
                                     new JBInsets(0, 0, 5, 5), 0, 0));
    mySchemeComboBox = new ComboBox<>();
    panel.add(mySchemeComboBox,
              new GridBagConstraints(gridx++, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE,
                                     new JBInsets(0, 0, 5, 10), 0, 0));

    JButton saveAsButton = new JButton(ApplicationBundle.message("button.save.as"));
    saveAsButton.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@NotNull ActionEvent e) {
        showSaveAsDialog();
      }
    });
    panel.add(saveAsButton,
              new GridBagConstraints(gridx++, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE,
                                     new JBInsets(0, 0, 5, 5), 0, 0));

    myDeleteButton = new JButton(ApplicationBundle.message("button.delete"));
    myDeleteButton.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@NotNull ActionEvent e) {
        String selectedName = getSelectedSchemeName();
        if (selectedName != null) {
          myOptions.removeScheme(selectedName);
        }
      }
    });
    panel.add(myDeleteButton,
              new GridBagConstraints(gridx++, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE,
                                     new JBInsets(0, 0, 5, 5), 0, 0));

    myResetButton = new JButton(ApplicationBundle.message("color.scheme.reset"));
    myResetButton.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(@NotNull ActionEvent e) {
        String selectedName = getSelectedSchemeName();
        if (selectedName != null) {
          // destructive: confirm before discarding the user's customizations
          if (Messages.showOkCancelDialog(ApplicationBundle.message("color.scheme.reset.message"),
                                          ApplicationBundle.message("color.scheme.reset.title"),
                                          Messages.getQuestionIcon()) == Messages.OK) {
            myOptions.resetSchemeToOriginal(selectedName);
          }
        }
      }
    });
    panel.add(myResetButton,
              new GridBagConstraints(gridx++, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE,
                                     new JBInsets(0, 0, 5, 5), 0, 0));

    myImportButton = new JButton("Import...");
    myImportButton.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        // chooseAndImport()/isImportAvailable() are defined elsewhere in this class (outside
        // this excerpt); presumably they drive the SchemeImporter machinery imported above.
        chooseAndImport();
      }
    });
    myImportButton.setVisible(isImportAvailable());
    panel.add(myImportButton,
              new GridBagConstraints(gridx++, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE,
                                     new JBInsets(0, 0, 5, 5), 0, 0));

    myHintLabel = new JLabel(ApplicationBundle.message("hint.readonly.scheme.cannot.be.modified"));
    myHintLabel.setEnabled(false);
    panel.add(myHintLabel,
              new GridBagConstraints(gridx++, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE,
                                     new JBInsets(0, 0, 5, 5), 0, 0));

    // One extra button per registered ImportHandler extension.
    for (final ImportHandler importHandler : Extensions.getExtensions(ImportHandler.EP_NAME)) {
      final JButton button = new JButton(importHandler.getTitle());
      button.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(@NotNull ActionEvent e) {
          importHandler.performImport(button, scheme -> {
            if (scheme != null) myOptions.addImportedScheme(scheme);
          });
        }
      });
      panel.add(button,
                new GridBagConstraints(gridx++, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE,
                                       new JBInsets(0, 0, 5, 5), 0, 0));
    }

    // trailing glue absorbs remaining horizontal space so the controls stay left-aligned
    panel.add(Box.createHorizontalGlue(),
              new GridBagConstraints(gridx + 1, 0, 1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL,
                                     new JBInsets(0, 0, 0, 0), 0, 0));
    return panel;
  }

  /** Asks for a new scheme name (existing names are rejected by the dialog) and saves a copy. */
  private void showSaveAsDialog() {
    List<String> names = ContainerUtil.newArrayList(myOptions.getSchemeNames());
    String selectedName = AbstractColorsScheme.getDisplayName(myOptions.getSelectedScheme());
    SaveSchemeDialog dialog =
      new SaveSchemeDialog(this, ApplicationBundle.message("title.save.color.scheme.as"), names, selectedName);
    if (dialog.showAndGet()) {
      myOptions.saveSchemeAs(dialog.getSchemeName());
    }
  }

  /** @deprecated only reports whether a modification of the current scheme is allowed. */
  @Deprecated
  public boolean updateDescription(boolean modified) {
    EditorColorsScheme scheme = myOptions.getSelectedScheme();
    if (modified && ColorAndFontOptions.isReadOnly(scheme)) {
      return false;
    }
    return true;
  }

  /**
   * Repopulates the combo box from the current scheme list, restoring the previous selection.
   * Skipped when this panel itself triggered the change (source == this) to avoid feedback loops.
   */
  public void resetSchemesCombo(final Object source) {
    if (this != source) {
      setListLoaded(false);
      EditorColorsScheme selectedSchemeBackup = myOptions.getSelectedScheme();
      mySchemeComboBox.removeAllItems();
      String[] schemeNames = myOptions.getSchemeNames();
      MySchemeItem itemToSelect = null;
      for (String schemeName : schemeNames) {
        EditorColorsScheme scheme = myOptions.getScheme(schemeName);
        MySchemeItem item = new MySchemeItem(scheme);
        if (scheme == selectedSchemeBackup) itemToSelect = item;
        mySchemeComboBox.addItem(item);
      }
      mySchemeComboBox.setSelectedItem(itemToSelect);
      setListLoaded(true);
      myDispatcher.getMulticaster().schemeChanged(this);
    }
  }

  // NOTE(review): method truncated at the end of this excerpt; the remainder is outside view.
  @Nullable
  private String getSelectedSchemeName() {
    return mySchemeComboBox.getSelectedIndex() != -1 ?
((MySchemeItem)mySchemeComboBox.getSelectedItem()).getSchemeName() : null; } private void setListLoaded(final boolean b) { myListLoaded = b; } public void addListener(ColorAndFontSettingsListener listener) { myDispatcher.addListener(listener); } private void chooseAndImport() { ImportSourceChooserDialog<EditorColorsScheme> importSourceChooserDialog = new ImportSourceChooserDialog<>(this, EditorColorsScheme.class); if (importSourceChooserDialog.showAndGet()) { final String selectedImporterName = importSourceChooserDialog.getSelectedSourceName(); if (selectedImporterName != null) { final SchemeImporter<EditorColorsScheme> importer = SchemeImporterEP.getImporter(selectedImporterName, EditorColorsScheme.class); if (importer != null) { VirtualFile importSource = SchemeImportUtil.selectImportSource(importer.getSourceExtensions(), this, null); if (importSource != null) { try { EditorColorsScheme imported = importer.importScheme(DefaultProjectFactory.getInstance().getDefaultProject(), importSource, myOptions.getSelectedScheme(), name -> { String newName = myOptions.getUniqueName(name); AbstractColorsScheme newScheme = new EditorColorsSchemeImpl(EmptyColorScheme.INSTANCE); newScheme.setName(newName); newScheme.setDefaultMetaInfo(EmptyColorScheme.INSTANCE); return newScheme; }); if (imported != null) { myOptions.addImportedScheme(imported); } } catch (SchemeImportException e) { SchemeImportUtil.showStatus(myImportButton, "Import failed: " + e.getMessage(), MessageType.ERROR); } } } } } } private static boolean isImportAvailable() { return !SchemeImporterEP.getExtensions(EditorColorsScheme.class).isEmpty(); } private final static class MySchemeItem { private EditorColorsScheme myScheme; public MySchemeItem(EditorColorsScheme scheme) { myScheme = scheme; } public String getSchemeName() { return myScheme.getName(); } @Override public String toString() { return AbstractColorsScheme.getDisplayName(myScheme); } } }
/*
 * Copyright 2003-2007 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.codehaus.groovy.runtime.metaclass;

import groovy.lang.Closure;
import groovy.lang.MetaBeanProperty;
import groovy.lang.MetaMethod;
import org.codehaus.groovy.reflection.CachedClass;
import org.codehaus.groovy.reflection.ReflectionCache;
import org.codehaus.groovy.util.ManagedConcurrentMap;
import org.codehaus.groovy.util.ReferenceBundle;

import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;

/**
 * This MetaBeanProperty will create a pseudo property whose value is bound to an object
 * using soft references (see the {@code SOFT_BUNDLE} field below). The values go out of
 * scope and become eligible for garbage collection when the owning object is collected.
 *
 * In fact, this class should be called ExpandoProperty.
 *
 * @author Graeme Rocher
 * @author Alex Tkachman
 * @since 1.5
 */
public class ThreadManagedMetaBeanProperty extends MetaBeanProperty {
    // One shared registry keyed by property name; each value maps an object instance to
    // that instance's value for the property. Shared across all
    // ThreadManagedMetaBeanProperty instances with the same property name.
    private static final ConcurrentHashMap<String,ManagedConcurrentMap> PROPNAME_TO_MAP = new ConcurrentHashMap<String, ManagedConcurrentMap>();

    // instance -> value storage for this property (obtained from PROPNAME_TO_MAP).
    private final ManagedConcurrentMap instance2Prop;

    private Class declaringClass;
    private ThreadBoundGetter getter;
    private ThreadBoundSetter setter;
    // Either a fixed initial value (initialValue) or a factory closure
    // (initialValueCreator) supplies the value the first time the property is read.
    private Object initialValue;
    private Closure initialValueCreator;

    // Soft-reference bundle: entries may be reclaimed under memory pressure, and are
    // dropped when the keyed object is collected.
    private static final ReferenceBundle SOFT_BUNDLE = ReferenceBundle.getSoftBundle();

    /**
     * Retrieves the initial value of the ThreadBound property
     *
     * @return The initial value
     */
    public synchronized Object getInitialValue() {
        return getInitialValue(null);
    }

    /**
     * Retrieves the initial value of the ThreadBound property for the given object.
     * If an initial-value closure was registered it takes precedence over the fixed
     * initial value and is invoked with the object as its argument.
     *
     * @param object the object the property belongs to (passed to the creator closure)
     * @return The initial value
     */
    public synchronized Object getInitialValue(Object object) {
        if (initialValueCreator != null) {
            return initialValueCreator.call(object);
        }
        return initialValue;
    }

    /**
     * Closure responsible for creating the initial value of thread-managed bean properties
     *
     * @param callable The closure responsible for creating the initial value
     */
    public void setInitialValueCreator(Closure callable) {
        this.initialValueCreator = callable;
    }

    /**
     * Constructs a new ThreadManagedBeanProperty for the given arguments
     *
     * @param declaringClass The class that declares the property
     * @param name The name of the property
     * @param type The type of the property
     * @param iv The properties initial value
     */
    public ThreadManagedMetaBeanProperty(Class declaringClass, String name, Class type, Object iv) {
        super(name, type, null, null);
        // 'type' must be assigned before the getter/setter are built: both inner-class
        // constructors read the (inherited) type field.
        this.type = type;
        this.declaringClass = declaringClass;

        this.getter = new ThreadBoundGetter(name);
        this.setter = new ThreadBoundSetter(name);
        initialValue = iv;

        instance2Prop = getInstance2PropName(name);
    }

    /**
     * Constructs a new ThreadManagedBeanProperty for the given arguments
     *
     * @param declaringClass The class that declares the property
     * @param name The name of the property
     * @param type The type of the property
     * @param initialValueCreator The closure responsible for creating the initial value
     */
    public ThreadManagedMetaBeanProperty(Class declaringClass, String name, Class type, Closure initialValueCreator) {
        super(name, type, null, null);
        this.type = type;
        this.declaringClass = declaringClass;

        this.getter = new ThreadBoundGetter(name);
        this.setter = new ThreadBoundSetter(name);
        this.initialValueCreator = initialValueCreator;

        instance2Prop = getInstance2PropName(name);
    }

    /**
     * Returns the shared instance->value map for the given property name, creating and
     * publishing it on first use. Uses optimistic get followed by putIfAbsent so that
     * concurrent callers agree on a single map (the loser's map is discarded).
     */
    private static ManagedConcurrentMap getInstance2PropName(String name) {
        ManagedConcurrentMap res = PROPNAME_TO_MAP.get(name);
        if (res == null) {
            res = new ManagedConcurrentMap(SOFT_BUNDLE);
            ManagedConcurrentMap ores = PROPNAME_TO_MAP.putIfAbsent(name, res);
            if (ores != null)
                return ores;
        }
        return res;
    }

    /* (non-Javadoc)
     * @see groovy.lang.MetaBeanProperty#getGetter()
     */
    public MetaMethod getGetter() {
        return this.getter;
    }

    /* (non-Javadoc)
     * @see groovy.lang.MetaBeanProperty#getSetter()
     */
    public MetaMethod getSetter() {
        return this.setter;
    }

    /**
     * Accesses the ThreadBound state of the property as a getter
     *
     * @author Graeme Rocher
     */
    class ThreadBoundGetter extends MetaMethod {
        private final String name;

        public ThreadBoundGetter(String name) {
            setParametersTypes(CachedClass.EMPTY_ARRAY);
            // Derives the conventional getter name ("getFoo"/"isFoo") from the
            // property name and type.
            this.name = getGetterName(name, type);
        }

        public int getModifiers() {
            return Modifier.PUBLIC;
        }

        public String getName() {
            return name;
        }

        public Class getReturnType() {
            return type;
        }

        public CachedClass getDeclaringClass() {
            return ReflectionCache.getCachedClass(declaringClass);
        }

        /* (non-Javadoc)
         * @see groovy.lang.MetaMethod#invoke(java.lang.Object, java.lang.Object[])
         */
        public Object invoke(Object object, Object[] arguments) {
            // NOTE(review): getInitialValue() is evaluated on EVERY read (Java eager
            // argument evaluation), even when a value is already present in the map and
            // the computed initial value is discarded. If the creator closure has side
            // effects or is expensive, this may be surprising — confirm intended.
            return instance2Prop.getOrPut(object, getInitialValue()).getValue();
        }
    }

    /**
     * Sets the ThreadBound state of the property like a setter
     */
    private class ThreadBoundSetter extends MetaMethod {
        private final String name;

        public ThreadBoundSetter(String name) {
            // Single parameter: the property type.
            setParametersTypes(new CachedClass[] {ReflectionCache.getCachedClass(type)});
            this.name = getSetterName(name);
        }

        public int getModifiers() {
            return Modifier.PUBLIC;
        }

        /* (non-Javadoc)
         * @see groovy.lang.MetaMethod#getName()
         */
        public String getName() {
            return name;
        }

        public Class getReturnType() {
            return type;
        }

        public CachedClass getDeclaringClass() {
            return ReflectionCache.getCachedClass(declaringClass);
        }

        /* (non-Javadoc)
         * @see groovy.lang.MetaMethod#invoke(java.lang.Object, java.lang.Object[])
         */
        public Object invoke(Object object, Object[] arguments) {
            instance2Prop.put(object, arguments[0]);
            return null;
        }
    }
}
/**
 * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.web.position;

import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertTrue;

import java.math.BigDecimal;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

import org.springframework.core.io.FileSystemResourceLoader;
import org.springframework.mock.web.MockServletContext;
import org.testng.annotations.BeforeMethod;
import org.threeten.bp.LocalDate;
import org.threeten.bp.LocalTime;
import org.threeten.bp.OffsetTime;
import org.threeten.bp.ZoneOffset;

import com.google.common.collect.Lists;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeriesSource;
import com.opengamma.core.id.ExternalSchemes;
import com.opengamma.core.position.Counterparty;
import com.opengamma.engine.InMemorySecuritySource;
import com.opengamma.financial.security.equity.EquitySecurity;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalScheme;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.master.config.impl.InMemoryConfigMaster;
import com.opengamma.master.config.impl.MasterConfigSource;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver;
import com.opengamma.master.historicaltimeseries.impl.DefaultHistoricalTimeSeriesResolver;
import com.opengamma.master.historicaltimeseries.impl.DefaultHistoricalTimeSeriesSelector;
import com.opengamma.master.historicaltimeseries.impl.InMemoryHistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.impl.MasterHistoricalTimeSeriesSource;
import com.opengamma.master.position.ManageablePosition;
import com.opengamma.master.position.ManageableTrade;
import com.opengamma.master.position.PositionDocument;
import com.opengamma.master.position.PositionMaster;
import com.opengamma.master.position.PositionSearchRequest;
import com.opengamma.master.position.PositionSearchResult;
import com.opengamma.master.position.impl.InMemoryPositionMaster;
import com.opengamma.master.security.ManageableSecurityLink;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityLoader;
import com.opengamma.master.security.SecurityLoaderRequest;
import com.opengamma.master.security.SecurityLoaderResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.impl.AbstractSecurityLoader;
import com.opengamma.master.security.impl.InMemorySecurityMaster;
import com.opengamma.util.money.Currency;
import com.opengamma.util.test.TestGroup;
import com.opengamma.web.FreemarkerOutputter;
import com.opengamma.web.MockUriInfo;
import com.opengamma.web.WebResourceTestUtils;

import freemarker.template.Configuration;

/**
 * Test base class for WebPositionResource tests.
 *
 * Wires an entirely in-memory stack (security, position, config, and time-series
 * masters), a Freemarker rendering environment backed by a mock servlet context, and a
 * {@link WebPositionsResource} under test, then provides helpers for populating the
 * masters and asserting on their contents.
 */
public abstract class AbstractWebPositionResourceTestCase {

  /** Counterparty identifier shared by every trade built here. */
  protected static final ExternalId COUNTER_PARTY = ExternalId.of(Counterparty.DEFAULT_SCHEME, "BACS");
  /** Fixed UTC+1 offset applied to all trade times. */
  protected static final ZoneOffset ZONE_OFFSET = ZoneOffset.of("+0100");
  /** The single equity security registered in the security master. */
  protected static final EquitySecurity EQUITY_SECURITY = WebResourceTestUtils.getEquitySecurity();
  /** Bloomberg-ticker identifier of {@link #EQUITY_SECURITY}. */
  protected static final ExternalId SEC_ID = EQUITY_SECURITY.getExternalIdBundle().getExternalId(ExternalSchemes.BLOOMBERG_TICKER);
  /** Security link covering the full identifier bundle of the equity. */
  protected static final ManageableSecurityLink SECURITY_LINK = new ManageableSecurityLink(EQUITY_SECURITY.getExternalIdBundle());
  /** JSON payload representing a position with no trades. */
  protected static final String EMPTY_TRADES = "{\"trades\" : []}";
  protected static final Long QUANTITY = Long.valueOf(100);

  protected SecurityMaster _secMaster;
  protected SecurityLoader _secLoader;
  protected HistoricalTimeSeriesSource _htsSource;
  protected WebPositionsResource _webPositionsResource;
  protected InMemorySecuritySource _securitySource;
  protected PositionMaster _positionMaster;
  protected List<ManageableTrade> _trades;
  protected UriInfo _uriInfo;
  protected Map<ExternalScheme, String> _externalSchemes;

  /**
   * Builds a fresh in-memory environment before each test method.
   *
   * Order matters here: the masters and loaders are constructed first, the security
   * master is populated, the resource is created from those collaborators, and only
   * then is the Freemarker configuration initialised and attached to the resource via
   * the mock servlet context.
   */
  @BeforeMethod(groups = TestGroup.UNIT)
  public void setUp() throws Exception {
    _uriInfo = new MockUriInfo(true);
    _trades = getTrades();
    _secMaster = new InMemorySecurityMaster(new ObjectIdSupplier("Mock"));
    _positionMaster = new InMemoryPositionMaster();
    final MasterConfigSource configSource = new MasterConfigSource(new InMemoryConfigMaster());
    final InMemoryHistoricalTimeSeriesMaster htsMaster = new InMemoryHistoricalTimeSeriesMaster();
    final HistoricalTimeSeriesResolver htsResolver = new DefaultHistoricalTimeSeriesResolver(new DefaultHistoricalTimeSeriesSelector(configSource), htsMaster);
    _htsSource = new MasterHistoricalTimeSeriesSource(htsMaster, htsResolver);
    _securitySource = new InMemorySecuritySource();
    // Tests are expected never to trigger a bulk security load; fail loudly if one does.
    _secLoader = new AbstractSecurityLoader() {
      @Override
      protected SecurityLoaderResult doBulkLoad(SecurityLoaderRequest request) {
        throw new UnsupportedOperationException("load security not supported");
      }
    };
    populateSecMaster();
    _externalSchemes = new HashMap<>();
    _externalSchemes.put(ExternalSchemes.OG_SYNTHETIC_TICKER, ExternalSchemes.OG_SYNTHETIC_TICKER.getName());
    _webPositionsResource = new WebPositionsResource(_positionMaster, _secLoader, _securitySource, _htsSource, _externalSchemes);

    // Freemarker must be configured against the servlet context before the resource
    // uses it to render pages.
    final MockServletContext sc = new MockServletContext("/web-engine", new FileSystemResourceLoader());
    final Configuration cfg = FreemarkerOutputter.createConfiguration();
    cfg.setServletContextForTemplateLoading(sc, "WEB-INF/pages");
    FreemarkerOutputter.init(sc, cfg);
    _webPositionsResource.setServletContext(sc);
    _webPositionsResource.setUriInfo(_uriInfo);
  }

  /**
   * Builds the three canonical trades (quantities 50/60/70 on consecutive December
   * 2011 dates) used by {@link #populatePositionMaster()} and the assertions below.
   */
  protected List<ManageableTrade> getTrades() {
    final List<ManageableTrade> trades = Lists.newArrayList();
    final ManageableTrade trade1 = new ManageableTrade(BigDecimal.valueOf(50), SEC_ID, LocalDate.parse("2011-12-07"), OffsetTime.of(LocalTime.of(15, 4), ZONE_OFFSET), COUNTER_PARTY);
    trade1.setPremium(10.0);
    trade1.setPremiumCurrency(Currency.USD);
    trade1.setPremiumDate(LocalDate.parse("2011-12-08"));
    trade1.setPremiumTime(OffsetTime.of(LocalTime.of(15, 4), ZONE_OFFSET));
    trades.add(trade1);

    final ManageableTrade trade2 = new ManageableTrade(BigDecimal.valueOf(60), SEC_ID, LocalDate.parse("2011-12-08"), OffsetTime.of(LocalTime.of(16, 4), ZONE_OFFSET), COUNTER_PARTY);
    trade2.setPremium(20.0);
    trade2.setPremiumCurrency(Currency.USD);
    trade2.setPremiumDate(LocalDate.parse("2011-12-09"));
    trade2.setPremiumTime(OffsetTime.of(LocalTime.of(16, 4), ZONE_OFFSET));
    trades.add(trade2);

    final ManageableTrade trade3 = new ManageableTrade(BigDecimal.valueOf(70), SEC_ID, LocalDate.parse("2011-12-09"), OffsetTime.of(LocalTime.of(17, 4), ZONE_OFFSET), COUNTER_PARTY);
    trade3.setPremium(30.0);
    trade3.setPremiumCurrency(Currency.USD);
    trade3.setPremiumDate(LocalDate.parse("2011-12-10"));
    trade3.setPremiumTime(OffsetTime.of(LocalTime.of(17, 4), ZONE_OFFSET));
    trades.add(trade3);
    return trades;
  }

  /**
   * Adds the equity security to the master and mirrors the stored (id-assigned) copy
   * into the security source so both resolve consistently.
   */
  protected void populateSecMaster() {
    final SecurityDocument added = _secMaster.add(new SecurityDocument(EQUITY_SECURITY));
    _securitySource.addSecurity(added.getSecurity());
  }

  /** Adds one single-trade position per trade in {@code _trades} to the position master. */
  protected void populatePositionMaster() {
    for (final ManageableTrade trade : _trades) {
      final ManageablePosition manageablePosition = new ManageablePosition(trade.getQuantity(), SEC_ID);
      manageablePosition.addTrade(trade);
      final PositionDocument positionDocument = new PositionDocument(manageablePosition);
      _positionMaster.add(positionDocument);
    }
  }

  /** Loads the shared trades JSON fixture from the classpath. */
  protected String getTradesJson() throws Exception {
    return WebResourceTestUtils.loadJson("com/opengamma/web/position/tradesJson.txt").toString();
  }

  /**
   * Asserts the position master holds exactly one trade-less position.
   *
   * NOTE(review): expects quantity {@code BigDecimal.TEN} — that position is not
   * created by any helper in this class, so it must come from the subclass under test;
   * confirm against the callers.
   */
  protected void assertPositionWithNoTrades() {
    final PositionSearchRequest request = new PositionSearchRequest();
    final PositionSearchResult searchResult = _positionMaster.search(request);
    assertNotNull(searchResult);
    final List<PositionDocument> docs = searchResult.getDocuments();
    assertNotNull(docs);
    assertEquals(1, docs.size());
    final ManageablePosition position = docs.get(0).getPosition();
    assertEquals(BigDecimal.TEN, position.getQuantity());
    assertEquals(SECURITY_LINK, position.getSecurityLink());
    assertTrue(position.getTrades().isEmpty());
  }

  /**
   * Asserts the position master holds exactly one position whose three trades match
   * {@code _trades}. The stored trades are normalised (unique id, security link, and
   * parent position id cleared) before comparing, since the master assigns those on
   * add.
   */
  protected void assertPositionAndTrades() {
    final PositionSearchRequest request = new PositionSearchRequest();
    final PositionSearchResult searchResult = _positionMaster.search(request);
    assertNotNull(searchResult);
    final List<PositionDocument> docs = searchResult.getDocuments();
    assertNotNull(docs);
    assertEquals(1, docs.size());

    final ManageablePosition position = docs.get(0).getPosition();
    assertEquals(BigDecimal.TEN, position.getQuantity());
    assertEquals(SECURITY_LINK, position.getSecurityLink());

    final List<ManageableTrade> trades = position.getTrades();
    assertEquals(3, trades.size());
    for (final ManageableTrade trade : trades) {
      assertEquals(SECURITY_LINK, trade.getSecurityLink());
      // Strip master-assigned fields so equality against the original fixtures holds.
      trade.setUniqueId(null);
      trade.setSecurityLink(new ManageableSecurityLink(SEC_ID));
      trade.setParentPositionId(null);
      assertTrue(_trades.contains(trade));
    }
  }

  /**
   * Adds a single quantity-50 position (linked by the equity's full id bundle) to the
   * position master and returns its assigned unique id.
   */
  protected UniqueId addPosition() {
    final ManageableTrade origTrade = new ManageableTrade(BigDecimal.valueOf(50), SEC_ID, LocalDate.parse("2011-12-07"), OffsetTime.of(LocalTime.of(15, 4), ZONE_OFFSET), COUNTER_PARTY);
    origTrade.setPremium(10.0);
    origTrade.setPremiumCurrency(Currency.USD);
    origTrade.setPremiumDate(LocalDate.parse("2011-12-08"));
    origTrade.setPremiumTime(OffsetTime.of(LocalTime.of(15, 4), ZONE_OFFSET));

    final ManageablePosition manageablePosition = new ManageablePosition(origTrade.getQuantity(), EQUITY_SECURITY.getExternalIdBundle());
    manageablePosition.addTrade(origTrade);
    final PositionDocument addedPos = _positionMaster.add(new PositionDocument(manageablePosition));
    final UniqueId uid = addedPos.getUniqueId();
    return uid;
  }

  /** Extracts the {@code Location} response header as a string. */
  protected String getActualURL(final Response response) {
    return response.getMetadata().getFirst("Location").toString();
  }
}
package edu.nyu.cs.cs2580; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.PriorityQueue; import java.util.Scanner; import java.util.Set; import java.util.Vector; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.jsoup.Jsoup; import org.owwlo.InvertedIndexing.InvertedIndexBuilder; import org.owwlo.InvertedIndexing.InvertedIndexBuilder.IvtMapInteger; import edu.nyu.cs.cs2580.SearchEngine.Options; import edu.nyu.cs.cs2580.utils.AssistantIndexBuilder; import edu.nyu.cs.cs2580.utils.PersistentStoreManager; import edu.nyu.cs.cs2580.utils.Utils; import edu.nyu.cs.cs2580.utils.PersistentStoreManager.TermFrequencyManager; /** * @CS2580: Implement this class for HW2. 
*/ public class IndexerInvertedOccurrence extends Indexer { private List<IvtMapInteger> ivtIndexMapList = new ArrayList<IvtMapInteger>(); private Map<Integer, DocumentIndexed> docMap = null; private Map<String, Integer> docUrlMap = null; private Map<String, Object> infoMap = null; private String CorpusLocation = "data/wiki"; private AssistantIndexBuilder aib = null; private String previousQuery = new String(); private int previousDocid = -1; private Vector<Vector<Integer>> cachePos = new Vector<Vector<Integer>>(); private static final String DOC_IDX_TBL = "docDB"; private static final String DOC_URL_TBL = "docUrlDB"; private static final String DOC_INFO_TBL = "docInfoDB"; private TermFrequencyManager tfm; public IndexerInvertedOccurrence(Options options) { super(options); System.out.println("Using Indexer: " + this.getClass().getSimpleName()); } private List<File> getAllFiles(final File folder) { List<File> fileList = new LinkedList<File>(); for (final File fileEntry : folder.listFiles()) { if (fileEntry.isDirectory()) { fileList.addAll(getAllFiles(fileEntry)); } else { fileList.add(fileEntry); } } return fileList; } private class InvertIndexBuildingTask implements Runnable { private List<File> files; private int startFileIdx; private int endFileIdx; private Map<String, List<Integer>> ivtMap; private long termCount = 0; private AssistantIndexBuilder aib; public InvertIndexBuildingTask(List<File> files, int startFileIdx, int endFileIdx, Map<String, List<Integer>> ivtMap, AssistantIndexBuilder aib) { this.files = files; this.startFileIdx = startFileIdx; this.endFileIdx = endFileIdx; this.ivtMap = ivtMap; this.aib = aib; } public long getTermCount() { return termCount; } @Override public void run() { System.out.println("Thread " + Thread.currentThread().getName() + " processes files from " + startFileIdx + " to " + endFileIdx); for (int docId = startFileIdx; docId < endFileIdx; docId++) { File file = files.get(docId); Map<String, List<Integer>> ivtMapItem = new 
HashMap<String, List<Integer>>(); Map<String, Integer> ferqMap = new HashMap<String, Integer>(); org.jsoup.nodes.Document doc; try { doc = Jsoup.parse(file, "UTF-8"); } catch (IOException e1) { continue; } String title = doc.title(); String text = doc.text(); Stemmer s = new Stemmer(); Scanner scanner = new Scanner(text); int passageLength = 0; List<String> termList = new ArrayList<String>(); while (scanner.hasNext()) { String token = scanner.next().toLowerCase(); s.add(token.toCharArray(), token.length()); s.stem(); // Build inverted map. token = s.toString(); if (token.length() < 1 || token.length() > 20) { continue; } if (!ferqMap.containsKey(token)) { ferqMap.put(token, 0); } ferqMap.put(token, ferqMap.get(token) + 1); if (!ivtMapItem.containsKey(token)) { List<Integer> occList = new ArrayList<Integer>(); ivtMapItem.put(token, occList); } List<Integer> occList = ivtMapItem.get(token); occList.add(passageLength); ivtMapItem.put(token, occList); termList.add(token); passageLength++; } termCount += passageLength; tfm.addTermFrequencyForDoc(docId, ferqMap); String url = file.getName(); DocumentIndexed di = new DocumentIndexed(docId); di.setTitle(title); di.setUrl(url); di.setLength(passageLength); for (String token : ivtMapItem.keySet()) { if (!ivtMap.containsKey(token)) { ivtMap.put(token, new ArrayList<Integer>()); } List<Integer> recordList = ivtMap.get(token); List<Integer> occList = ivtMapItem.get(token); // sequentially add <docid, occurrence> to the posting list. for (int e : occList) { recordList.add(docId); recordList.add(e); } } // aib.buildDocTermPositionMap(docId, termList); buildDocumentIndex(di); } } } @Override public void constructIndex() throws IOException { String corpusFolder = _options._corpusPrefix; System.out.println("Construct index from: " + corpusFolder); long start_t = System.currentTimeMillis(); cleanUpDirectory(); // AssistantIndexBuilder.cleanFiles(_options); // Get all corpus files. 
List<File> files = getAllFiles(new File(corpusFolder)); int filesPerBatch = 1300; // initialStore(false, files.size() / filesPerBatch); int threadCount = 1; System.out.println("Start building index with " + threadCount + " threads. Elapsed: " + (System.currentTimeMillis() - start_t) / 1000.0 + "s"); infoMap = new HashMap<String, Object>(); docMap = new HashMap<Integer, DocumentIndexed>(); docUrlMap = new HashMap<String, Integer>(); infoMap.put("_numDocs", files.size()); InvertedIndexBuilder builder = InvertedIndexBuilder .getBuilder(new File(_options._indexPrefix)); AssistantIndexBuilder aib = AssistantIndexBuilder.getInstance(_options); tfm = new TermFrequencyManager(_options._indexPrefix); long termCount = 0; for (int batchNum = 0; batchNum < files.size() / filesPerBatch + 1; batchNum++) { int fileIdStart = batchNum * filesPerBatch; int fileIdEnd = (batchNum + 1) * filesPerBatch; if (fileIdEnd > files.size()) { fileIdEnd = files.size(); } System.out.println("Processing files from " + fileIdStart + " to " + fileIdEnd); ExecutorService threadPool = Executors .newFixedThreadPool(threadCount); IvtMapInteger ivtMapFile = builder .createDistributedIvtiIntegerMap(); Map<String, List<Integer>> ivtMap = new HashMap<String, List<Integer>>(); List<InvertIndexBuildingTask> taskList = new ArrayList<InvertIndexBuildingTask>(); int totalFileCount = fileIdEnd - fileIdStart; int filesPerThread = totalFileCount / threadCount; for (int threadId = 0; threadId < threadCount; threadId++) { int startFileIdx = threadId * filesPerThread + fileIdStart; int endFileIdx = (threadId + 1) * filesPerThread + fileIdStart; if (threadId == threadCount - 1) { endFileIdx = fileIdEnd; } InvertIndexBuildingTask iibt = new InvertIndexBuildingTask( files, startFileIdx, endFileIdx, ivtMap, aib); threadPool.submit(iibt); taskList.add(iibt); } threadPool.shutdown(); try { threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS); } catch (InterruptedException e) { e.printStackTrace(); } // Combine all 
posting lists for N threads. for (InvertIndexBuildingTask iibt : taskList) { termCount += iibt.getTermCount(); } System.out.println("Writing Inverted Map to disk. " + fileIdEnd + " pages have been processed. Elapsed: " + (System.currentTimeMillis() - start_t) / 1000.0 + "s"); ivtMapFile.putAll(ivtMap); ivtMapFile.close(); System.out.println("Batch commit done. Elapsed: " + (System.currentTimeMillis() - start_t) / 1000.0 + "s"); } infoMap.put("_totalTermFrequency", termCount); builder.close(); tfm.close(); CorpusAnalyzer ca = new CorpusAnalyzerPagerank(_options); LogMiner lm = new LogMinerNumviews(_options); Map<String, Double> pageRankMap = (Map<String, Double>) ca.load(); Map<String, Double> numViewsMap = (Map<String, Double>) lm.load(); for (Map.Entry<Integer, DocumentIndexed> die : docMap.entrySet()) { DocumentIndexed di = die.getValue(); String basename = di.getUrl(); // di.setPageRank((float) (double) pageRankMap.get(basename)); // di.setNumViews((int) (double) numViewsMap.get(basename)); } // aib.close(); storeVariables(); long end_t = System.currentTimeMillis(); System.out.println("Construct done. 
Duration: " + (end_t - start_t) / 1000.0 + "s"); }

  /**
   * Persists the three in-memory lookup tables (docid -> document, url -> docid,
   * misc. stats) to files under the index prefix directory.
   */
  private void storeVariables() {
    File docMapFile = new File(this._options._indexPrefix, DOC_IDX_TBL);
    File docUrlFile = new File(this._options._indexPrefix, DOC_URL_TBL);
    File docInfoFile = new File(this._options._indexPrefix, DOC_INFO_TBL);
    PersistentStoreManager.writeObjectToFile(docMapFile, docMap);
    PersistentStoreManager.writeObjectToFile(docUrlFile, docUrlMap);
    PersistentStoreManager.writeObjectToFile(docInfoFile, infoMap);
  }

  /**
   * Inverse of {@link #storeVariables()}: reloads the serialized maps and
   * restores the corpus-wide counters cached inside infoMap.
   */
  private void readVariables() {
    File docMapFile = new File(this._options._indexPrefix, DOC_IDX_TBL);
    File docUrlFile = new File(this._options._indexPrefix, DOC_URL_TBL);
    File docInfoFile = new File(this._options._indexPrefix, DOC_INFO_TBL);
    docMap = (Map<Integer, DocumentIndexed>) PersistentStoreManager
        .readObjectFromFile(docMapFile);
    docUrlMap = (Map<String, Integer>) PersistentStoreManager
        .readObjectFromFile(docUrlFile);
    infoMap = (Map<String, Object>) PersistentStoreManager
        .readObjectFromFile(docInfoFile);
    _totalTermFrequency = (Long) infoMap.get("_totalTermFrequency");
    _numDocs = (Integer) infoMap.get("_numDocs");
  }

  // Recreates the index directory and deletes every file directly inside it.
  // NOTE(review): listFiles() can return null if the path is not a directory or
  // an I/O error occurs — would NPE here; confirm the prefix is always a dir.
  private void cleanUpDirectory() {
    File dir = new File(_options._indexPrefix);
    dir.mkdirs();
    for (File file : dir.listFiles()) {
      file.delete();
    }
  }

  // Registers one indexed document in both lookup tables. Synchronized because
  // the build phase apparently writes from multiple threads.
  synchronized private void buildDocumentIndex(DocumentIndexed di) {
    docMap.put(di._docid, di);
    docUrlMap.put(di.getUrl(), di._docid);
  }

  @Override
  public void loadIndex() throws IOException, ClassNotFoundException {
    // Open the on-disk inverted index and the per-document term-frequency
    // store, then restore the serialized document maps via readVariables().
    InvertedIndexBuilder builder = InvertedIndexBuilder
        .getBuilder(new File(_options._indexPrefix));
    tfm = new TermFrequencyManager(_options._indexPrefix);
    IvtMapInteger ivtMapBatch = builder
        .getUnifiedDistributedIvtiIntegerMap();
    ivtIndexMapList.add(ivtMapBatch);
    aib = AssistantIndexBuilder.getInstance(_options);
    // AssistantIndexBuilder aib =
    // AssistantIndexBuilder.getInstance(_options);
    // aib.buildTwoLetterMap(ivtMapBatch.keySet());
    // aib.buildTermLenMap(ivtMapBatch.keySet());
    //
    // String a = aib.getTermPositionManager().getTermAtPosition(1, 1);
    // System.out.println("a: " + a);
    // List<String> b =
    // aib.getTwoLetterTermListManager().getLetterTermList("or");
    // for (String s : b) {
    // System.out.println(s);
    // }
    // List<String> c = aib.getTermLenListManager().getTermLenList(2);
    // for (String s : c) {
    // System.out.println(s);
    // }
    readVariables();
  }

  @Override
  public Document getDoc(int docid) {
    return docMap.get(docid);
  }

  /**
   * In HW2, you should be using {@link DocumentIndexed}.
   */
  // Galloping step over one posting list (laid out as repeating
  // [docId, position] pairs, hence the i += 2 stride): returns the first docId
  // strictly greater than docId, resuming from the cached cursor
  // cachePos[phraseIndex][termIndex]; -1 when exhausted.
  private int nextInOccurence(int docId, List<Integer> postinglist, int phraseIndex, int termIndex) {
    int start = cachePos.get(phraseIndex).get(termIndex);
    for (int i = start; i < postinglist.size(); i += 2) {
      if (postinglist.get(i) > docId) {
        cachePos.get(phraseIndex).set(termIndex, i);
        return postinglist.get(i);
      }
    }
    cachePos.get(phraseIndex).set(termIndex, postinglist.size());
    return -1;
  }

  // Conjunctive document-at-a-time merge: finds the smallest docId > docId that
  // appears in EVERY posting list of this phrase. Recurses on (maximum - 1)
  // until all lists agree; -1 when any list runs out.
  private int nextForOccurence(int docId, Vector<List<Integer>> postinglists, int phraseIndex) {
    // System.out.println("current id is: "+docId);
    int previousVal = -1;
    boolean equilibrium = true;
    int maximum = Integer.MIN_VALUE;
    for (int i = 0; i < postinglists.size(); i++) {
      int currentId = nextInOccurence(docId, postinglists.get(i), phraseIndex, i);
      if (currentId < 0)
        return -1;
      if (previousVal < 0) {
        previousVal = currentId;
        maximum = currentId;
      } else {
        if (previousVal != currentId) {
          equilibrium = false;
          maximum = Math.max(maximum, currentId);
        }
      }
    }
    if (equilibrium == true)
      return previousVal;
    else
      return nextForOccurence(maximum - 1, postinglists, phraseIndex);
  }

  // Within document docId, returns the first in-document position of this term
  // that is strictly greater than pos (positions sit at odd offsets of the
  // [docId, position] pairs); -1 if the doc or a later position is absent.
  private int nextPos(List<Integer> postinglist, int docId, int pos, int phrasePos, int termPos) {
    int docPosition = -1;
    int start = cachePos.get(phrasePos).get(termPos);
    for (int i = start; i < postinglist.size(); i += 2) {
      if (postinglist.get(i) == docId) {
        docPosition = i;
        cachePos.get(phrasePos).set(termPos, i);
        break;
      }
    }
    if (docPosition == -1) {
      cachePos.get(phrasePos).set(termPos, postinglist.size());
      return -1;
    }
    int Pos = docPosition + 1;
    while (Pos < postinglist.size() && postinglist.get(Pos - 1) == docId) {
      if (postinglist.get(Pos) > pos) {
        cachePos.get(phrasePos).set(termPos, Pos - 1);
        return postinglist.get(Pos);
      }
      Pos += 2;
    }
    cachePos.get(phrasePos).set(termPos, postinglist.size());
    return -1;
  }

  // Looks for the phrase's terms at consecutive positions (> pos) inside docId.
  // Returns the position of the phrase's first term on success, recursing from
  // the first term's position when the candidate positions are not adjacent.
  private int nextPhrase(int docId, int pos, Vector<List<Integer>> postinglists, int phrasePos) {
    int[] positions = new int[postinglists.size()];
    boolean success = true;
    for (int i = 0; i < positions.length; i++) {
      positions[i] = nextPos(postinglists.get(i), docId, pos, phrasePos, i);
      if (positions[i] < 0)
        return -1;
    }
    // int maximum=positions[0];
    for (int i = 1; i < positions.length; i++) {
      if (positions[i] != positions[i - 1] + 1)
        success = false;
      // if (positions[i]>maximum)
      // maximum=positions[i];
    }
    if (success == true)
      return positions[0];
    else
      return nextPhrase(docId, positions[0], postinglists, phrasePos);
  }

  // Next document after docId that contains phrase i as an exact consecutive
  // phrase: first intersect the docs (nextForOccurence), then verify adjacency
  // (4-arg nextPhrase); keeps advancing until a verified doc or -1.
  private int nextPhrase(int docId, Vector<List<Integer>> postinglists, int i) {
    int docVerify = nextForOccurence(docId, postinglists, i);
    // System.out.println("docVerify is: "+docVerify);
    if (docVerify < 0)
      return -1;
    int result = nextPhrase(docVerify, -1, postinglists, i);
    if (result > 0)
      return docVerify;
    return nextPhrase(docVerify, postinglists, i);
  }

  // Top-level conjunctive merge across all phrases of the query: smallest
  // docId > docId containing every phrase; same equilibrium/recursion scheme
  // as nextForOccurence.
  private int next(int docId, Vector<Vector<List<Integer>>> postinglists) {
    // System.out.println("current id is: "+docId);
    int previousVal = -1;
    boolean equilibrium = true;
    int maximum = Integer.MIN_VALUE;
    for (int i = 0; i < postinglists.size(); i++) {
      int currentId = nextPhrase(docId, postinglists.get(i), i);
      if (currentId < 0)
        return -1;
      if (previousVal < 0) {
        previousVal = currentId;
        maximum = currentId;
      } else {
        if (previousVal != currentId) {
          equilibrium = false;
          maximum = Math.max(maximum, currentId);
        }
      }
    }
    if (equilibrium == true)
      return previousVal;
    else
      return next(maximum - 1, postinglists);
  }

  // The per-term cursors in cachePos are only valid when the same query is
  // advanced monotonically forward; any other access pattern must reset them.
  private boolean canUseCache(Query query, int docid) {
    if (docid == -1) {
      return false;
    }
    if (query._query.equals(previousQuery) == false) {
      return false;
    }
    if (docid <= previousDocid) {
      return false;
    }
    return true;
  }

  @Override
  public Document nextDoc(Query query, int docid) {
    // Build one posting-list group per query token; tokens containing spaces
    // are treated as phrases (one list per constituent word, each stemmed).
    Vector<String> tokens = query._tokens;
    int result = -1;
    Vector<Vector<List<Integer>>> postingLists = new Vector<Vector<List<Integer>>>();
    for (int i = 0; i < tokens.size(); i++) {
      Vector<List<Integer>> container = new Vector<List<Integer>>();
      String[] consecutiveWords = tokens.get(i).split(" ");
      for (int j = 0; j < consecutiveWords.length; j++) {
        Stemmer s = new Stemmer();
        s.add(consecutiveWords[j].toLowerCase().toCharArray(), consecutiveWords[j].length());
        s.stem();
        container.add(ivtGet(s.toString()));
      }
      // System.out.println("size is: "+docInvertedMap.get(s.toString()).size());
      postingLists.add(container);
    }
    // Fresh query or backwards seek: zero every per-term cursor.
    if (canUseCache(query, docid) == false) {
      previousQuery = query._query;
      previousDocid = -1;
      cachePos = new Vector<Vector<Integer>>();
      for (int i = 0; i < postingLists.size(); i++) {
        Vector<Integer> tempVec = new Vector<Integer>();
        int size = postingLists.get(i).size();
        for (int j = 0; j < size; j++)
          tempVec.add(0);
        cachePos.add(tempVec);
      }
    }
    result = next(docid, postingLists);
    // NOTE(review): previousDocid is set to result - 1 (not result) —
    // presumably so canUseCache accepts the caller passing result back in;
    // confirm against the caller's advance pattern.
    previousDocid = result - 1;
    if (result < 0)
      return null;
    else
      return getDoc(result);
  }

  @Override
  public int corpusDocFrequencyByTerm(String term) {
    // Number of documents in which {@code term} appeared, over the full
    // corpus. Counts distinct docIds in the [docId, position] pair list.
    // Stem given term.
    Stemmer s = new Stemmer();
    s.add(term.toLowerCase().toCharArray(), term.length());
    s.stem();
    if (!ivtContainsKey(s.toString())) {
      return 0;
    }
    // Get posting list from index.
    List<Integer> l = ivtGet(s.toString());
    int count = 0;
    int last_id = -1;
    for (int i = 0; i < l.size() - 1; i += 2) {
      if (l.get(i) != last_id) {
        last_id = l.get(i);
        ++count;
      }
    }
    return count;
  }

  @Override
  public int corpusTermFrequency(String term) {
    // Number of times {@code term} appeared in corpus: one [docId, position]
    // pair per occurrence, so total occurrences = list size / 2.
    // Stem given term.
    Stemmer s = new Stemmer();
    s.add(term.toLowerCase().toCharArray(), term.length());
    s.stem();
    if (!ivtContainsKey(s.toString())) {
      return 0;
    }
    // Get posting list from index.
    List<Integer> l = ivtGet(s.toString());
    return l.size() / 2;
  }

  @Override
  public int documentTermFrequency(String term, int docid) {
    // Occurrences of the stemmed term within one document, via the
    // TermFrequencyManager side index.
    // Stem given term.
    Stemmer s = new Stemmer();
    s.add(term.toLowerCase().toCharArray(), term.length());
    s.stem();
    Map<String, Integer> tfMap = tfm.gettermFrequencyForDoc(docid);
    if (!tfMap.containsKey(s.toString())) {
      return 0;
    }
    return tfMap.get(s.toString());
  }

  // True if any of the (possibly sharded) inverted-index maps knows the key.
  private boolean ivtContainsKey(String key) {
    for (Map<String, List<Integer>> m : ivtIndexMapList) {
      if (m.containsKey(key)) {
        return true;
      }
    }
    return false;
  }

  // Tiny posting-list cache in front of the disk-backed index shards.
  private Map<String, List<Integer>> cache = new LinkedHashMap<String, List<Integer>>();

  // Fetches (and caches) the merged posting list for a stemmed term.
  // NOTE(review): cache.remove(tmp) passes the VALUE (a List) where
  // Map.remove expects the key (a String) — it is a no-op, so the intended
  // LRU re-insertion never actually reorders the entry. Likely meant
  // cache.remove(key). Also, size() > 1 caps the cache at ~2 entries —
  // confirm that is deliberate.
  private List<Integer> ivtGet(String key) {
    if (cache.containsKey(key)) {
      List<Integer> tmp = cache.get(key);
      cache.remove(tmp);
      cache.put(key, tmp);
      return cache.get(key);
    }
    if (cache.size() > 1) {
      cache.remove(cache.keySet().toArray()[0]);
    }
    List<Integer> l = new ArrayList<Integer>();
    for (Map<String, List<Integer>> m : ivtIndexMapList) {
      if (m.containsKey(key)) {
        l.addAll(m.get(key));
      }
    }
    cache.put(key, l);
    return l;
  }

  @Override
  public Map<String, Integer> documentTermFrequencyMap(int docid) {
    return tfm.gettermFrequencyForDoc(docid);
  }

  // Length of the raw posting list ([docId, position] pairs) for key.
  public int listLength(String key) {
    return ivtGet(key).size();
  }

  // Maintains a bounded min-heap of the most frequent candidate words; only
  // words with corpus frequency above the hard-coded 2000 cutoff are admitted.
  private void addPQ(PriorityQueue<Word> pq, Word word, int windowSize) {
    if (pq.size() < windowSize) {
      if (word.frequency > 2000)
        pq.add(word);
    } else {
      if (word.compareTo(pq.peek()) > 0 && word.frequency > 2000) {
        pq.poll();
        // System.out.println(word.term+" "+word.frequency);
        pq.add(word);
      }
    }
  }

  // True iff every character of k is alphabetic.
  private boolean allChars(String k) {
    for (int i = 0; i < k.length(); i++) {
      if (Character.isAlphabetic(k.charAt(i)) == false)
        return false;
    }
    return true;
  }

  // For each query token, collects spelling-correction candidates within edit
  // distance 1 (and length difference < 3) into temp[i], keeping only the
  // top windowSize by frequency; the token itself is always kept.
  private void storeCandidate(Query query, Vector<Vector<Word>> temp, int windowSize) {
    for (int i = 0; i < query._tokens.size(); i++) {
      long currentTime = System.currentTimeMillis();
      String target = query._tokens.get(i);
      PriorityQueue<Word> pq = new PriorityQueue<Word>();
      Set<String> candidates = getPossible(target);
      Iterator<String> it = candidates.iterator();
      while (it.hasNext()) {
        String k = it.next();
        if (Math.abs(target.length() - k.length()) >= 3)
          continue;
        if (k.equals(target) == true) {
          int listLength = ivtGet(k).size();
          temp.get(i).add(new Word(target, listLength));
          continue;
        }
        if (allChars(k) == false)
          continue;
        if (Utils.wordDistance(target, k) <= 1) {
          int listLength = ivtGet(k).size();
          Word word = new Word(k, listLength);
          addPQ(pq, word, windowSize);
        }
      }
      while (pq.isEmpty() == false) {
        temp.get(i).add(pq.poll());
      }
      long timeElapsed = System.currentTimeMillis() - currentTime;
      System.out.println("time elapsed: " + timeElapsed);
    }
  }

  // Depth-first cartesian product over the per-token candidate lists, turning
  // each full combination into a processed Query appended to candidates.
  private void makeQueryList(Vector<Vector<Word>> Combinations, List<Query> candidates, String[] temp, int position) {
    if (position >= Combinations.size()) {
      String result = temp[0];
      for (int i = 1; i < position; i++) {
        result += " ";
        result += temp[i];
      }
      Query query = new Query(result);
      query.processQuery();
      candidates.add(query);
      return;
    }
    Vector<Word> current = Combinations.get(position);
    for (int i = 0; i < current.size(); i++) {
      temp[position] = current.get(i).term;
      makeQueryList(Combinations, candidates, temp, position + 1);
    }
  }

  // Keeps only candidate queries that match at least 4 documents (the loop
  // breaks after seeing the 4th hit, count values 0..3).
  private void experiment(List<Query> lists, List<Query> result) {
    for (Query q : lists) {
      int count = 0;
      int docid = -1;
      DocumentIndexed doc;
      while ((doc = (DocumentIndexed) nextDoc(q, docid)) != null) {
        if (count >= 3) {
          result.add(q);
          break;
        }
        count++;
        docid = doc._docid;
        // System.out.println("current docid: "+docid);
      }
    }
  }

  // Drops a token's identity candidate when it is rare (< 2000) and there is
  // at least one alternative, so unlikely originals don't crowd out fixes.
  public void refineCandidates(Vector<Vector<Word>> candidates, Query query) {
    for (int i = 0; i < candidates.size(); i++) {
      Vector<Word> vs = candidates.get(i);
      String target = query._tokens.get(i);
      Iterator<Word> it = vs.iterator();
      int size = vs.size();
      while (it.hasNext() == true) {
        Word current = it.next();
        if (current.term.equals(target) == true) {
          if ((size >= 2) && (current.frequency < 2000)) {
            it.remove();
          }
        }
      }
    }
  }

  // Candidate generation by character bigrams: a term qualifies when it shares
  // at least 1 bigram with str (2 for strings longer than 5 chars).
  public Set<String> getPossible(String str) {
    Set<String> result = new HashSet<String>();
    long currentTime = System.currentTimeMillis();
    int len = str.length();
    if (len == 1) {
      result.add(str);
      return result;
    }
    Map<String, Integer> tempMap = new HashMap<String, Integer>();
    for (int i = 0; i + 2 <= len; i++) {
      String biGram = str.substring(i, i + 2);
      List<String> biGramList = aib.getTwoLetterTermListManager()
          .getLetterTermList(biGram);
      if (biGramList == null)
        biGramList = new ArrayList<String>();
      for (String element : biGramList) {
        if (tempMap.containsKey(element) == false)
          tempMap.put(element, 1);
        else
          tempMap.put(element, tempMap.get(element) + 1);
      }
    }
    double boundary = 0.0;
    if (len <= 5)
      boundary = 1;
    else
      boundary = 2;
    // int boundaryAsInt = (int)boundary;
    for (String element : tempMap.keySet()) {
      // System.out.println(element);
      int tmp = tempMap.get(element);
      double tmpDou = (double) tmp;
      if (tmpDou >= boundary) {
        result.add(element);
        // System.out.println("advertisement");
      }
    }
    long offSet = System.currentTimeMillis() - currentTime;
    // System.out.println(offSet);
    return result;
  }

  /**
   * Produces the three query-assist lists for the UI, keyed "history"
   * (previous session queries containing this one), "correction" (spelling
   * corrections ranked by first-letter agreement with the original), and
   * "suggestions" (queries from SessionHandler.queryExplore()).
   */
  public Map<String,List<Query>> querySearch(Query query) {
    /*
     * Map<String,Integer> result= supplementWord(query); for (String e:
     * result.keySet()){ System.out.println(e); } return null;
     */
    Map<String,List<Query>> finalResult = new HashMap<String,List<Query>>();
    List<Query> historySub = new ArrayList<Query> ();
    SessionHandler sh = SessionHandler.getInstance();
    List<String> history =sh.queryAllHistory();
    for (String str:history){
      if (str.toLowerCase().contains(query._query.toLowerCase())==true){
        Query qq = new Query(str);
        qq.processQuery();
        historySub.add(qq);
      }
    }
    finalResult.put("history", historySub);
    long currentTime = System.currentTimeMillis();
    int windowSize = 2;
    Vector<Vector<Word>> temp = new Vector<Vector<Word>>();
    for (int i = 0; i < query._tokens.size(); i++)
      temp.add(new Vector<Word>());
    storeCandidate(query, temp, windowSize);
    refineCandidates(temp, query);
    List<Query> candidates = new ArrayList<Query>();
    String[] tempQuery = new String[temp.size()];
    makeQueryList(temp, candidates, tempQuery, 0);
    // for (int i=0;i<candidates.size();i++)
    // System.out.println(candidates.get(i));
    // long elapse = System.currentTimeMillis() - currentTime;
    // System.out.println("elapsed: "+elapse);
    System.out.println(candidates.size());
    List<Query> result = new ArrayList<Query>();
    // Only filter by result count for short queries; the product of candidate
    // lists for long queries would make experiment() too slow.
    if (query._tokens.size() < 4)
      experiment(candidates, result);
    else
      for (Query candidate : candidates) {
        result.add(candidate);
      }
    // elapse = System.currentTimeMillis() - currentTime;
    // System.out.println("elapsed: "+elapse);
    List<wordQ> tmp = new ArrayList<wordQ>();
    for (int i = 0; i < result.size(); i++) {
      // System.out.println(result.get(i));
      // Penalize corrections whose tokens change the first letter; earlier
      // tokens weigh more (tokens.size() - j).
      int distance = 0;
      for (int j = 0; j < query._tokens.size(); j++) {
        if (query._tokens.get(j).charAt(0) != result.get(i)._tokens.get(j).charAt(0)) {
          distance += (query._tokens.size() - j);
        }
      }
      // System.out.println(result.get(i)+"distance is: "+distance);
      tmp.add(new wordQ(result.get(i), distance));
    }
    Collections.sort(tmp);
    long elapse = System.currentTimeMillis() - currentTime;
    System.out.println("elapsed: " + elapse);
    for (int i = 0; i < tmp.size(); i++) {
      result.set(i, tmp.get(i).term);
    }
    // De-duplicate against the top history entry (unless it ranked first).
    int index=-1;
    for (int i = 0; i < result.size(); i++){
      //System.out.println("result: " + result.get(i));
      if (historySub.size()>0&&historySub.get(0)._query.equals(result.get(i)._query)==true){
        index = i;
        break;
      }
    }
    if (index>0&&index<result.size())
      result.remove(index);
    finalResult.put("correction", result);
    List<Query> suggestions = new ArrayList<Query> ();
    List<String> l = sh.queryExplore();
    for (String str:l){
      Query qqq = new Query(str);
      qqq.processQuery();
      suggestions.add(qqq);
    }
    finalResult.put("suggestions", suggestions);
    return finalResult;
  }

  /**
   * If the tokens of q occur consecutively in docid starting at position,
   * returns the term that follows them; returns "" on a mismatch and null when
   * the document has no term at the probed position (end of document).
   */
  public String nextFollowing(Query q, int docid, int position) {
    for (int i = 0; i < q._tokens.size(); i++) {
      String toCompare = q._tokens.get(i);
      // System.out.println(toCompare);
      String cur = aib.getTermPositionManager().getTermAtPosition(docid, position + i);
      // System.out.println(cur);
      if (cur == null)
        return null;
      if (cur.equals(toCompare) == false)
        return "";
    }
    return aib.getTermPositionManager().getTermAtPosition(docid, position + q._tokens.size());
  }

  /**
   * Query-completion helper: over (up to) the first 20 matching documents,
   * tallies every term observed to directly follow the query phrase.
   */
  public Map<String, Integer> supplementWord(Query q) {
    System.out.println("i am here.....");
    DocumentIndexed doc;
    int docid = -1;
    Map<String, Integer> mapping = new HashMap<String, Integer>();
    int count = 0;
    while ((doc = (DocumentIndexed) nextDoc(q, docid)) != null) {
      if (count >= 20) {
        break;
      }
      docid = doc._docid;
      // System.out.println(docid);
      for (int position = 1;; position++) {
        String k = nextFollowing(q, docid, position);
        if (k == null)
          break;
        if (k.length() > 0) {
          if (mapping.containsKey(k) == false)
            mapping.put(k, 1);
          else
            mapping.put(k, mapping.get(k) + 1);
        }
      }
      count++;
      // System.out.println("current docid: "+docid);
    }
    return mapping;
  }

  public static void main(String[] args) {
  }
}

// Pairs a candidate Query with its ranking score ("frequency" here is the
// first-letter distance computed in querySearch); sorts ascending.
class wordQ implements Comparable<wordQ> {
  public Query term;
  public int frequency;

  @Override
  public int compareTo(wordQ w) {
    if (frequency < w.frequency) {
      return -1;
    }
    if (frequency > w.frequency) {
      return 1;
    }
    return 0;
  }

  public wordQ(Query term, int f) {
    this.term = term;
    this.frequency = f;
  }
}

// A spelling-correction candidate with its corpus frequency.
class Word implements Comparable<Word> {
  public String term;
  public int frequency;

  // NOTE(review): never returns 0 — equal frequencies compare as "greater",
  // violating the Comparable contract (sgn(a.compareTo(b)) should equal
  // -sgn(b.compareTo(a))). PriorityQueue tolerates it, but sorting such a
  // list is unspecified; should return 0 on equality.
  @Override
  public int compareTo(Word w) {
    if (frequency < w.frequency) {
      return -1;
    }
    return 1;
  }

  public Word(String term, int f) {
    this.term = term;
    this.frequency = f;
  }
}
/*
 * Copyright 2017 Obsidian Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.obsidian.compiler.Types;

import com.obsidian.compiler.ControlFlowGraph.ArgumentCfgNode;
import com.obsidian.compiler.ControlFlowGraph.FunctionCallAnalysisResult;
import com.obsidian.compiler.Lava.*;
import com.obsidian.compiler.LvEvalEnvironment;
import com.obsidian.compiler.MemberInfo;
import com.obsidian.compiler.Panic;
import com.obsidian.compiler.RawValues.RawFunctionValue;
import com.obsidian.compiler.RawValues.RawOverloadedFunctionValue;
import com.obsidian.compiler.RawValues.RawSingleFunctionValue;
import com.obsidian.compiler.RawValues.RawValue;

import java.util.ArrayList;
import java.util.List;

/**
 * Created by Jonathon on 5/26/2017.
 *
 * A function type holding several {@link SingleFunctionType} overloads.
 * Overload resolution happens in {@link #analyzeCanCallWithArgs}; calling
 * dispatches to the single matched overload.
 */
public class OverloadedFunctionType extends FunctionType {

    // The candidate signatures, in registration order.
    public List<SingleFunctionType> overloads;

    public OverloadedFunctionType() {
        this.overloads = new ArrayList<>(1);
    }

    public void addOverload(SingleFunctionType func) {
        this.overloads.add(func);
    }

    @Override
    public String getHumanName() {
        return null;
    }

    @Override
    public MemberInfo getMemberInfo(LvEvalEnvironment lv, String memberId, RawValue rawValue) {
        return null;
    }

    @Override
    public LvValue getAsString(RawValue value) {
        return null; // Cannot convert to string, return null
    }

    /**
     * Resolves the overload matching argValues and invokes it, wrapping the
     * matched overload's node as a single-function raw value. Returns null
     * when no overload matches.
     */
    @Override
    public LvEvalResult call(LvEvalEnvironment lv, LvRangeInFile source, RawFunctionValue value, ArgumentCfgNode[] argNodes, List<LvValue> argValues) {
        FunctionCallAnalysisResult result = this.analyzeCanCallWithArgs(lv, argValues);
        if (result.returnType != null) {
            // NOTE(review): when the args contained union types,
            // analyzeCanCallWithArgs returns via the union branch WITHOUT
            // setting matchedFunctionIndex — presumably it is then the field's
            // default; confirm dispatch is correct for union-typed calls.
            RawSingleFunctionValue singleFunctionValue = new RawSingleFunctionValue(((RawOverloadedFunctionValue) value).node.overloads.get(result.matchedFunctionIndex));
            return this.overloads.get(result.matchedFunctionIndex).call(lv, source, singleFunctionValue, argNodes, argValues);
        }
        return null;
    }

    /**
     * Determines whether some overload accepts args. Union-typed arguments are
     * expanded into every concrete permutation; all permutations must match
     * and the result type is the union of their return types. For plain
     * arguments exactly one overload must match (ambiguity panics).
     * result.returnType == null signals "no match".
     */
    @Override
    public FunctionCallAnalysisResult analyzeCanCallWithArgs(LvEvalEnvironment lv, List<LvValue> args) {
        List<Type> argTypes = Type.valueListToTypeList(args);
        FunctionCallAnalysisResult result = new FunctionCallAnalysisResult();
        if (UnionType.containsAnyUnionTypes(argTypes)) {
            ArgumentPermutationSet argSet = this.getArgumentPermutations(argTypes);
            List<Type> returnTypes = new ArrayList<>(1);
            List<List<Type>> argListSet = argSet.sets;
            int nonMatchCount = 0;
            for (List<Type> argList : argListSet) {
                Type type = this.canCallAnyOverloadWithArgTypes(lv, argList);
                if (type == null) {
                    // TODO: Error about how 1 or more of the arg types was unmatched
                    nonMatchCount++;
                } else {
                    returnTypes.add(type);
                }
            }
            if (nonMatchCount > 0) {
                result.returnType = null;
            } else {
                result.returnType = UnionType.coalesce(returnTypes);
            }
            return result;
        }
        int matchCount = 0;
        Type returnType = Type.unknown();
        for (int i = 0; i < this.overloads.size(); i++) {
            SingleFunctionType overload = this.overloads.get(i);
            FunctionCallAnalysisResult subResult = overload.analyzeCanCallWithArgs(lv, args);
            if (subResult.returnType != null) {
                matchCount++;
                returnType = overload.returnType;
                result.matchedFunctionIndex = i;
            }
        }
        if (matchCount == 0) {
            result.returnType = null;
        } else if (matchCount == 1) {
            result.returnType = returnType;
        } else if (matchCount > 1) {
            // TODO: Throw error if more than one function matches
            throw new Panic();
        }
        return result;
    }

    @Override
    public String getSignature() {
        return "TODO: Overloaded function signatures";
    }

    @Override
    public boolean isPureFunction() {
        return false; // TODO
    }

    /**
     * Returns the return type of the overload accepting argList, or null when
     * none does. NOTE(review): unlike the non-union path above, an ambiguous
     * match (matchCount > 1) does not panic here — it silently returns the
     * LAST matching overload's return type; confirm that asymmetry is wanted.
     */
    private Type canCallAnyOverloadWithArgTypes(LvEvalEnvironment lv, List<Type> argList) {
        int matchCount = 0;
        Type returnType = Type.unknown();
        for (SingleFunctionType overload : this.overloads) {
            FunctionCallAnalysisResult result = overload.analyzeCanCallWithArgs(lv, Type.typeListToValueList(argList));
            if (result.returnType != null) {
                matchCount++;
                returnType = overload.returnType;
            }
        }
        if (matchCount == 0) {
            return null;
        }
        if (matchCount > 1) {
            // TODO: Throw error if more than one function matches
        }
        return returnType;
    }

    // Expands union-typed arguments into the full set of concrete argument
    // lists (cartesian product over the union members).
    private ArgumentPermutationSet getArgumentPermutations(List<Type> args) {
        ArgumentPermutationSet set = new ArgumentPermutationSet();
        this.addArgumentPermutationsToSet(args, set, 0);
        return set;
    }

    // Recursive worker for getArgumentPermutations: processes args[startIndex],
    // multiplying the accumulated lists by each union member (or appending the
    // single type to every list), then recurses on the next index.
    private void addArgumentPermutationsToSet(List<Type> args, ArgumentPermutationSet listSet, int startIndex) {
        if (startIndex == args.size()) {
            return;
        }
        Type arg = args.get(startIndex);
        if (listSet.sets.size() == 0) {
            if (arg instanceof UnionType) {
                // If the current set is empty and the first arg is a union type, start new lists for each type in the union
                UnionType argType_asUnionType = (UnionType) arg;
                for (Type argSubType : argType_asUnionType.types) {
                    List<Type> newSet = new ArrayList<>(args.size());
                    newSet.add(argSubType);
                    listSet.sets.add(newSet);
                }
            } else {
                // If the current set is empty, and the first arg is a single type, start a single new list for it
                List<Type> newSet = new ArrayList<>(args.size());
                newSet.add(arg);
                listSet.sets.add(newSet);
            }
        } else {
            if (arg instanceof UnionType) {
                // If the current list is not empty, and the arg is a union type, we must create a new list for each type in the union
                UnionType argType_asUnionType = (UnionType) arg;
                List<List<Type>> newSet = new ArrayList<>(listSet.sets.size() * argType_asUnionType.types.size());
                for (List<Type> set : listSet.sets) {
                    for (Type subtype : argType_asUnionType.types) {
                        List<Type> newList = new ArrayList<>(set);
                        newList.add(subtype);
                        newSet.add(newList);
                    }
                }
                listSet.sets = newSet;
            } else {
                // Arg is not a union type, just add it to each of the sets
                for (List<Type> set : listSet.sets) {
                    set.add(arg);
                }
            }
        }
        this.addArgumentPermutationsToSet(args, listSet, startIndex + 1);
    }

    @Override
    public LvEvalResult eval(LvEvalEnvironment lv) {
        return null;
    }
}
/* Copyright (c) 2008 Health Market Science, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.healthmarketscience.jackcess.impl.query; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import com.healthmarketscience.jackcess.RowId; import com.healthmarketscience.jackcess.impl.DatabaseImpl; import com.healthmarketscience.jackcess.impl.RowIdImpl; import com.healthmarketscience.jackcess.impl.RowImpl; import static com.healthmarketscience.jackcess.impl.query.QueryFormat.*; import com.healthmarketscience.jackcess.query.Query; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * Base class for classes which encapsulate information about an Access query. * The {@link #toSQLString()} method can be used to convert this object into * the actual SQL string which this query data represents. 
* * @author James Ahlborn */ public abstract class QueryImpl implements Query { protected static final Log LOG = LogFactory.getLog(QueryImpl.class); private static final Row EMPTY_ROW = new Row(); private final String _name; private final List<Row> _rows; private final int _objectId; private final Type _type; private final int _objectFlag; protected QueryImpl(String name, List<Row> rows, int objectId, int objectFlag, Type type) { _name = name; _rows = rows; _objectId = objectId; _type = type; _objectFlag = objectFlag; if(type != Type.UNKNOWN) { short foundType = getShortValue(getQueryType(rows), _type.getValue()); if(foundType != _type.getValue()) { throw new IllegalStateException(withErrorContext( "Unexpected query type " + foundType)); } } } /** * Returns the name of the query. */ @Override public String getName() { return _name; } /** * Returns the type of the query. */ @Override public Type getType() { return _type; } @Override public boolean isHidden() { return((_objectFlag & DatabaseImpl.HIDDEN_OBJECT_FLAG) != 0); } /** * Returns the unique object id of the query. */ @Override public int getObjectId() { return _objectId; } @Override public int getObjectFlag() { return _objectFlag; } /** * Returns the rows from the system query table from which the query * information was derived. 
*/ public List<Row> getRows() { return _rows; } protected List<Row> getRowsByAttribute(Byte attribute) { return getRowsByAttribute(getRows(), attribute); } protected Row getRowByAttribute(Byte attribute) { return getUniqueRow(getRowsByAttribute(getRows(), attribute)); } public Row getTypeRow() { return getRowByAttribute(TYPE_ATTRIBUTE); } protected List<Row> getParameterRows() { return getRowsByAttribute(PARAMETER_ATTRIBUTE); } protected Row getFlagRow() { return getRowByAttribute(FLAG_ATTRIBUTE); } protected Row getRemoteDatabaseRow() { return getRowByAttribute(REMOTEDB_ATTRIBUTE); } protected List<Row> getTableRows() { return getRowsByAttribute(TABLE_ATTRIBUTE); } protected List<Row> getColumnRows() { return getRowsByAttribute(COLUMN_ATTRIBUTE); } protected List<Row> getJoinRows() { return getRowsByAttribute(JOIN_ATTRIBUTE); } protected Row getWhereRow() { return getRowByAttribute(WHERE_ATTRIBUTE); } protected List<Row> getGroupByRows() { return getRowsByAttribute(GROUPBY_ATTRIBUTE); } protected Row getHavingRow() { return getRowByAttribute(HAVING_ATTRIBUTE); } protected List<Row> getOrderByRows() { return getRowsByAttribute(ORDERBY_ATTRIBUTE); } protected abstract void toSQLString(StringBuilder builder); protected void toSQLParameterString(StringBuilder builder) { // handle any parameters List<String> params = getParameters(); if(!params.isEmpty()) { builder.append("PARAMETERS ").append(params) .append(';').append(NEWLINE); } } @Override public List<String> getParameters() { return (new RowFormatter(getParameterRows()) { @Override protected void format(StringBuilder builder, Row row) { String typeName = PARAM_TYPE_MAP.get(row.flag); if(typeName == null) { throw new IllegalStateException(withErrorContext( "Unknown param type " + row.flag)); } builder.append(row.name1).append(' ').append(typeName); if((TEXT_FLAG.equals(row.flag)) && (getIntValue(row.extra, 0) > 0)) { builder.append('(').append(row.extra).append(')'); } } }).format(); } protected List<String> 
getFromTables() { // grab the list of query tables List<TableSource> tableExprs = new ArrayList<TableSource>(); for(Row table : getTableRows()) { StringBuilder builder = new StringBuilder(); if(table.expression != null) { toQuotedExpr(builder, table.expression).append(IDENTIFIER_SEP_CHAR); } if(table.name1 != null) { toOptionalQuotedExpr(builder, table.name1, true); } toAlias(builder, table.name2); String key = ((table.name2 != null) ? table.name2 : table.name1); tableExprs.add(new SimpleTable(key, builder.toString())); } // combine the tables with any query joins List<Row> joins = getJoinRows(); for(Row joinRow : joins) { String fromTable = joinRow.name1; String toTable = joinRow.name2; TableSource fromTs = null; TableSource toTs = null; // combine existing join expressions containing the target tables for(Iterator<TableSource> joinIter = tableExprs.iterator(); (joinIter.hasNext() && ((fromTs == null) || (toTs == null))); ) { TableSource ts = joinIter.next(); if((fromTs == null) && ts.containsTable(fromTable)) { fromTs = ts; // special case adding expr to existing join if((toTs == null) && ts.containsTable(toTable)) { toTs = ts; break; } joinIter.remove(); } else if((toTs == null) && ts.containsTable(toTable)) { toTs = ts; joinIter.remove(); } } if(fromTs == null) { fromTs = new SimpleTable(fromTable); } if(toTs == null) { toTs = new SimpleTable(toTable); } if(fromTs == toTs) { if(fromTs.sameJoin(joinRow.flag, joinRow.expression)) { // easy-peasy, we just added the join expression to existing join, // nothing more to do continue; } throw new IllegalStateException(withErrorContext( "Inconsistent join types for " + fromTable + " and " + toTable)); } // new join expression tableExprs.add(new Join(fromTs, toTs, joinRow.flag, joinRow.expression)); } // convert join objects to SQL strings List<String> result = new AppendableList<String>(); for(TableSource ts : tableExprs) { result.add(ts.toString()); } return result; } protected String getFromRemoteDbPath() { return 
getRemoteDatabaseRow().name1; } protected String getFromRemoteDbType() { return getRemoteDatabaseRow().expression; } protected String getWhereExpression() { return getWhereRow().expression; } protected List<String> getOrderings() { return (new RowFormatter(getOrderByRows()) { @Override protected void format(StringBuilder builder, Row row) { builder.append(row.expression); if(DESCENDING_FLAG.equalsIgnoreCase(row.name1)) { builder.append(" DESC"); } } }).format(); } @Override public String getOwnerAccessType() { return(hasFlag(OWNER_ACCESS_SELECT_TYPE) ? "WITH OWNERACCESS OPTION" : DEFAULT_TYPE); } protected boolean hasFlag(int flagMask) { return hasFlag(getFlagRow(), flagMask); } protected boolean supportsStandardClauses() { return true; } /** * Returns the actual SQL string which this query data represents. */ @Override public String toSQLString() { StringBuilder builder = new StringBuilder(); if(supportsStandardClauses()) { toSQLParameterString(builder); } toSQLString(builder); if(supportsStandardClauses()) { String accessType = getOwnerAccessType(); if(!DEFAULT_TYPE.equals(accessType)) { builder.append(NEWLINE).append(accessType); } builder.append(';'); } return builder.toString(); } @Override public String toString() { return ToStringBuilder.reflectionToString(this); } /** * Creates a concrete Query instance from the given query data. 
* * @param objectFlag the flag indicating the type of the query * @param name the name of the query * @param rows the rows from the system query table containing the data * describing this query * @param objectId the unique object id of this query * * @return a Query instance for the given query data */ public static QueryImpl create(int objectFlag, String name, List<Row> rows, int objectId) { // remove other object flags before testing for query type int objTypeFlag = objectFlag & OBJECT_FLAG_MASK; if(objTypeFlag == 0) { // sometimes the query rows tell a different story short rowTypeFlag = getShortValue(getQueryType(rows), objTypeFlag); Type rowType = TYPE_MAP.get(rowTypeFlag); if((rowType != null) && (rowType.getObjectFlag() != objTypeFlag)) { // use row type instead of object flag type objTypeFlag = rowType.getObjectFlag(); } } try { switch(objTypeFlag) { case SELECT_QUERY_OBJECT_FLAG: return new SelectQueryImpl(name, rows, objectId, objectFlag); case MAKE_TABLE_QUERY_OBJECT_FLAG: return new MakeTableQueryImpl(name, rows, objectId, objectFlag); case APPEND_QUERY_OBJECT_FLAG: return new AppendQueryImpl(name, rows, objectId, objectFlag); case UPDATE_QUERY_OBJECT_FLAG: return new UpdateQueryImpl(name, rows, objectId, objectFlag); case DELETE_QUERY_OBJECT_FLAG: return new DeleteQueryImpl(name, rows, objectId, objectFlag); case CROSS_TAB_QUERY_OBJECT_FLAG: return new CrossTabQueryImpl(name, rows, objectId, objectFlag); case DATA_DEF_QUERY_OBJECT_FLAG: return new DataDefinitionQueryImpl(name, rows, objectId, objectFlag); case PASSTHROUGH_QUERY_OBJECT_FLAG: return new PassthroughQueryImpl(name, rows, objectId, objectFlag); case UNION_QUERY_OBJECT_FLAG: return new UnionQueryImpl(name, rows, objectId, objectFlag); default: // unknown querytype throw new IllegalStateException(withErrorContext( "unknown query object flag " + objTypeFlag, name)); } } catch(IllegalStateException e) { LOG.warn(withErrorContext("Failed parsing query", name), e); } // return unknown query 
return new UnknownQueryImpl(name, rows, objectId, objectFlag); } private static Short getQueryType(List<Row> rows) { return getFirstRowByAttribute(rows, TYPE_ATTRIBUTE).flag; } private static List<Row> getRowsByAttribute(List<Row> rows, Byte attribute) { List<Row> result = new ArrayList<Row>(); for(Row row : rows) { if(attribute.equals(row.attribute)) { result.add(row); } } return result; } private static Row getFirstRowByAttribute(List<Row> rows, Byte attribute) { for(Row row : rows) { if(attribute.equals(row.attribute)) { return row; } } return EMPTY_ROW; } protected Row getUniqueRow(List<Row> rows) { if(rows.size() == 1) { return rows.get(0); } if(rows.isEmpty()) { return EMPTY_ROW; } throw new IllegalStateException(withErrorContext( "Unexpected number of rows for" + rows)); } protected static List<Row> filterRowsByFlag( List<Row> rows, final short flag) { return new RowFilter() { @Override protected boolean keep(Row row) { return hasFlag(row, flag); } }.filter(rows); } protected static List<Row> filterRowsByNotFlag( List<Row> rows, final short flag) { return new RowFilter() { @Override protected boolean keep(Row row) { return !hasFlag(row, flag); } }.filter(rows); } protected static boolean hasFlag(Row row, int flagMask) { return((getShortValue(row.flag, 0) & flagMask) != 0); } protected static short getShortValue(Short s, int def) { return ((s != null) ? (short)s : (short)def); } protected static int getIntValue(Integer i, int def) { return ((i != null) ? (int)i : def); } protected static StringBuilder toOptionalQuotedExpr(StringBuilder builder, String fullExpr, boolean isIdentifier) { String[] exprs = (isIdentifier ? 
IDENTIFIER_SEP_PAT.split(fullExpr) : new String[]{fullExpr}); for(int i = 0; i < exprs.length; ++i) { String expr = exprs[i]; if(QUOTABLE_CHAR_PAT.matcher(expr).find()) { toQuotedExpr(builder, expr); } else { builder.append(expr); } if(i < (exprs.length - 1)) { builder.append(IDENTIFIER_SEP_CHAR); } } return builder; } protected static StringBuilder toQuotedExpr(StringBuilder builder, String expr) { return (!isQuoted(expr) ? builder.append('[').append(expr).append(']') : builder.append(expr)); } protected static boolean isQuoted(String expr) { return ((expr.length() >= 2) && (expr.charAt(0) == '[') && (expr.charAt(expr.length() - 1) == ']')); } protected static StringBuilder toRemoteDb(StringBuilder builder, String remoteDbPath, String remoteDbType) { if((remoteDbPath != null) || (remoteDbType != null)) { // note, always include path string, even if empty builder.append(" IN '"); if(remoteDbPath != null) { builder.append(remoteDbPath); } builder.append('\''); if(remoteDbType != null) { builder.append(" [").append(remoteDbType).append(']'); } } return builder; } protected static StringBuilder toAlias(StringBuilder builder, String alias) { if(alias != null) { toOptionalQuotedExpr(builder.append(" AS "), alias, false); } return builder; } private String withErrorContext(String msg) { return withErrorContext(msg, getName()); } private static String withErrorContext(String msg, String queryName) { return msg + " (Query: " + queryName + ")"; } private static final class UnknownQueryImpl extends QueryImpl { private UnknownQueryImpl(String name, List<Row> rows, int objectId, int objectFlag) { super(name, rows, objectId, objectFlag, Type.UNKNOWN); } @Override protected void toSQLString(StringBuilder builder) { throw new UnsupportedOperationException(); } } /** * Struct containing the information from a single row of the system query * table. 
*/
public static final class Row
{
  // row id of the backing record in the system query table; may be null for
  // rows built purely in code (see the no-arg constructor)
  private final RowId _id;
  public final Byte attribute;
  public final String expression;
  public final Short flag;
  public final Integer extra;
  public final String name1;
  public final String name2;
  public final Integer objectId;
  public final byte[] order;

  /** Creates an all-null placeholder row. */
  private Row() {
    this._id = null;
    this.attribute = null;
    this.expression = null;
    this.flag = null;
    this.extra = null;
    this.name1 = null;
    this.name2 = null;
    this.objectId = null;
    this.order = null;
  }

  /**
   * Creates a Row from a raw row read from the system query table, pulling
   * each typed column value out by its COL_* column name.
   */
  public Row(com.healthmarketscience.jackcess.Row tableRow) {
    this(tableRow.getId(),
         tableRow.getByte(COL_ATTRIBUTE),
         tableRow.getString(COL_EXPRESSION),
         tableRow.getShort(COL_FLAG),
         tableRow.getInt(COL_EXTRA),
         tableRow.getString(COL_NAME1),
         tableRow.getString(COL_NAME2),
         tableRow.getInt(COL_OBJECTID),
         tableRow.getBytes(COL_ORDER));
  }

  /** Creates a Row from explicit column values. */
  public Row(RowId id, Byte attribute, String expression, Short flag,
             Integer extra, String name1, String name2, Integer objectId,
             byte[] order)
  {
    this._id = id;
    this.attribute = attribute;
    this.expression = expression;
    this.flag = flag;
    this.extra = extra;
    this.name1 = name1;
    this.name2 = name2;
    this.objectId = objectId;
    this.order = order;
  }

  /**
   * Converts this Row back into a raw table row keyed by the COL_* column
   * names (the inverse of the table-row constructor).
   */
  public com.healthmarketscience.jackcess.Row toTableRow() {
    com.healthmarketscience.jackcess.Row tableRow = new RowImpl((RowIdImpl)_id);
    tableRow.put(COL_ATTRIBUTE, attribute);
    tableRow.put(COL_EXPRESSION, expression);
    tableRow.put(COL_FLAG, flag);
    tableRow.put(COL_EXTRA, extra);
    tableRow.put(COL_NAME1, name1);
    tableRow.put(COL_NAME2, name2);
    tableRow.put(COL_OBJECTID, objectId);
    tableRow.put(COL_ORDER, order);
    return tableRow;
  }

  @Override
  public String toString() {
    return ToStringBuilder.reflectionToString(this);
  }
}

/**
 * Formats a list of Rows into a list of strings, one string per row.
 * Subclasses supply the per-row rendering via
 * {@link #format(StringBuilder,Row)}.
 */
protected static abstract class RowFormatter
{
  private final List<Row> _list;

  protected RowFormatter(List<Row> list) {
    _list = list;
  }

  /** Formats every row into a new AppendableList. */
  public List<String> format() {
    return format(new AppendableList<String>());
  }

  /** Formats every row, appending each rendered string to the given list. */
  public List<String> format(List<String> strs) {
    for(Row row : _list) {
      StringBuilder builder = new StringBuilder();
      format(builder, row);
      strs.add(builder.toString());
    }
    return strs;
  }

  /** Writes the string form of the given row into the given builder. */
  protected abstract void format(StringBuilder builder, Row row);
}

/**
 * Destructively filters a list of Rows, keeping only those accepted by
 * {@link #keep}.
 */
protected static abstract class RowFilter
{
  protected RowFilter() {
  }

  /** Removes (in place) every row for which {@link #keep} returns false. */
  public List<Row> filter(List<Row> list) {
    for(Iterator<Row> iter = list.iterator(); iter.hasNext(); ) {
      if(!keep(iter.next())) {
        iter.remove();
      }
    }
    return list;
  }

  /** Returns {@code true} to retain the given row, {@code false} to drop it. */
  protected abstract boolean keep(Row row);
}

/**
 * ArrayList whose toString() joins the elements with a configurable
 * separator (", " by default) instead of the usual bracketed list form.
 */
protected static class AppendableList<E> extends ArrayList<E>
{
  private static final long serialVersionUID = 0L;

  protected AppendableList() {
  }

  protected AppendableList(Collection<? extends E> c) {
    super(c);
  }

  /** Separator inserted between elements in toString(); subclasses override. */
  protected String getSeparator() {
    return ", ";
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    for(Iterator<E> iter = iterator(); iter.hasNext(); ) {
      builder.append(iter.next().toString());
      if(iter.hasNext()) {
        builder.append(getSeparator());
      }
    }
    return builder.toString();
  }
}

/**
 * Base type of something which provides table data in a query
 */
private static abstract class TableSource
{
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    toString(sb, true);
    return sb.toString();
  }

  /** Writes this source's SQL form; isTopLevel controls parenthesization. */
  protected abstract void toString(StringBuilder sb, boolean isTopLevel);

  /** Returns true if this source (or any nested source) is the given table. */
  public abstract boolean containsTable(String table);

  /**
   * Attempts to absorb the given join condition into this source; returns
   * true if it was absorbed (only possible for a Join of the same type).
   */
  public abstract boolean sameJoin(short type, String on);
}

/**
 * Table data provided by a single table expression.
 */
private static final class SimpleTable extends TableSource
{
  private final String _tableName;
  private final String _tableExpr;

  private SimpleTable(String tableName) {
    // quote the table name if it contains any special characters
    this(tableName, toOptionalQuotedExpr(
             new StringBuilder(), tableName, true).toString());
  }

  private SimpleTable(String tableName, String tableExpr) {
    _tableName = tableName;
    _tableExpr = tableExpr;
  }

  @Override
  protected void toString(StringBuilder sb, boolean isTopLevel) {
    sb.append(_tableExpr);
  }

  @Override
  public boolean containsTable(String table) {
    return _tableName.equalsIgnoreCase(table);
  }

  @Override
  public boolean sameJoin(short type, String on) {
    // a plain table expression is never itself a join
    return false;
  }
}

/**
 * Table data provided by a join expression.
 */
private final class Join extends TableSource
{
  private final TableSource _from;
  private final TableSource _to;
  private final short _jType;
  // combine all the join expressions with "AND"
  private final List<String> _on = new AppendableList<String>() {
    private static final long serialVersionUID = 0L;
    @Override
    protected String getSeparator() {
      return ") AND (";
    }
  };

  private Join(TableSource from, TableSource to, short type, String on) {
    _from = from;
    _to = to;
    _jType = type;
    _on.add(on);
  }

  @Override
  protected void toString(StringBuilder sb, boolean isTopLevel) {
    String joinType = JOIN_TYPE_MAP.get(_jType);
    if(joinType == null) {
      throw new IllegalStateException(withErrorContext(
              "Unknown join type " + _jType));
    }

    // nested joins are parenthesized to preserve evaluation order
    if(!isTopLevel) {
      sb.append("(");
    }
    _from.toString(sb, false);
    sb.append(joinType);
    _to.toString(sb, false);
    sb.append(" ON ");
    // multiple ON conditions are each wrapped in parens and AND'd together
    // (the AppendableList separator supplies ") AND (")
    boolean multiOnExpr = (_on.size() > 1);
    if(multiOnExpr) {
      sb.append("(");
    }
    sb.append(_on);
    if(multiOnExpr) {
      sb.append(")");
    }
    if(!isTopLevel) {
      sb.append(")");
    }
  }

  @Override
  public boolean containsTable(String table) {
    return _from.containsTable(table) || _to.containsTable(table);
  }

  @Override
  public boolean sameJoin(short type, String on) {
    if(_jType == type) {
      // note, AND conditions are added in _reverse_ order
      _on.add(0, on);
      return true;
    }
    return false;
  }
}
}
/* JASON-Project-AI * Copyright (C) 2005, Charles Paulson, Brady Catherman, Jim Lowe * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package tictactoe; import game.*; import java.io.*; /** * Handles all the information related to a game of Tic Tac Toe. * * @author Brady Catherman * @version 1.0 */ public class TicTacToeGameState implements GameState, Serializable { //private static TreeElement root = null; //private TreeElement node = null; private boolean endGame; private int currentPlayer; private int emptySquares; private int winMask; private int board; /** * Creates a GameState for a TicTacToe game, and sets the starting * player to player 0. */ public TicTacToeGameState() { currentPlayer = 0; clear(); } /** * Returns true if this game has ended. * * @return True if the game has ended. */ public boolean isGameOver() { return endGame; } /** * Performs the given move on this field. This will perform checking and * alter the state of the existing game. If the move isn't valid then * the state of the game may not be valid. * * @param m the move that is being performed. * @return Nothing. 
*/ public void makeMove(Move m) { TicTacToeMove t = (TicTacToeMove) m; int x = t.getXpos(); int y = t.getYpos(); int boardOffset = 3 * x + y; int bitOffset = (boardOffset << 1) + currentPlayer; emptySquares--; board |= 1 << bitOffset; generateWinMask(); currentPlayer = 1 - currentPlayer; endGame = (0 == emptySquares) || (winMask != 0); } /** * Sets the winMask to reflect which board positions are part of the * win. */ private void generateWinMask() { final int DIAGONAL1 = 65793; final int DIAGONAL2 = 4368; final int VERTICAL = 4161; final int HORIZONTAL = 21; int tempboard = ~board >> currentPlayer; if (0 == (tempboard & DIAGONAL1)) winMask |= DIAGONAL1; if (0 == (tempboard & DIAGONAL2)) winMask |= DIAGONAL2; int vertical = VERTICAL; int horizontal = HORIZONTAL; for (int i = 0; i < 3; i++) { if (0 == (tempboard & vertical)) winMask |= vertical; if (0 == (tempboard & horizontal)) winMask |= horizontal; vertical <<= 2; horizontal <<= 6; } } /** * Evaluates the given move and returns a new GameState object that * reflects the changes. This is used for AI evaluation. The existing * GameState is uneffected by this call. If the move is not valid * then the state of the game may not be valid. * * @param m the move the be performed. * @return The new GameState */ public TicTacToeGameState evaluateMove(Move m) { TicTacToeGameState nextState = easyClone(); nextState.makeMove(m); return nextState; } /** * Creates a clone without having to worry about cloning exceptions. * * @return A new TicTacToeGameState which is identical to the current * state. */ public TicTacToeGameState easyClone() { TicTacToeGameState iThinkImACloneNow = new TicTacToeGameState(); iThinkImACloneNow.endGame = endGame; iThinkImACloneNow.currentPlayer = currentPlayer; iThinkImACloneNow.emptySquares = emptySquares; iThinkImACloneNow.winMask = winMask; iThinkImACloneNow.board = board; return iThinkImACloneNow; } /** * This returns the number of the next player. 
It is assumed that the * return value is correct and not out of bounds or negative. If an * invalid number is returned then the stability of the Game object * is not insured. * * @return The number of the next player. */ public int nextPlayer() { return currentPlayer; } /** * Evaluates a field and returns a numeric score. This score will be * compared against other GameState objects of this same type. Higher * scores are to be considered better, lower are worse. * * @return a numeric score that ranks this game. */ public int gameRank(int player) { if (0 == winMask) return 0; // No Winner. int value = 1 + emptySquares; return (player==currentPlayer) ? -value : value; } /** * Returns a list of all of the valid moves for this field. This is used * by the AI function to loop through and search for the best move that * can be made. * * @return An array of Move objects. */ public Move[] validMoves() { Move[] moves = new Move[emptySquares]; int mask = 3; int pos = 0; for (int i = 0; i < emptySquares; i++, pos++, mask<<=2) { while ((board & mask) != 0) { mask <<= 2; pos++; } moves[i] = new TicTacToeMove(pos / 3, pos % 3); } return moves; } /** * Returns true if and only if the given move is valid. * * @param m The move to test for validity. * @return True if m is a valid move. False otherwise. */ public boolean isValidMove(TicTacToeMove m) { int x = m.getXpos(); int y = m.getYpos(); int bitOffset = (3 * x + y) << 1; return (0 == (board & (3 << bitOffset))); } /** * Clears the game state and returns it back to the starting condition. * * @return Nothing. */ public void clear() { emptySquares = 9; board = 0; winMask = 0; currentPlayer = 0; endGame = false; } /** * Returns the Field that is currently in use. The player number (1/2) * represents an X or an O, 0 represents an empty field. * * @return an Array representing this Field. 
*/ public int[][] getField() { int[][] field = new int[3][3]; int tempboard = board; for (int i = 0; i < 3; i++) for (int j = 0; j < 3; j++) { field[i][j] = tempboard & 3; tempboard >>= 2; } return field; } /** * Returns a boolean array of the winning positions on the field. This * is used by the display to show where the winning pieces are. * * @return a boolean array representing for each board position, * whether it should be highlighted when displaying the winning * positions. */ public boolean[][] getWinField() { boolean[][] wins = new boolean[3][3]; int mask = 1; for (int i = 0; i < 3; i++) for (int j = 0; j < 3; j++, mask <<= 2) wins[i][j] = (winMask & mask) != 0; return wins; } }
package com.tlf.msg.platform.domain; import java.util.ArrayList; import java.util.Date; import java.util.List; public class RouteExample { protected String orderByClause; protected boolean distinct; protected List<Criteria> oredCriteria; protected Integer begin; protected Integer end; protected Integer start; protected Integer rows; public RouteExample() { oredCriteria = new ArrayList<Criteria>(); } public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } public String getOrderByClause() { return orderByClause; } public void setDistinct(boolean distinct) { this.distinct = distinct; } public boolean isDistinct() { return distinct; } public List<Criteria> getOredCriteria() { return oredCriteria; } public void or(Criteria criteria) { oredCriteria.add(criteria); } public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } public void setBegin(Integer begin) { this.begin = begin; } public Integer getBegin() { return begin; } public void setEnd(Integer end) { this.end = end; } public Integer getEnd() { return end; } public void setStart(Integer start) { this.start = start; } public Integer getStart() { return start; } public void setRows(Integer rows) { this.rows = rows; } public Integer getRows() { return rows; } protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { 
return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andEntityOidIsNull() { addCriterion("ENTITY_OID is null"); return (Criteria) this; } public Criteria andEntityOidIsNotNull() { addCriterion("ENTITY_OID is not null"); return (Criteria) this; } public Criteria andEntityOidEqualTo(Long value) { addCriterion("ENTITY_OID =", value, "entityOid"); return (Criteria) this; } public Criteria andEntityOidNotEqualTo(Long value) { addCriterion("ENTITY_OID <>", value, "entityOid"); return (Criteria) this; } public Criteria andEntityOidGreaterThan(Long value) { addCriterion("ENTITY_OID >", value, "entityOid"); return (Criteria) this; } public Criteria andEntityOidGreaterThanOrEqualTo(Long value) { addCriterion("ENTITY_OID >=", value, "entityOid"); return (Criteria) this; } public Criteria andEntityOidLessThan(Long value) { addCriterion("ENTITY_OID <", value, "entityOid"); return (Criteria) this; } public Criteria andEntityOidLessThanOrEqualTo(Long value) { addCriterion("ENTITY_OID <=", value, "entityOid"); return (Criteria) this; } public Criteria andEntityOidIn(List<Long> values) { addCriterion("ENTITY_OID in", values, "entityOid"); return (Criteria) this; } public Criteria andEntityOidNotIn(List<Long> values) { addCriterion("ENTITY_OID not in", values, "entityOid"); return (Criteria) this; } public Criteria 
andEntityOidBetween(Long value1, Long value2) { addCriterion("ENTITY_OID between", value1, value2, "entityOid"); return (Criteria) this; } public Criteria andEntityOidNotBetween(Long value1, Long value2) { addCriterion("ENTITY_OID not between", value1, value2, "entityOid"); return (Criteria) this; } public Criteria andAppOidIsNull() { addCriterion("APP_OID is null"); return (Criteria) this; } public Criteria andAppOidIsNotNull() { addCriterion("APP_OID is not null"); return (Criteria) this; } public Criteria andAppOidEqualTo(Long value) { addCriterion("APP_OID =", value, "appOid"); return (Criteria) this; } public Criteria andAppOidNotEqualTo(Long value) { addCriterion("APP_OID <>", value, "appOid"); return (Criteria) this; } public Criteria andAppOidGreaterThan(Long value) { addCriterion("APP_OID >", value, "appOid"); return (Criteria) this; } public Criteria andAppOidGreaterThanOrEqualTo(Long value) { addCriterion("APP_OID >=", value, "appOid"); return (Criteria) this; } public Criteria andAppOidLessThan(Long value) { addCriterion("APP_OID <", value, "appOid"); return (Criteria) this; } public Criteria andAppOidLessThanOrEqualTo(Long value) { addCriterion("APP_OID <=", value, "appOid"); return (Criteria) this; } public Criteria andAppOidIn(List<Long> values) { addCriterion("APP_OID in", values, "appOid"); return (Criteria) this; } public Criteria andAppOidNotIn(List<Long> values) { addCriterion("APP_OID not in", values, "appOid"); return (Criteria) this; } public Criteria andAppOidBetween(Long value1, Long value2) { addCriterion("APP_OID between", value1, value2, "appOid"); return (Criteria) this; } public Criteria andAppOidNotBetween(Long value1, Long value2) { addCriterion("APP_OID not between", value1, value2, "appOid"); return (Criteria) this; } public Criteria andMasterChannelOidIsNull() { addCriterion("MASTER_CHANNEL_OID is null"); return (Criteria) this; } public Criteria andMasterChannelOidIsNotNull() { addCriterion("MASTER_CHANNEL_OID is not null"); 
return (Criteria) this; } public Criteria andMasterChannelOidEqualTo(Long value) { addCriterion("MASTER_CHANNEL_OID =", value, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidNotEqualTo(Long value) { addCriterion("MASTER_CHANNEL_OID <>", value, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidGreaterThan(Long value) { addCriterion("MASTER_CHANNEL_OID >", value, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidGreaterThanOrEqualTo(Long value) { addCriterion("MASTER_CHANNEL_OID >=", value, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidLessThan(Long value) { addCriterion("MASTER_CHANNEL_OID <", value, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidLessThanOrEqualTo(Long value) { addCriterion("MASTER_CHANNEL_OID <=", value, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidIn(List<Long> values) { addCriterion("MASTER_CHANNEL_OID in", values, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidNotIn(List<Long> values) { addCriterion("MASTER_CHANNEL_OID not in", values, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidBetween(Long value1, Long value2) { addCriterion("MASTER_CHANNEL_OID between", value1, value2, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelOidNotBetween(Long value1, Long value2) { addCriterion("MASTER_CHANNEL_OID not between", value1, value2, "masterChannelOid"); return (Criteria) this; } public Criteria andMasterChannelParamIsNull() { addCriterion("MASTER_CHANNEL_PARAM is null"); return (Criteria) this; } public Criteria andMasterChannelParamIsNotNull() { addCriterion("MASTER_CHANNEL_PARAM is not null"); return (Criteria) this; } public Criteria andMasterChannelParamEqualTo(String value) { addCriterion("MASTER_CHANNEL_PARAM =", value, "masterChannelParam"); 
return (Criteria) this; } public Criteria andMasterChannelParamNotEqualTo(String value) { addCriterion("MASTER_CHANNEL_PARAM <>", value, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamGreaterThan(String value) { addCriterion("MASTER_CHANNEL_PARAM >", value, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamGreaterThanOrEqualTo(String value) { addCriterion("MASTER_CHANNEL_PARAM >=", value, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamLessThan(String value) { addCriterion("MASTER_CHANNEL_PARAM <", value, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamLessThanOrEqualTo(String value) { addCriterion("MASTER_CHANNEL_PARAM <=", value, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamLike(String value) { addCriterion("MASTER_CHANNEL_PARAM like", value, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamNotLike(String value) { addCriterion("MASTER_CHANNEL_PARAM not like", value, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamIn(List<String> values) { addCriterion("MASTER_CHANNEL_PARAM in", values, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamNotIn(List<String> values) { addCriterion("MASTER_CHANNEL_PARAM not in", values, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamBetween(String value1, String value2) { addCriterion("MASTER_CHANNEL_PARAM between", value1, value2, "masterChannelParam"); return (Criteria) this; } public Criteria andMasterChannelParamNotBetween(String value1, String value2) { addCriterion("MASTER_CHANNEL_PARAM not between", value1, value2, "masterChannelParam"); return (Criteria) this; } public Criteria andBackupChannelOidIsNull() { addCriterion("BACKUP_CHANNEL_OID is null"); return (Criteria) this; } public Criteria 
andBackupChannelOidIsNotNull() { addCriterion("BACKUP_CHANNEL_OID is not null"); return (Criteria) this; } public Criteria andBackupChannelOidEqualTo(Long value) { addCriterion("BACKUP_CHANNEL_OID =", value, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidNotEqualTo(Long value) { addCriterion("BACKUP_CHANNEL_OID <>", value, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidGreaterThan(Long value) { addCriterion("BACKUP_CHANNEL_OID >", value, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidGreaterThanOrEqualTo(Long value) { addCriterion("BACKUP_CHANNEL_OID >=", value, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidLessThan(Long value) { addCriterion("BACKUP_CHANNEL_OID <", value, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidLessThanOrEqualTo(Long value) { addCriterion("BACKUP_CHANNEL_OID <=", value, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidIn(List<Long> values) { addCriterion("BACKUP_CHANNEL_OID in", values, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidNotIn(List<Long> values) { addCriterion("BACKUP_CHANNEL_OID not in", values, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidBetween(Long value1, Long value2) { addCriterion("BACKUP_CHANNEL_OID between", value1, value2, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelOidNotBetween(Long value1, Long value2) { addCriterion("BACKUP_CHANNEL_OID not between", value1, value2, "backupChannelOid"); return (Criteria) this; } public Criteria andBackupChannelParamIsNull() { addCriterion("BACKUP_CHANNEL_PARAM is null"); return (Criteria) this; } public Criteria andBackupChannelParamIsNotNull() { addCriterion("BACKUP_CHANNEL_PARAM is not null"); return (Criteria) this; } public Criteria andBackupChannelParamEqualTo(String 
value) { addCriterion("BACKUP_CHANNEL_PARAM =", value, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamNotEqualTo(String value) { addCriterion("BACKUP_CHANNEL_PARAM <>", value, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamGreaterThan(String value) { addCriterion("BACKUP_CHANNEL_PARAM >", value, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamGreaterThanOrEqualTo(String value) { addCriterion("BACKUP_CHANNEL_PARAM >=", value, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamLessThan(String value) { addCriterion("BACKUP_CHANNEL_PARAM <", value, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamLessThanOrEqualTo(String value) { addCriterion("BACKUP_CHANNEL_PARAM <=", value, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamLike(String value) { addCriterion("BACKUP_CHANNEL_PARAM like", value, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamNotLike(String value) { addCriterion("BACKUP_CHANNEL_PARAM not like", value, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamIn(List<String> values) { addCriterion("BACKUP_CHANNEL_PARAM in", values, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamNotIn(List<String> values) { addCriterion("BACKUP_CHANNEL_PARAM not in", values, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamBetween(String value1, String value2) { addCriterion("BACKUP_CHANNEL_PARAM between", value1, value2, "backupChannelParam"); return (Criteria) this; } public Criteria andBackupChannelParamNotBetween(String value1, String value2) { addCriterion("BACKUP_CHANNEL_PARAM not between", value1, value2, "backupChannelParam"); return (Criteria) this; } public Criteria andStatusIsNull() { 
addCriterion("STATUS is null"); return (Criteria) this; } public Criteria andStatusIsNotNull() { addCriterion("STATUS is not null"); return (Criteria) this; } public Criteria andStatusEqualTo(String value) { addCriterion("STATUS =", value, "status"); return (Criteria) this; } public Criteria andStatusNotEqualTo(String value) { addCriterion("STATUS <>", value, "status"); return (Criteria) this; } public Criteria andStatusGreaterThan(String value) { addCriterion("STATUS >", value, "status"); return (Criteria) this; } public Criteria andStatusGreaterThanOrEqualTo(String value) { addCriterion("STATUS >=", value, "status"); return (Criteria) this; } public Criteria andStatusLessThan(String value) { addCriterion("STATUS <", value, "status"); return (Criteria) this; } public Criteria andStatusLessThanOrEqualTo(String value) { addCriterion("STATUS <=", value, "status"); return (Criteria) this; } public Criteria andStatusLike(String value) { addCriterion("STATUS like", value, "status"); return (Criteria) this; } public Criteria andStatusNotLike(String value) { addCriterion("STATUS not like", value, "status"); return (Criteria) this; } public Criteria andStatusIn(List<String> values) { addCriterion("STATUS in", values, "status"); return (Criteria) this; } public Criteria andStatusNotIn(List<String> values) { addCriterion("STATUS not in", values, "status"); return (Criteria) this; } public Criteria andStatusBetween(String value1, String value2) { addCriterion("STATUS between", value1, value2, "status"); return (Criteria) this; } public Criteria andStatusNotBetween(String value1, String value2) { addCriterion("STATUS not between", value1, value2, "status"); return (Criteria) this; } public Criteria andCreatedByIsNull() { addCriterion("CREATED_BY is null"); return (Criteria) this; } public Criteria andCreatedByIsNotNull() { addCriterion("CREATED_BY is not null"); return (Criteria) this; } public Criteria andCreatedByEqualTo(String value) { addCriterion("CREATED_BY =", value, 
"createdBy"); return (Criteria) this; } public Criteria andCreatedByNotEqualTo(String value) { addCriterion("CREATED_BY <>", value, "createdBy"); return (Criteria) this; } public Criteria andCreatedByGreaterThan(String value) { addCriterion("CREATED_BY >", value, "createdBy"); return (Criteria) this; } public Criteria andCreatedByGreaterThanOrEqualTo(String value) { addCriterion("CREATED_BY >=", value, "createdBy"); return (Criteria) this; } public Criteria andCreatedByLessThan(String value) { addCriterion("CREATED_BY <", value, "createdBy"); return (Criteria) this; } public Criteria andCreatedByLessThanOrEqualTo(String value) { addCriterion("CREATED_BY <=", value, "createdBy"); return (Criteria) this; } public Criteria andCreatedByLike(String value) { addCriterion("CREATED_BY like", value, "createdBy"); return (Criteria) this; } public Criteria andCreatedByNotLike(String value) { addCriterion("CREATED_BY not like", value, "createdBy"); return (Criteria) this; } public Criteria andCreatedByIn(List<String> values) { addCriterion("CREATED_BY in", values, "createdBy"); return (Criteria) this; } public Criteria andCreatedByNotIn(List<String> values) { addCriterion("CREATED_BY not in", values, "createdBy"); return (Criteria) this; } public Criteria andCreatedByBetween(String value1, String value2) { addCriterion("CREATED_BY between", value1, value2, "createdBy"); return (Criteria) this; } public Criteria andCreatedByNotBetween(String value1, String value2) { addCriterion("CREATED_BY not between", value1, value2, "createdBy"); return (Criteria) this; } public Criteria andCreatedDateIsNull() { addCriterion("CREATED_DATE is null"); return (Criteria) this; } public Criteria andCreatedDateIsNotNull() { addCriterion("CREATED_DATE is not null"); return (Criteria) this; } public Criteria andCreatedDateEqualTo(Date value) { addCriterion("CREATED_DATE =", value, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateNotEqualTo(Date value) { 
addCriterion("CREATED_DATE <>", value, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateGreaterThan(Date value) { addCriterion("CREATED_DATE >", value, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateGreaterThanOrEqualTo(Date value) { addCriterion("CREATED_DATE >=", value, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateLessThan(Date value) { addCriterion("CREATED_DATE <", value, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateLessThanOrEqualTo(Date value) { addCriterion("CREATED_DATE <=", value, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateIn(List<Date> values) { addCriterion("CREATED_DATE in", values, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateNotIn(List<Date> values) { addCriterion("CREATED_DATE not in", values, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateBetween(Date value1, Date value2) { addCriterion("CREATED_DATE between", value1, value2, "createdDate"); return (Criteria) this; } public Criteria andCreatedDateNotBetween(Date value1, Date value2) { addCriterion("CREATED_DATE not between", value1, value2, "createdDate"); return (Criteria) this; } public Criteria andLastUpdByIsNull() { addCriterion("LAST_UPD_BY is null"); return (Criteria) this; } public Criteria andLastUpdByIsNotNull() { addCriterion("LAST_UPD_BY is not null"); return (Criteria) this; } public Criteria andLastUpdByEqualTo(String value) { addCriterion("LAST_UPD_BY =", value, "lastUpdBy"); return (Criteria) this; } public Criteria andLastUpdByNotEqualTo(String value) { addCriterion("LAST_UPD_BY <>", value, "lastUpdBy"); return (Criteria) this; } public Criteria andLastUpdByGreaterThan(String value) { addCriterion("LAST_UPD_BY >", value, "lastUpdBy"); return (Criteria) this; } public Criteria andLastUpdByGreaterThanOrEqualTo(String value) { addCriterion("LAST_UPD_BY >=", value, "lastUpdBy"); return (Criteria) this; } 
public Criteria andLastUpdByLessThan(String value) { addCriterion("LAST_UPD_BY <", value, "lastUpdBy"); return (Criteria) this; }
public Criteria andLastUpdByLessThanOrEqualTo(String value) { addCriterion("LAST_UPD_BY <=", value, "lastUpdBy"); return (Criteria) this; }
public Criteria andLastUpdByLike(String value) { addCriterion("LAST_UPD_BY like", value, "lastUpdBy"); return (Criteria) this; }
public Criteria andLastUpdByNotLike(String value) { addCriterion("LAST_UPD_BY not like", value, "lastUpdBy"); return (Criteria) this; }
public Criteria andLastUpdByIn(List<String> values) { addCriterion("LAST_UPD_BY in", values, "lastUpdBy"); return (Criteria) this; }
public Criteria andLastUpdByNotIn(List<String> values) { addCriterion("LAST_UPD_BY not in", values, "lastUpdBy"); return (Criteria) this; }
public Criteria andLastUpdByBetween(String value1, String value2) { addCriterion("LAST_UPD_BY between", value1, value2, "lastUpdBy"); return (Criteria) this; }
public Criteria andLastUpdByNotBetween(String value1, String value2) { addCriterion("LAST_UPD_BY not between", value1, value2, "lastUpdBy"); return (Criteria) this; }
// --- Criterion builders for the LAST_UPD_DATE column (java.util.Date-typed). ---
public Criteria andLastUpdDateIsNull() { addCriterion("LAST_UPD_DATE is null"); return (Criteria) this; }
public Criteria andLastUpdDateIsNotNull() { addCriterion("LAST_UPD_DATE is not null"); return (Criteria) this; }
public Criteria andLastUpdDateEqualTo(Date value) { addCriterion("LAST_UPD_DATE =", value, "lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateNotEqualTo(Date value) { addCriterion("LAST_UPD_DATE <>", value, "lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateGreaterThan(Date value) { addCriterion("LAST_UPD_DATE >", value, "lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateGreaterThanOrEqualTo(Date value) { addCriterion("LAST_UPD_DATE >=", value, "lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateLessThan(Date value) { addCriterion("LAST_UPD_DATE <", value,
"lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateLessThanOrEqualTo(Date value) { addCriterion("LAST_UPD_DATE <=", value, "lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateIn(List<Date> values) { addCriterion("LAST_UPD_DATE in", values, "lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateNotIn(List<Date> values) { addCriterion("LAST_UPD_DATE not in", values, "lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateBetween(Date value1, Date value2) { addCriterion("LAST_UPD_DATE between", value1, value2, "lastUpdDate"); return (Criteria) this; }
public Criteria andLastUpdDateNotBetween(Date value1, Date value2) { addCriterion("LAST_UPD_DATE not between", value1, value2, "lastUpdDate"); return (Criteria) this; }
// --- Generic, column-name-driven criterion builders (hand-added, not per-column). ---
// NOTE(review): "filed" is a typo for "field" throughout this API; left unchanged because
// renaming public parameters/identifiers could break existing callers.
// NOTE(review): the column name is concatenated into the SQL condition without escaping —
// these generic methods must only ever be called with trusted, hard-coded column names,
// never with user-supplied input.
public Criteria andIsNull(String filed) { addCriterion(filed + " is null"); return (Criteria) this; }
public Criteria andIsNotNull(String filed) { addCriterion(filed + " is not null"); return (Criteria) this; }
public Criteria andEqualTo(String filed, String value) { addCriterion(filed + " =", value, filed); return (Criteria) this; }
public Criteria andNotEqualTo(String filed, String value) { addCriterion(filed + " <>", value, filed); return (Criteria) this; }
public Criteria andGreaterThan(String filed, String value) { addCriterion(filed + " > ", value, filed); return (Criteria) this; }
public Criteria andGreaterThanOrEqualTo(String filed, String value) { addCriterion(filed + " >=", value, filed); return (Criteria) this; }
public Criteria andLessThan(String filed, String value) { addCriterion(filed + " <", value, filed); return (Criteria) this; }
public Criteria andLessThanOrEqualTo(String filed, String value) { addCriterion(filed + " <=", value, filed); return (Criteria) this; }
// NOTE(review): andIn/andNotIn take a single String, not a List — the value is treated as
// one operand by addCriterion; presumably callers pass a pre-rendered list expression.
// TODO confirm against the mapper XML.
public Criteria andIn(String filed, String value) { addCriterion(filed + " in", value, filed); return (Criteria) this; }
public Criteria andNotIn(String filed, String value) { addCriterion(filed + " not in", value, filed); return (Criteria) this; }
public Criteria andBetween(String filed, String value1, String value2) { addCriterion(filed + " between", value1, value2, filed); return (Criteria) this; }
public Criteria andNotBetween(String filed, String value1, String value2) { addCriterion(filed + " not between", value1, value2, filed); return (Criteria) this; }
public Criteria andLike(String filed, String value) { addCriterion(filed + " like", value, filed); return (Criteria) this; }
public Criteria andNotLike(String filed, String value) { addCriterion(filed + " not like", value, filed); return (Criteria) this; }
}

// Concrete criteria holder; exists as a separate subclass so hand-written helpers can be
// added without touching the generated GeneratedCriteria base.
public static class Criteria extends GeneratedCriteria {
    protected Criteria() {
        super();
    }
}

// One rendered SQL condition plus its operand(s); consumed by the MyBatis mapper XML.
public static class Criterion {
    private String condition;   // e.g. "CREATED_BY =" or "CREATED_BY is null"
    private Object value;       // first (or only) operand; a List for in/not-in conditions
    private Object secondValue; // upper bound for between/not-between conditions
    private boolean noValue;      // condition carries no operand (is null / is not null)
    private boolean singleValue;  // condition carries exactly one operand
    private boolean betweenValue; // condition carries two operands
    private boolean listValue;    // operand is a List (in / not in)
    private String typeHandler;
    public String getCondition() { return condition; }
    public Object getValue() { return value; }
    public Object getSecondValue() { return secondValue; }
    public boolean isNoValue() { return noValue; }
    public boolean isSingleValue() { return singleValue; }
    public boolean isBetweenValue() { return betweenValue; }
    public boolean isListValue() { return listValue; }
    public String getTypeHandler() { return typeHandler; }
    // No-operand condition, e.g. "X is null".
    protected Criterion(String condition) {
        super();
        this.condition = condition;
        this.typeHandler = null;
        this.noValue = true;
    }
    // Single-operand (or list-operand) condition with an explicit type handler.
    protected Criterion(String condition, Object value, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.typeHandler = typeHandler;
        if (value instanceof List<?>) {
            this.listValue = true;
        } else {
            this.singleValue = true;
        }
    }
    protected Criterion(String condition, Object value) {
        this(condition, value, null);
    }
    // Two-operand (between/not-between) condition with an explicit type handler.
    protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.secondValue = secondValue;
        this.typeHandler = typeHandler;
        this.betweenValue = true;
    }
    protected Criterion(String condition, Object value, Object secondValue) {
        this(condition, value, secondValue, null);
    }
}
}
/* * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.core.test; import com.hazelcast.cluster.Address; import com.hazelcast.config.NetworkConfig; import com.hazelcast.function.SupplierEx; import com.hazelcast.instance.BuildInfoProvider; import com.hazelcast.internal.serialization.SerializationService; import com.hazelcast.internal.serialization.SerializationServiceAware; import com.hazelcast.internal.serialization.impl.DefaultSerializationServiceBuilder; import com.hazelcast.internal.util.concurrent.BackoffIdleStrategy; import com.hazelcast.internal.util.concurrent.IdleStrategy; import com.hazelcast.jet.JetInstance; import com.hazelcast.jet.config.EdgeConfig; import com.hazelcast.jet.core.Processor; import com.hazelcast.jet.core.Processor.Context; import com.hazelcast.jet.core.ProcessorMetaSupplier; import com.hazelcast.jet.core.ProcessorSupplier; import com.hazelcast.jet.core.Watermark; import com.hazelcast.logging.ILogger; import com.hazelcast.logging.impl.LoggingServiceImpl; import com.hazelcast.spi.impl.SerializationServiceSupport; import javax.annotation.Nonnull; import java.net.UnknownHostException; import java.time.LocalTime; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.SortedMap; import java.util.TreeMap; import java.util.function.BiConsumer; import java.util.function.BiPredicate; import 
java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; import static com.hazelcast.function.FunctionEx.identity; import static com.hazelcast.internal.util.Preconditions.checkNotNegative; import static com.hazelcast.jet.core.test.JetAssert.assertEquals; import static com.hazelcast.jet.core.test.JetAssert.assertFalse; import static com.hazelcast.jet.core.test.JetAssert.assertTrue; import static com.hazelcast.jet.impl.util.ExceptionUtil.sneakyThrow; import static com.hazelcast.jet.impl.util.Util.subtractClamped; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static java.util.concurrent.TimeUnit.MICROSECONDS; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.NANOSECONDS; import static java.util.stream.Collectors.toMap; /** * A utility to test processors. It will initialize the processor instance, * pass input items to it and assert the outbox contents. 
* <p> * The test process does the following: * <ul> * <li>initializes the processor by calling {@link Processor#init} * * <li>does snapshot or snapshot+restore (optional, see below) * * <li>calls {@link Processor#process}, in two scenarios:<ul> * <li>the inbox contains one input item</li> * <li>the inbox contains all input items (if snapshots are not restored)</li> * </ul> * * <li>every time the inbox gets empty does snapshot or snapshot+restore * * <li>{@linkplain #disableCompleteCall() optionally} calls {@link * Processor#complete()} until it returns {@code true} or until the * {@linkplain #runUntilOutputMatches output matches} (for streaming * sources) * * <li>does snapshot or snapshot+restore each time the {@code complete()} * method returned {@code false} and made a progress * </ul> * * The {@code init()} and {@code complete()} methods of {@link * ProcessorSupplier} and {@link ProcessorMetaSupplier} are called if you call * the {@link #verifyProcessor} using one of these. * * <h3>Snapshot &amp; restore</h3> * The {@link #disableSnapshots() optional} snapshot+restore test procedure: * <ul> * <li>{@code saveToSnapshot()} is called. If we are not doing restore, this * is the last step. * * <li>new processor instance is created, from now on only this * instance will be used * * <li>snapshot is restored using {@code restoreFromSnapshot()} * * <li>{@code finishSnapshotRestore()} is called * </ul> * * <h3>Watermark handling</h3> * The input can contain {@link Watermark}s. They will be passed to the * {@link Processor#tryProcessWatermark} method. * * <h3>Progress assertion</h3> * For each call to any processing method the progress is asserted ({@link * #disableProgressAssertion() optional}). 
The processor must do at least one * of these: * <ul> * <li>take something from inbox * <li>put something to outbox * <li>for boolean-returning methods, returning {@code true} is * considered as making progress * </ul> * * <h3>Outbox rejection</h3> * A 1-capacity outbox will be provided, which will additionally be full in * every other call to {@code process()}. This will test the edge case: the * {@code process()} method is called even when the outbox is full to give * the processor a chance to process the inbox. The snapshot bucket will * also have capacity of 1. * * <h3>Cooperative processors</h3> * For cooperative processors, time spent in each call to processing method * must not exceed {@link #cooperativeTimeout(long)}. * * <h3>Non-covered cases</h3> * This class does not cover these cases: * <ul> * <li>Checking that the state of a stateful processor is empty at the * end (you can do that yourself afterwards with the last instance * returned from your supplier). * <li>This utility never calls {@link Processor#tryProcess()}. * </ul> * <p/> * * <h3>Example usage</h3> * This will test one of the jet-provided processors: * <pre>{@code * TestSupport.verifyProcessor(Processors.map((String s) -> s.toUpperCase())) * .disableCompleteCall() // enabled by default * .disableLogging() // enabled by default * .disableProgressAssertion() // enabled by default * .disableSnapshots() // enabled by default * .cooperativeTimeout(<timeoutInMs>) // default is 1000 * .outputChecker(<function>) // default is `Objects::equal` * .input(asList("foo", "bar")) // default is `emptyList()` * .expectOutput(asList("FOO", "BAR")); * }</pre> * * @since 3.0 */ public final class TestSupport { /** * An output checker that will claim actual and expected object lists as * equal if they both contain the same items, in any order. If some item is * expected multiple times, it must also be present the same number of * times in the actual output. 
* <p>
* Use as an argument for {@link #outputChecker(BiPredicate)}.
*/
public static final BiPredicate<List<?>, List<?>> SAME_ITEMS_ANY_ORDER = (expected, actual) -> {
    if (expected.size() != actual.size()) { // shortcut
        return false;
    }
    // Compare as multisets: map each distinct item to its occurrence count.
    Map<Object, Integer> expectedMap = expected.stream().collect(toMap(identity(), e -> 1, Integer::sum));
    Map<Object, Integer> actualMap = actual.stream().collect(toMap(identity(), e -> 1, Integer::sum));
    return expectedMap.equals(actualMap);
};

private static final Address LOCAL_ADDRESS;
// 1ms should be enough for a cooperative call. We warn when it takes more than
// COOPERATIVE_TIME_LIMIT_MS_WARN (5ms) and fail when it takes more than
// COOPERATIVE_TIME_LIMIT_MS_FAIL (5000ms); the generous failure limit allows for other
// activity in the system, such as tests running in parallel or a GC.
private static final long COOPERATIVE_TIME_LIMIT_MS_FAIL = 5_000;
private static final long COOPERATIVE_TIME_LIMIT_MS_WARN = 5;
// Non-cooperative (blocking) calls only ever produce a warning, above this limit.
private static final long BLOCKING_TIME_LIMIT_MS_WARN = 10000;
private static final LoggingServiceImpl LOGGING_SERVICE = new LoggingServiceImpl(
        "test-group", null, BuildInfoProvider.getBuildInfo(), true
);

static {
    try {
        LOCAL_ADDRESS = new Address("localhost", NetworkConfig.DEFAULT_PORT);
    } catch (UnknownHostException e) {
        throw new RuntimeException(e);
    }
}

private ProcessorMetaSupplier metaSupplier;
private ProcessorSupplier supplier;
private List<List<?>> inputs = emptyList();
private int[] priorities = {};
// Feature toggles, all enabled by default (see the disableXxx() methods).
private boolean assertProgress = true;
private boolean doSnapshots = true;
private boolean logInputOutput = true;
private boolean callComplete = true;
private int outputOrdinalCount;
private Runnable beforeEachRun = () -> { };
private int localProcessorIndex;
private int globalProcessorIndex;
private int localParallelism = 1;
private int totalParallelism = 1;
private JetInstance jetInstance;
private long cooperativeTimeout = COOPERATIVE_TIME_LIMIT_MS_FAIL;
// -1 means "run complete() until it returns true" rather than until the output matches.
private long runUntilOutputMatchesTimeoutMillis = -1;
private long runUntilOutputMatchesExtraTimeMillis;
private BiConsumer<TestMode, List<List<Object>>> assertOutputFn;
private BiPredicate<? super List<?>, ? super List<?>> outputChecker = Objects::equals;

private TestSupport(@Nonnull ProcessorMetaSupplier metaSupplier) {
    this.metaSupplier = metaSupplier;
}

/**
 * @param supplier a processor supplier that creates processor instances
 */
public static TestSupport verifyProcessor(@Nonnull SupplierEx<Processor> supplier) {
    return new TestSupport(ProcessorMetaSupplier.of(supplier));
}

/**
 * @param supplier a processor supplier that creates processor instances
 */
public static TestSupport verifyProcessor(@Nonnull ProcessorSupplier supplier) {
    return new TestSupport(ProcessorMetaSupplier.of(supplier));
}

/**
 * @param supplier a processor supplier that creates processor instances
 */
public static TestSupport verifyProcessor(@Nonnull ProcessorMetaSupplier supplier) {
    return new TestSupport(supplier);
}

/**
 * Sets the input objects for processor.
 * <p>
 * The {@code input} can contain {@link Watermark}s;
 * they will be delivered to the {@link Processor#tryProcessWatermark}
 * method.
 * <p>
 * Defaults to empty list.
 *
 * @return {@code this} instance for fluent API
 */
public TestSupport input(@Nonnull List<?> input) {
    this.inputs = singletonList(input);
    this.priorities = new int[]{0};
    return this;
}

/**
 * Sets the input objects for the processor on multiple input ordinals.
 * Items will be passed to the processor in round-robin fashion:
 * item0 from input0, item0 from input1, item1 from input0 etc.
 * <p>
 * See also:<ul>
 *     <li>{@link #input(List)} - if you have just one input ordinal
 *     <li>{@link #inputs(List, int[])} - if you want to specify input
 *     priorities
 * </ul>
 *
 * @param inputs one list of input items for each input edge
 * @return {@code this} instance for fluent API
 */
public TestSupport inputs(@Nonnull List<List<?>> inputs) {
    return inputs(inputs, new int[inputs.size()]);
}

/**
 * Sets the input objects for the processor on multiple input ordinals.
 * Items will be passed to the processor according to priority: lower
 * is higher.
If two inputs have the same priority, they will be passed in
* round-robin fashion.
* <p>
* See also:<ul>
*     <li>{@link #input(List)} - if you have just one input ordinal
*     <li>{@link #inputs(List)} - if all inputs are of equal priority
* </ul>
*
* @param inputs one list of input items for each input edge
* @return {@code this} instance for fluent API
*/
public TestSupport inputs(@Nonnull List<List<?>> inputs, int[] priorities) {
    // One priority entry per input ordinal; a mismatch is a caller error.
    if (inputs.size() != priorities.length) {
        throw new IllegalArgumentException("Number of inputs must be equal to number of priorities");
    }
    this.inputs = inputs;
    this.priorities = priorities;
    return this;
}

/**
 * Sets the expected output and runs the test.
 * <p>
 * The {@code expectedOutput} can contain {@link
 * com.hazelcast.jet.core.Watermark}s. Each Watermark in the input will be
 * found in the output, as well as other watermarks the processor emits.
 *
 * @throws AssertionError If some assertion does not hold.
 */
public void expectOutput(@Nonnull List<?> expectedOutput) {
    expectOutputs(singletonList(expectedOutput));
}

/**
 * Specifies the expected outputs and runs the test.
 * <p>
 * The {@code expectedOutput} can contain {@link Watermark}s to assert the
 * watermarks emitted by the processor.
 *
 * @param expectedOutputs one list for each output ordinal
 * @throws AssertionError if some assertion does not hold
 */
public void expectOutputs(@Nonnull List<List<?>> expectedOutputs) {
    assertOutput(
            expectedOutputs.size(),
            (mode, actual) -> assertExpectedOutput(mode, expectedOutputs, actual)
    );
}

/**
 * Runs the test with the specified custom assertion.
 * <p>
 * The consumer takes a list of collected output and the current test mode which
 * can be used in the assertion message.
 *
 * @param outputOrdinalCount how many output ordinals should be created
 * @param assertFn an assertion function which takes the current mode and the collected output
 */
public void assertOutput(int outputOrdinalCount, BiConsumer<TestMode, List<List<Object>>> assertFn) {
    assertOutputFn = assertFn;
    this.outputOrdinalCount = outputOrdinalCount;
    try {
        // Initialize the meta-supplier and supplier chain the way a real job would.
        TestProcessorMetaSupplierContext metaSupplierContext = new TestProcessorMetaSupplierContext();
        if (jetInstance != null) {
            metaSupplierContext.setJetInstance(jetInstance);
        }
        metaSupplier.init(metaSupplierContext);
        Address address = jetInstance != null
                ? jetInstance.getHazelcastInstance().getCluster().getLocalMember().getAddress()
                : LOCAL_ADDRESS;
        supplier = metaSupplier.get(singletonList(address)).apply(address);
        TestProcessorSupplierContext supplierContext = new TestProcessorSupplierContext();
        if (jetInstance != null) {
            supplierContext.setJetInstance(jetInstance);
        }
        supplier.init(supplierContext);
        // No snapshots, one-item inboxes.
        runTest(new TestMode(false, 0, 1));
        if (inputs.stream().mapToInt(List::size).sum() > 0) {
            // only run this version if there is an input
            runTest(new TestMode(false, 0, EdgeConfig.DEFAULT_QUEUE_SIZE));
        }
        if (doSnapshots) {
            // Snapshot after each inbox, restoring every time, every 2nd time, and never.
            runTest(new TestMode(true, 1, 1));
            runTest(new TestMode(true, 2, 1));
            runTest(new TestMode(true, Integer.MAX_VALUE, 1));
        }
        supplier.close(null);
    } catch (Exception e) {
        throw sneakyThrow(e);
    }
}

/**
 * Disables checking of progress of processing methods (see {@link
 * TestSupport class javadoc} for information on what is "progress").
 *
 * @return {@code this} instance for fluent API
 */
public TestSupport disableProgressAssertion() {
    this.assertProgress = false;
    return this;
}

/**
 * Normally, the {@code complete()} method is run repeatedly until it
 * returns {@code true}. This is suitable for processors processing the
 * input or for batch sources. However, if you test a streaming source, the
 * {@code complete()} method never returns {@code true}.
To be able to test
* such processors, this method allows you to change the behavior to run
* {@code complete()} until the output matches.
* <p>
* The {@code extraTimeMillis} parameter specifies an extra time to call
* {@code complete()} after the output matches. It can be used to ensure
* that no more items are produced after the output matches.
* <p>
* Has no effect if calling {@code complete()} is {@linkplain
* #disableCompleteCall() disabled}.
*
* @param timeoutMillis maximum time to wait for the output to match
* @param extraTimeMillis for how long to call {@code complete()}
*                        after the output matches
* @return {@code this} instance for fluent API
*/
public TestSupport runUntilOutputMatches(long timeoutMillis, long extraTimeMillis) {
    checkNotNegative(timeoutMillis, "timeoutMillis must be >= 0");
    checkNotNegative(extraTimeMillis, "extraTimeMillis must be >= 0");
    this.runUntilOutputMatchesTimeoutMillis = timeoutMillis;
    this.runUntilOutputMatchesExtraTimeMillis = extraTimeMillis;
    return this;
}

/**
 * Disable snapshot save and restore before first item and after each
 * {@code process()} and {@code complete()} call.
 *
 * @return {@code this} instance for fluent API
 */
public TestSupport disableSnapshots() {
    this.doSnapshots = false;
    return this;
}

/**
 * Disables logging of input and output objects. Normally they are logged
 * as they are processed to standard output.
 *
 * @return {@code this} instance for fluent API
 */
public TestSupport disableLogging() {
    this.logInputOutput = false;
    return this;
}

/**
 * Disables calling {@code complete()} method during the test. Suitable for
 * testing of streaming processors to make sure that the flushing code in
 * {@code complete()} method is not executed.
 *
 * @return {@code this} instance for fluent API
 */
public TestSupport disableCompleteCall() {
    this.callComplete = false;
    return this;
}

/**
 * If {@code timeout > 0}, the test will fail if any call to processing
 * method in a cooperative processor exceeds this timeout. Has no effect
 * for non-cooperative processors.
 * <p>
 * Default value is {@link #COOPERATIVE_TIME_LIMIT_MS_FAIL} ms. Useful to
 * set to 0 during debugging.
 *
 * @return {@code this} instance for fluent API
 */
public TestSupport cooperativeTimeout(long timeout) {
    this.cooperativeTimeout = timeout;
    return this;
}

/**
 * Sets the localProcessorIndex for the Processor
 *
 * @param localProcessorIndex localProcessorIndex, defaults to 0
 */
public TestSupport localProcessorIndex(int localProcessorIndex) {
    this.localProcessorIndex = localProcessorIndex;
    return this;
}

/**
 * Sets the globalProcessorIndex for the Processor
 *
 * @param globalProcessorIndex globalProcessorIndex, default to 0
 */
public TestSupport globalProcessorIndex(int globalProcessorIndex) {
    this.globalProcessorIndex = globalProcessorIndex;
    return this;
}

/**
 * Sets the localParallelism for the Processor
 *
 * @param localParallelism localParallelism, defaults to 1
 */
public TestSupport localParallelism(int localParallelism) {
    this.localParallelism = localParallelism;
    return this;
}

/**
 * Sets the totalParallelism for the Processor
 *
 * @param totalParallelism totalParallelism, defaults to 1
 */
public TestSupport totalParallelism(int totalParallelism) {
    this.totalParallelism = totalParallelism;
    return this;
}

/**
 * Predicate to compare expected and actual output. Parameters to the
 * {@code BiPredicate} are the list of expected items and the list of actual
 * processor output.
 * <p>
 * Defaults to {@code Objects::equals}, which will pass, if both lists
 * contain equal objects in the same order. If the ordering doesn't matter,
 * you can use {@link #SAME_ITEMS_ANY_ORDER}.
 *
 * @return {@code this} instance for fluent API
 */
public TestSupport outputChecker(@Nonnull BiPredicate<? super List<?>, ?
super List<?>> outputChecker) {
    this.outputChecker = outputChecker;
    return this;
}

/**
 * Use the given instance for {@link Context#jetInstance()}
 *
 * @return {@code this} instance for fluent API
 */
public TestSupport jetInstance(@Nonnull JetInstance jetInstance) {
    this.jetInstance = jetInstance;
    return this;
}

/**
 * Execute test before each test run
 *
 * @param runnable runnable to be executed before each test run
 * @return {@code this} instance for fluent API
 */
public TestSupport executeBeforeEachRun(Runnable runnable) {
    this.beforeEachRun = runnable;
    return this;
}

// Runs one full test scenario (one TestMode) against a fresh processor instance.
private void runTest(TestMode testMode) throws Exception {
    beforeEachRun.run();
    assert testMode.isSnapshotsEnabled() || testMode.snapshotRestoreInterval() == 0
            : "Illegal combination: don't do snapshots, but do restore";
    boolean doSnapshots = testMode.doSnapshots;
    int doRestoreEvery = testMode.restoreInterval;
    IdleStrategy idler = new BackoffIdleStrategy(0, 0, MICROSECONDS.toNanos(1), MILLISECONDS.toNanos(1));
    int idleCount = 0;
    System.out.println("### Running the test, mode=" + testMode.toString());
    TestInbox inbox = new TestInbox();
    int inboxOrdinal = -1;
    // Single-element arrays so the snapshot+restore step can swap in a new instance.
    Processor[] processor = {newProcessorFromSupplier()};
    boolean isCooperative = processor[0].isCooperative();
    // we'll use 1-capacity outbox to test outbox rejection
    TestOutbox[] outbox = {createOutbox()};
    List<List<Object>> actualOutputs = new ArrayList<>(outputOrdinalCount);
    for (int i = 0; i < outputOrdinalCount; i++) {
        actualOutputs.add(new ArrayList<>());
    }
    // create instance of your processor and call the init() method
    initProcessor(processor[0], outbox[0]);
    int[] restoreCount = {0};
    // do snapshot+restore before processing any item.
    // This will test saveToSnapshot() in this edge case
    snapshotAndRestore(processor, outbox, actualOutputs, doSnapshots, doRestoreEvery, restoreCount);
    // call the process() method
    List<ObjectWithOrdinal> input = mixInputs(inputs, priorities);
    int inputPosition = 0;
    while (inputPosition < input.size() || !inbox.isEmpty()) {
        if (inbox.isEmpty() && inputPosition < input.size()) {
            inboxOrdinal = input.get(inputPosition).ordinal;
            // Fill the inbox from the current ordinal, up to the mode's inbox limit;
            // a Watermark is always delivered in an inbox of its own.
            for (int added = 0;
                 inputPosition < input.size()
                         && added < testMode.inboxLimit()
                         && inboxOrdinal == input.get(inputPosition).ordinal
                         && (added == 0 || !(input.get(inputPosition).item instanceof Watermark));
                 added++
            ) {
                ObjectWithOrdinal objectWithOrdinal = input.get(inputPosition++);
                inbox.queue().add(objectWithOrdinal.item);
                inboxOrdinal = objectWithOrdinal.ordinal;
            }
            if (logInputOutput) {
                System.out.println(LocalTime.now() + " Input-" + inboxOrdinal + ": " + inbox);
            }
        }
        int lastInboxSize = inbox.size();
        String methodName;
        methodName = processInbox(inbox, inboxOrdinal, isCooperative, processor);
        // Progress = took something from the inbox or put something into the outbox.
        boolean madeProgress = inbox.size() < lastInboxSize
                || (outbox[0].bucketCount() > 0 && !outbox[0].queue(0).isEmpty());
        assertTrue(methodName + "() call without progress", !assertProgress || madeProgress);
        idleCount = idle(idler, idleCount, madeProgress);
        if (outbox[0].bucketCount() > 0 && outbox[0].queue(0).size() == 1 && !inbox.isEmpty()) {
            // if the outbox is full, call the process() method again. Cooperative
            // processor must be able to cope with this situation and not try to put
            // more items to the outbox.
            outbox[0].reset();
            processInbox(inbox, inboxOrdinal, isCooperative, processor);
        }
        outbox[0].drainQueuesAndReset(actualOutputs, logInputOutput);
        if (inbox.isEmpty()) {
            snapshotAndRestore(processor, outbox, actualOutputs, doSnapshots, doRestoreEvery, restoreCount);
        }
    }
    if (logInputOutput && !inputs.isEmpty()) {
        System.out.println(LocalTime.now() + " Input processed, calling complete()");
    }
    // call the complete() method
    if (callComplete) {
        long completeStart = System.nanoTime();
        long outputMatchedAt = Long.MAX_VALUE;
        boolean[] done = {false};
        do {
            doCall("complete", isCooperative, () -> done[0] = processor[0].complete());
            boolean madeProgress = done[0]
                    || (outbox[0].bucketCount() > 0 && !outbox[0].queue(0).isEmpty());
            assertTrue("complete() call without progress", !assertProgress || madeProgress);
            outbox[0].drainQueuesAndReset(actualOutputs, logInputOutput);
            if (outbox[0].hasUnfinishedItem()) {
                assertFalse("outbox has unfinished items, but complete() claims to be done", done[0]);
                outbox[0].block();
            } else {
                outbox[0].unblock();
                // Snapshot only after a productive, not-yet-done complete() call.
                snapshotAndRestore(processor, outbox, actualOutputs,
                        madeProgress && doSnapshots && !done[0], doRestoreEvery, restoreCount);
            }
            idleCount = idle(idler, idleCount, madeProgress);
            long now = System.nanoTime();
            if (runUntilOutputMatchesTimeoutMillis >= 0) {
                try {
                    assertOutputFn.accept(testMode, actualOutputs);
                    outputMatchedAt = Math.min(outputMatchedAt, now);
                } catch (AssertionError e) {
                    if (outputMatchedAt < Long.MAX_VALUE) {
                        throw new AssertionError("the output already matched, but doesn't match now", e);
                    }
                    // ignore the failure otherwise and continue calling complete()
                }
                long elapsedSinceStart = NANOSECONDS.toMillis(now - completeStart);
                long elapsedSinceMatch = NANOSECONDS.toMillis(subtractClamped(now, outputMatchedAt));
                if (elapsedSinceStart > runUntilOutputMatchesTimeoutMillis
                        || elapsedSinceMatch > runUntilOutputMatchesExtraTimeMillis) {
                    break;
                }
            }
        } while (!done[0]);
        assertTrue("complete returned true in a run-until-output-matches mode",
!done[0] || runUntilOutputMatchesTimeoutMillis <= 0);
    }
    processor[0].close();
    assertOutputFn.accept(testMode, actualOutputs);
}

// Compares each output ordinal's collected items against the expectation, using the
// configured outputChecker; failures are reported via assertEquals for a readable diff.
private void assertExpectedOutput(TestMode mode, List<List<?>> expected, List<List<Object>> actual) {
    for (int i = 0; i < expected.size(); i++) {
        List<?> expectedOutput = expected.get(i);
        List<?> actualOutput = actual.get(i);
        if (!outputChecker.test(expectedOutput, actualOutput)) {
            assertEquals("processor output in mode \"" + mode + "\" doesn't match",
                    listToString(expectedOutput), listToString(actualOutput));
        }
    }
}

private Processor newProcessorFromSupplier() {
    return supplier.get(1).iterator().next();
}

/**
 * Sorts the objects from multiple inputs into an order in which they will
 * be passed to processor, based on priorities.
 */
private static List<ObjectWithOrdinal> mixInputs(List<List<?>> inputs, int[] priorities) {
    // Group input ordinals by priority; TreeMap iterates lower (= higher) priority first.
    SortedMap<Integer, List<Integer>> ordinalsByPriority = new TreeMap<>();
    for (int i = 0; i < priorities.length; i++) {
        ordinalsByPriority.computeIfAbsent(priorities[i], k -> new ArrayList<>())
                          .add(i);
    }
    // Within one priority group, interleave the inputs round-robin, item index by item index.
    List<ObjectWithOrdinal> result = new ArrayList<>();
    for (List<Integer> ordinals : ordinalsByPriority.values()) {
        boolean allDone;
        int index = 0;
        do {
            allDone = true;
            for (Integer ordinal : ordinals) {
                if (inputs.get(ordinal).size() > index) {
                    Object item = inputs.get(ordinal).get(index);
                    result.add(new ObjectWithOrdinal(ordinal, item));
                    allDone = false;
                }
            }
            index++;
        } while (!allDone);
    }
    return result;
}

private TestOutbox createOutbox() {
    // 1-capacity bucket per output ordinal, plus a 1-capacity snapshot bucket.
    return new TestOutbox(IntStream.generate(() -> 1).limit(outputOrdinalCount).toArray(), 1);
}

// Dispatches the head of the inbox to tryProcessWatermark() or process(); returns the
// name of the method called, for progress-assertion messages.
private String processInbox(TestInbox inbox, int inboxOrdinal, boolean isCooperative, Processor[] processor) {
    if (inbox.peek() instanceof Watermark) {
        Watermark wm = ((Watermark) inbox.peek());
        doCall("tryProcessWatermark", isCooperative, () -> {
            if (processor[0].tryProcessWatermark(wm)) {
                inbox.remove();
            }
        });
        return "tryProcessWatermark";
    } else {
        doCall("process", isCooperative, () ->
                processor[0].process(inboxOrdinal, inbox));
        return "process";
    }
}

// Backs off (idles) when no progress was made; resets the backoff counter on progress.
private int idle(IdleStrategy idler, int idleCount, boolean madeProgress) {
    if (!madeProgress) {
        idler.idle(++idleCount);
    } else {
        idleCount = 0;
    }
    return idleCount;
}

// Saves the processor state to a snapshot and, every doRestoreEvery-th time, replaces the
// processor with a fresh instance restored from that snapshot. The arrays are in-out
// parameters: element 0 is replaced when a restore happens.
private void snapshotAndRestore(
        Processor[] processor,
        TestOutbox[] outbox,
        List<List<Object>> actualOutput,
        boolean doSnapshot,
        int doRestoreEvery,
        int[] restoreCount) throws Exception {
    if (!doSnapshot) {
        return;
    }
    restoreCount[0]++;
    boolean willRestore = restoreCount[0] % doRestoreEvery == 0;
    if (logInputOutput) {
        System.out.println(LocalTime.now() + (willRestore
                ? " Saving & restoring snapshot" : " Saving snapshot without restoring it"));
    }
    // save state of current processor
    TestInbox snapshotInbox = new TestInbox();
    boolean[] done = {false};
    boolean isCooperative = processor[0].isCooperative();
    do {
        doCall("saveSnapshot", isCooperative, () -> done[0] = processor[0].saveToSnapshot());
        assertTrue("saveToSnapshot() call without progress",
                !assertProgress || done[0] || !outbox[0].snapshotQueue().isEmpty()
                        || !outbox[0].queue(0).isEmpty());
        outbox[0].drainSnapshotQueueAndReset(snapshotInbox.queue(), false);
        outbox[0].drainQueuesAndReset(actualOutput, logInputOutput);
    } while (!done[0]);
    if (!willRestore) {
        return;
    }
    // restore state to new processor
    assert outbox[0].queue(0).isEmpty();
    assert outbox[0].snapshotQueue().isEmpty();
    processor[0].close();
    processor[0] = newProcessorFromSupplier();
    outbox[0] = createOutbox();
    initProcessor(processor[0], outbox[0]);
    int lastInboxSize = snapshotInbox.queue().size();
    while (!snapshotInbox.isEmpty()) {
        doCall("restoreSnapshot", isCooperative, () -> processor[0].restoreFromSnapshot(snapshotInbox));
        assertTrue("restoreFromSnapshot() call without progress",
                !assertProgress || lastInboxSize > snapshotInbox.queue().size()
                        || !outbox[0].queue(0).isEmpty());
        outbox[0].drainQueuesAndReset(actualOutput, logInputOutput);
        lastInboxSize = snapshotInbox.queue().size();
    }
    do {
        doCall("finishSnapshotRestore",
isCooperative, () -> done[0] = processor[0].finishSnapshotRestore()); assertTrue("finishSnapshotRestore() call without progress", !assertProgress || done[0] || !outbox[0].queue(0).isEmpty()); outbox[0].drainQueuesAndReset(actualOutput, logInputOutput); } while (!done[0]); } private void doCall(String methodName, boolean isCooperative, Runnable r) { long start = System.nanoTime(); r.run(); long elapsed = System.nanoTime() - start; if (isCooperative) { if (cooperativeTimeout > 0) { assertTrue(String.format("call to %s() took %.1fms, it should be <%dms", methodName, toMillis(elapsed), COOPERATIVE_TIME_LIMIT_MS_FAIL), elapsed < MILLISECONDS.toNanos(COOPERATIVE_TIME_LIMIT_MS_FAIL)); } // print warning if (elapsed > MILLISECONDS.toNanos(COOPERATIVE_TIME_LIMIT_MS_WARN)) { System.out.println(String.format("Warning: call to %s() took %.2fms, it should be <%dms normally", methodName, toMillis(elapsed), COOPERATIVE_TIME_LIMIT_MS_WARN)); } } else { if (elapsed > MILLISECONDS.toNanos(BLOCKING_TIME_LIMIT_MS_WARN)) { System.out.println(String.format("Warning: call to %s() took %.2fms in non-cooperative processor. 
Is " + "this expected?", methodName, toMillis(elapsed))); } } } private void initProcessor(Processor processor, TestOutbox outbox) { SerializationService serializationService; if (jetInstance != null && jetInstance.getHazelcastInstance() instanceof SerializationServiceSupport) { SerializationServiceSupport impl = (SerializationServiceSupport) jetInstance.getHazelcastInstance(); serializationService = impl.getSerializationService(); } else { serializationService = new DefaultSerializationServiceBuilder() .setManagedContext(e -> e) .build(); } TestProcessorContext context = new TestProcessorContext() .setLogger(getLogger(processor.getClass().getName())) .setManagedContext(serializationService.getManagedContext()) .setLocalProcessorIndex(localProcessorIndex) .setGlobalProcessorIndex(globalProcessorIndex) .setLocalParallelism(localParallelism) .setTotalParallelism(totalParallelism); if (jetInstance != null) { context.setJetInstance(jetInstance); } if (processor instanceof SerializationServiceAware) { ((SerializationServiceAware) processor).setSerializationService(serializationService); } try { processor.init(outbox, context); } catch (Exception e) { throw sneakyThrow(e); } } private static double toMillis(long nanos) { return nanos / (double) MILLISECONDS.toNanos(1); } /** * Wraps the provided {@code ProcessorSupplier} with a {@code * Supplier<Processor>} that returns processors obtained from it. */ public static Supplier<Processor> supplierFrom(ProcessorSupplier supplier) { return supplierFrom(supplier, new TestProcessorSupplierContext()); } /** * Wraps the provided {@code ProcessorSupplier} with a {@code * Supplier<Processor>} that returns processors obtained from it. 
     */
    public static Supplier<Processor> supplierFrom(ProcessorSupplier supplier,
                                                   ProcessorSupplier.Context context) {
        try {
            supplier.init(context);
        } catch (Exception e) {
            throw sneakyThrow(e);
        }
        return () -> supplier.get(1).iterator().next();
    }

    /**
     * Wraps the provided {@code ProcessorMetaSupplier} with a {@code
     * Supplier<Processor>} that returns processors obtained from it.
     */
    public static Supplier<Processor> supplierFrom(ProcessorMetaSupplier supplier) {
        return supplierFrom(supplier, new TestProcessorSupplierContext());
    }

    /**
     * Wraps the provided {@code ProcessorMetaSupplier} with a {@code
     * Supplier<Processor>} that returns processors obtained from it.
     */
    public static Supplier<Processor> supplierFrom(ProcessorMetaSupplier supplier,
                                                   ProcessorSupplier.Context context) {
        try {
            supplier.init(context);
        } catch (Exception e) {
            throw sneakyThrow(e);
        }
        // resolve the meta-supplier for the single local address, then delegate
        return supplierFrom(supplier.get(singletonList(LOCAL_ADDRESS)).apply(LOCAL_ADDRESS), context);
    }

    static ILogger getLogger(String name) {
        return LOGGING_SERVICE.getLogger(name);
    }

    static ILogger getLogger(Class clazz) {
        return LOGGING_SERVICE.getLogger(clazz);
    }

    /**
     * Converts a list to a string putting {@code toString()} of each element
     * on a separate line. It is useful to transform list inputs to {@code
     * assertEquals()}: the exception will show the entire collections instead
     * of just non-equal sizes or the first non-equal element.
     *
     * @param list Input list
     * @return Output string
     */
    private static String listToString(List<?> list) {
        return list.stream()
                   .map(String::valueOf)
                   .collect(Collectors.joining("\n"));
    }

    // Pairs an item with the outbox ordinal it was emitted to.
    private static class ObjectWithOrdinal {
        final int ordinal;
        final Object item;

        ObjectWithOrdinal(int ordinal, Object item) {
            this.ordinal = ordinal;
            this.item = item;
        }
    }

    /**
     * Describes the current test mode.
     */
    public static final class TestMode {

        private final boolean doSnapshots;
        private final int restoreInterval;
        private final int inboxLimit;

        /**
         * Construct a new instance.
         */
        private TestMode(boolean doSnapshots, int restoreInterval, int inboxLimit) {
            this.doSnapshots = doSnapshots;
            this.restoreInterval = restoreInterval;
            this.inboxLimit = inboxLimit;
        }

        /**
         * Are snapshots enabled.
         */
        public boolean isSnapshotsEnabled() {
            return doSnapshots;
        }

        /**
         * How often the snapshot is restored. 1 means restore every snapshot,
         * 2 every other snapshot.
         */
        public int snapshotRestoreInterval() {
            return restoreInterval;
        }

        /**
         * Size limit of the inbox.
         */
        public int inboxLimit() {
            return inboxLimit;
        }

        @Override
        public String toString() {
            String sInboxSize = inboxLimit == Integer.MAX_VALUE ? "unlimited" : String.valueOf(inboxLimit);
            if (!doSnapshots && restoreInterval == 0) {
                return "snapshots disabled, inboxLimit=" + sInboxSize;
            } else if (doSnapshots && restoreInterval == 1) {
                assert inboxLimit == 1;
                return "snapshots enabled, restoring every snapshot";
            } else if (doSnapshots && restoreInterval == 2) {
                assert inboxLimit == 1;
                return "snapshots enabled, restoring every other snapshot";
            } else if (doSnapshots && restoreInterval == Integer.MAX_VALUE) {
                return "snapshots enabled, never restoring them, inboxLimit=" + sInboxSize;
            } else {
                throw new IllegalArgumentException("Unknown mode, doSnapshots=" + doSnapshots
                        + ", restoreInterval=" + restoreInterval + ", inboxLimit=" + inboxLimit);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.integration.amqp; import java.io.Serializable; import java.util.ArrayList; import javax.jms.BytesMessage; import javax.jms.Connection; import javax.jms.MapMessage; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.ObjectMessage; import javax.jms.Queue; import javax.jms.Session; import javax.jms.StreamMessage; import javax.jms.TextMessage; import org.apache.activemq.artemis.api.core.QueueConfiguration; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.management.AddressControl; import org.apache.activemq.artemis.tests.integration.management.ManagementControlHelper; import org.apache.activemq.artemis.tests.util.Wait; import org.apache.activemq.artemis.utils.Base64; import org.apache.activemq.artemis.utils.ByteUtil; import org.apache.activemq.artemis.utils.RandomUtil; import org.junit.Assert; import org.junit.Test; /** * Test that various message types are handled as expected with an AMQP JMS client. 
*/ public class JMSMessageTypesTest extends JMSClientTestSupport { final int NUM_MESSAGES = 10; @Override protected String getConfiguredProtocols() { return "AMQP,OPENWIRE,CORE"; } @Test(timeout = 60000) public void testAddressControlSendMessage() throws Exception { SimpleString address = RandomUtil.randomSimpleString(); server.createQueue(new QueueConfiguration(address).setRoutingType(RoutingType.ANYCAST)); AddressControl addressControl = ManagementControlHelper.createAddressControl(address, mBeanServer); Assert.assertEquals(1, addressControl.getQueueNames().length); addressControl.sendMessage(null, org.apache.activemq.artemis.api.core.Message.BYTES_TYPE, Base64.encodeBytes("test".getBytes()), false, fullUser, fullPass); Wait.assertEquals(1, addressControl::getMessageCount); Connection connection = createConnection("myClientId"); try { Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); javax.jms.Queue queue = session.createQueue(address.toString()); MessageConsumer consumer = session.createConsumer(queue); Message message = consumer.receive(5000); assertNotNull(message); byte[] buffer = new byte[(int)((BytesMessage)message).getBodyLength()]; ((BytesMessage)message).readBytes(buffer); assertEquals("test", new String(buffer)); session.close(); connection.close(); } finally { if (connection != null) { connection.close(); } } } @Test(timeout = 60000) public void testAddressControlSendMessageWithText() throws Exception { SimpleString address = RandomUtil.randomSimpleString(); server.createQueue(new QueueConfiguration(address).setRoutingType(RoutingType.ANYCAST)); AddressControl addressControl = ManagementControlHelper.createAddressControl(address, mBeanServer); Assert.assertEquals(1, addressControl.getQueueNames().length); addressControl.sendMessage(null, org.apache.activemq.artemis.api.core.Message.TEXT_TYPE, "test", false, fullUser, fullPass); Wait.assertEquals(1, addressControl::getMessageCount); Assert.assertEquals(1, 
addressControl.getMessageCount()); Connection connection = createConnection("myClientId"); try { Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); javax.jms.Queue queue = session.createQueue(address.toString()); MessageConsumer consumer = session.createConsumer(queue); Message message = consumer.receive(500); assertNotNull(message); String text = ((TextMessage) message).getText(); assertEquals("test", text); session.close(); connection.close(); } finally { if (connection != null) { connection.close(); } } } private void testBytesMessageSendReceive(Connection producerConnection, Connection consumerConnection) throws Throwable { long time = System.currentTimeMillis(); Session session = producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); Queue queue = session.createQueue(getQueueName()); byte[] bytes = new byte[0xf + 1]; for (int i = 0; i <= 0xf; i++) { bytes[i] = (byte) i; } MessageProducer producer = session.createProducer(queue); for (int i = 0; i < NUM_MESSAGES; i++) { instanceLog.debug("Sending " + i); BytesMessage message = session.createBytesMessage(); message.writeBytes(bytes); message.setIntProperty("count", i); producer.send(message); } Session sessionConsumer = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); Queue consumerQueue = sessionConsumer.createQueue(getQueueName()); final MessageConsumer consumer = sessionConsumer.createConsumer(consumerQueue); for (int i = 0; i < NUM_MESSAGES; i++) { BytesMessage m = (BytesMessage) consumer.receive(5000); Assert.assertNotNull("Could not receive message count=" + i + " on consumer", m); m.reset(); long size = m.getBodyLength(); byte[] bytesReceived = new byte[(int) size]; m.readBytes(bytesReceived); instanceLog.debug("Received " + ByteUtil.bytesToHex(bytesReceived, 1) + " count - " + m.getIntProperty("count")); Assert.assertArrayEquals(bytes, bytesReceived); } long taken = (System.currentTimeMillis() - time) / 1000; instanceLog.debug("taken = " + taken); } 
@Test(timeout = 60000) public void testBytesMessageSendReceiveFromAMQPToAMQP() throws Throwable { testBytesMessageSendReceive(createConnection(), createConnection()); } @Test(timeout = 60000) public void testBytesMessageSendReceiveFromCoreToAMQP() throws Throwable { testBytesMessageSendReceive(createCoreConnection(), createConnection()); } @Test(timeout = 60000) public void testBytesMessageSendReceiveFromAMQPToCore() throws Throwable { testBytesMessageSendReceive(createConnection(), createCoreConnection()); } private void testMessageSendReceive(Connection producerConnection, Connection consumerConnection) throws Throwable { long time = System.currentTimeMillis(); Session session = producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); Queue queue = session.createQueue(getQueueName()); byte[] bytes = new byte[0xf + 1]; for (int i = 0; i <= 0xf; i++) { bytes[i] = (byte) i; } MessageProducer producer = session.createProducer(queue); for (int i = 0; i < NUM_MESSAGES; i++) { instanceLog.debug("Sending " + i); Message message = session.createMessage(); message.setIntProperty("count", i); producer.send(message); } Session sessionConsumer = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); Queue consumerQueue = sessionConsumer.createQueue(getQueueName()); final MessageConsumer consumer = sessionConsumer.createConsumer(consumerQueue); for (int i = 0; i < NUM_MESSAGES; i++) { Message m = consumer.receive(5000); Assert.assertNotNull("Could not receive message count=" + i + " on consumer", m); } long taken = (System.currentTimeMillis() - time) / 1000; instanceLog.debug("taken = " + taken); } @Test(timeout = 60000) public void testMessageSendReceiveFromAMQPToAMQP() throws Throwable { testMessageSendReceive(createConnection(), createConnection()); } @Test(timeout = 60000) public void testMessageSendReceiveFromCoreToAMQP() throws Throwable { testMessageSendReceive(createCoreConnection(), createConnection()); } @Test(timeout = 60000) public void 
testMessageSendReceiveFromAMQPToCore() throws Throwable { testMessageSendReceive(createConnection(), createCoreConnection()); } private void testMapMessageSendReceive(Connection producerConnection, Connection consumerConnection) throws Throwable { long time = System.currentTimeMillis(); Session session = producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); Queue queue = session.createQueue(getQueueName()); MessageProducer producer = session.createProducer(queue); for (int i = 0; i < NUM_MESSAGES; i++) { instanceLog.debug("Sending " + i); MapMessage message = session.createMapMessage(); message.setInt("i", i); message.setIntProperty("count", i); producer.send(message); } Session sessionConsumer = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); Queue consumerQueue = sessionConsumer.createQueue(getQueueName()); final MessageConsumer consumer = sessionConsumer.createConsumer(consumerQueue); for (int i = 0; i < NUM_MESSAGES; i++) { MapMessage m = (MapMessage) consumer.receive(5000); Assert.assertNotNull("Could not receive message count=" + i + " on consumer", m); Assert.assertEquals(i, m.getInt("i")); Assert.assertEquals(i, m.getIntProperty("count")); } long taken = (System.currentTimeMillis() - time) / 1000; instanceLog.debug("taken = " + taken); } @Test(timeout = 60000) public void testMapMessageSendReceiveFromAMQPToAMQP() throws Throwable { testMapMessageSendReceive(createConnection(), createConnection()); } @Test(timeout = 60000) public void testMapMessageSendReceiveFromCoreToAMQP() throws Throwable { testMapMessageSendReceive(createCoreConnection(), createConnection()); } @Test(timeout = 60000) public void testMapMessageSendReceiveFromAMQPToCore() throws Throwable { testMapMessageSendReceive(createConnection(), createCoreConnection()); } private void testTextMessageSendReceive(Connection producerConnection, Connection consumerConnection) throws Throwable { long time = System.currentTimeMillis(); Session session = 
producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue queue = session.createQueue(getQueueName());

      MessageProducer producer = session.createProducer(queue);
      for (int i = 0; i < NUM_MESSAGES; i++) {
         instanceLog.debug("Sending " + i);
         TextMessage message = session.createTextMessage("text" + i);
         message.setStringProperty("text", "text" + i);
         producer.send(message);
      }

      Session sessionConsumer = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue consumerQueue = sessionConsumer.createQueue(getQueueName());
      final MessageConsumer consumer = sessionConsumer.createConsumer(consumerQueue);

      for (int i = 0; i < NUM_MESSAGES; i++) {
         TextMessage m = (TextMessage) consumer.receive(5000);
         Assert.assertNotNull("Could not receive message count=" + i + " on consumer", m);
         Assert.assertEquals("text" + i, m.getText());
      }

      long taken = (System.currentTimeMillis() - time) / 1000;
      instanceLog.debug("taken = " + taken);
   }

   @Test(timeout = 60000)
   public void testTextMessageSendReceiveFromAMQPToAMQP() throws Throwable {
      testTextMessageSendReceive(createConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testTextMessageSendReceiveFromCoreToAMQP() throws Throwable {
      testTextMessageSendReceive(createCoreConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testTextMessageSendReceiveFromAMQPToCore() throws Throwable {
      testTextMessageSendReceive(createConnection(), createCoreConnection());
   }

   /**
    * Sends NUM_MESSAGES StreamMessages (an int, a boolean and a String field
    * each) and verifies every field on the receiving side in write order.
    */
   private void testStreamMessageSendReceive(Connection producerConnection, Connection consumerConnection) throws Throwable {
      Session session = producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue queue = session.createQueue(getQueueName());

      MessageProducer producer = session.createProducer(queue);
      for (int i = 0; i < NUM_MESSAGES; i++) {
         StreamMessage message = session.createStreamMessage();
         message.writeInt(i);
         message.writeBoolean(true);
         message.writeString("test");
         producer.send(message);
      }

      Session sessionConsumer = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue consumerQueue = sessionConsumer.createQueue(getQueueName());
      final MessageConsumer consumer = sessionConsumer.createConsumer(consumerQueue);

      for (int i = 0; i < NUM_MESSAGES; i++) {
         StreamMessage m = (StreamMessage) consumer.receive(5000);
         Assert.assertNotNull("Could not receive message count=" + i + " on consumer", m);
         Assert.assertEquals(i, m.readInt());
         Assert.assertEquals(true, m.readBoolean());
         Assert.assertEquals("test", m.readString());
      }
   }

   @Test(timeout = 60000)
   public void testStreamMessageSendReceiveFromAMQPToAMQP() throws Throwable {
      testStreamMessageSendReceive(createConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testStreamMessageSendReceiveFromCoreToAMQP() throws Throwable {
      testStreamMessageSendReceive(createCoreConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testStreamMessageSendReceiveFromAMQPToCore() throws Throwable {
      testStreamMessageSendReceive(createConnection(), createCoreConnection());
   }

   /**
    * Sends an ObjectMessage whose payload is an ArrayList and verifies the
    * deserialized payload on the consumer side.
    */
   private void testObjectMessageWithArrayListPayload(Connection producerConnection, Connection consumerConnection) throws Throwable {
      ArrayList<String> payload = new ArrayList<>();
      payload.add("aString");

      Session session = producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue queue = session.createQueue(getQueueName());
      MessageProducer producer = session.createProducer(queue);
      ObjectMessage objectMessage = session.createObjectMessage(payload);
      producer.send(objectMessage);
      session.close();

      session = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue consumerQueue = session.createQueue(getQueueName());
      MessageConsumer cons = session.createConsumer(consumerQueue);
      consumerConnection.start();

      objectMessage = (ObjectMessage) cons.receive(5000);
      assertNotNull(objectMessage);
      @SuppressWarnings("unchecked")
      ArrayList<String> received = (ArrayList<String>) objectMessage.getObject();
assertEquals("aString", received.get(0));   // Fix: expected value goes first (JUnit convention)
      consumerConnection.close();
   }

   @Test(timeout = 60000)
   public void testObjectMessageWithArrayListPayloadFromAMQPToAMQP() throws Throwable {
      testObjectMessageWithArrayListPayload(createConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testObjectMessageWithArrayListPayloadFromCoreToAMQP() throws Throwable {
      testObjectMessageWithArrayListPayload(createCoreConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testObjectMessageWithArrayListPayloadFromAMQPToCore() throws Throwable {
      testObjectMessageWithArrayListPayload(createConnection(), createCoreConnection());
   }

   /**
    * Sends NUM_MESSAGES ObjectMessages carrying a custom Serializable payload
    * and verifies the deserialized counter value on each received message.
    */
   private void testObjectMessageUsingCustomType(Connection producerConnection, Connection consumerConnection) throws Throwable {
      long time = System.currentTimeMillis();

      Session session = producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue queue = session.createQueue(getQueueName());

      MessageProducer producer = session.createProducer(queue);
      for (int i = 0; i < NUM_MESSAGES; i++) {
         instanceLog.debug("Sending " + i);
         ObjectMessage message = session.createObjectMessage(new AnythingSerializable(i));
         producer.send(message);
      }

      Session sessionConsumer = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue consumerQueue = sessionConsumer.createQueue(getQueueName());
      final MessageConsumer consumer = sessionConsumer.createConsumer(consumerQueue);

      for (int i = 0; i < NUM_MESSAGES; i++) {
         ObjectMessage msg = (ObjectMessage) consumer.receive(5000);
         Assert.assertNotNull("Could not receive message count=" + i + " on consumer", msg);

         AnythingSerializable someSerialThing = (AnythingSerializable) msg.getObject();
         Assert.assertEquals(i, someSerialThing.getCount());
      }

      long taken = (System.currentTimeMillis() - time) / 1000;
      instanceLog.debug("taken = " + taken);
   }

   @Test(timeout = 60000)
   public void testObjectMessageUsingCustomTypeFromAMQPToAMQP() throws Throwable {
      testObjectMessageUsingCustomType(createConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testObjectMessageUsingCustomTypeFromCoreToAMQP() throws Throwable {
      testObjectMessageUsingCustomType(createCoreConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testObjectMessageUsingCustomTypeFromAMQPToCore() throws Throwable {
      testObjectMessageUsingCustomType(createConnection(), createCoreConnection());
   }

   /** Simple Serializable payload used by the custom-type ObjectMessage tests. */
   public static class AnythingSerializable implements Serializable {

      private static final long serialVersionUID = 5972085029690947807L;

      private int count;

      public AnythingSerializable(int count) {
         this.count = count;
      }

      public int getCount() {
         return count;
      }
   }

   /**
    * Sends two copies of a TextMessage carrying one property of each basic
    * type and verifies every property survives the broker round-trip.
    */
   private void testPropertiesArePreserved(Connection producerConnection, Connection consumerConnection) throws Exception {
      Session session = producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue queue = session.createQueue(getQueueName());
      MessageProducer producer = session.createProducer(queue);

      TextMessage message = session.createTextMessage();
      message.setText("msg:0");
      message.setBooleanProperty("true", true);
      message.setBooleanProperty("false", false);
      message.setStringProperty("foo", "bar");
      message.setDoubleProperty("double", 66.6);
      message.setFloatProperty("float", 56.789f);
      message.setIntProperty("int", 8);
      message.setByteProperty("byte", (byte) 10);

      producer.send(message);
      producer.send(message);
      consumerConnection.start();

      Session consumerSession = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      Queue consumerQueue = consumerSession.createQueue(getQueueName());
      MessageConsumer messageConsumer = consumerSession.createConsumer(consumerQueue);

      TextMessage received = (TextMessage) messageConsumer.receive(5000);
      Assert.assertNotNull(received);
      Assert.assertEquals("msg:0", received.getText());
      // Fix: expected/actual were swapped in these assertions, which produces
      // misleading failure messages; pass/fail behavior is unchanged.
      Assert.assertEquals(true, received.getBooleanProperty("true"));
      Assert.assertEquals(false, received.getBooleanProperty("false"));
      Assert.assertEquals("bar", received.getStringProperty("foo"));
      Assert.assertEquals(66.6, received.getDoubleProperty("double"), 0.0001);
      Assert.assertEquals(56.789f, received.getFloatProperty("float"), 0.0001);
      Assert.assertEquals(8, received.getIntProperty("int"));
      Assert.assertEquals((byte) 10, received.getByteProperty("byte"));

      received = (TextMessage) messageConsumer.receive(5000);
      Assert.assertNotNull(received);

      consumerConnection.close();
   }

   @Test(timeout = 60000)
   public void testPropertiesArePreservedFromAMQPToAMQP() throws Throwable {
      testPropertiesArePreserved(createConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testPropertiesArePreservedFromCoreToAMQP() throws Throwable {
      testPropertiesArePreserved(createCoreConnection(), createConnection());
   }

   @Test(timeout = 60000)
   public void testPropertiesArePreservedFromAMQPToCore() throws Throwable {
      testPropertiesArePreserved(createConnection(), createCoreConnection());
   }
}
package ca.pfv.spmf.algorithms.frequentpatterns.upgrowth_ihup; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; /** * This is an implementation of the UP-Tree used by UPGrowth algorithm. * Copyright (c) 2014 Prashant Barhate * * This file is part of the SPMF DATA MINING SOFTWARE * * (http://www.philippe-fournier-viger.com/spmf). * * SPMF is free software: you can redistribute it and/or modify it under the * * terms of the GNU General Public License as published by the Free Software * * Foundation, either version 3 of the License, or (at your option) any later * * version. SPMF is distributed in the hope that it will be useful, but WITHOUT * ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License along with * SPMF. If not, see <http://www.gnu.org/licenses/>. * * @see AlgoUPGrowth * * @author Prashant Barhate */ public class UPTree { // List of items in the header table List<Integer> headerList = null; // flag that indicate if the tree has more than one path boolean hasMoreThanOnePath = false; // List of pairs (item, Utility) of the header table Map<Integer, UPNode> mapItemNodes = new HashMap<Integer, UPNode>(); // root of the tree UPNode root = new UPNode(); // null node // Map that indicates the last node for each item using the node links // key: item value: an fp tree node (added by Philippe) Map<Integer, UPNode> mapItemLastNode = new HashMap<Integer, UPNode>(); public UPTree() { } /** * Method for adding a transaction to the up-tree (for the initial * construction of the UP-Tree). 
* * @param transaction reorganised transaction * @param RTU reorganised transaction utility */ public void addTransaction(List<Item> transaction, int RTU) { UPNode currentNode = root; int i = 0; int RemainingUtility = 0; int size = transaction.size(); // For each item in the transaction for (i = 0; i < size; i++) { for (int k = i + 1; k < transaction.size(); k++) { // remaining utility is calculated as sum of utilities of all // itms behind currnt one RemainingUtility += transaction.get(k).getUtility(); } int item = transaction.get(i).getName(); // int itm=Integer.parseInt(item); // look if there is a node already in the FP-Tree UPNode child = currentNode.getChildWithID(item); if (child == null) { int nodeUtility = (RTU - RemainingUtility); // Nodeutility= previous + (RTU - utility of // descendent items) RemainingUtility = 0; // reset RemainingUtility for next item // there is no node, we create a new one currentNode = insertNewNode(currentNode, item, nodeUtility); } else { // there is a node already, we update it int currentNU = child.nodeUtility; // current node utility // Nodeutility= previous + (RTU - utility of // descendent items) int nodeUtility = currentNU + (RTU - RemainingUtility); RemainingUtility = 0; // reset RemainingUtility for next item child.count++; child.nodeUtility = nodeUtility; currentNode = child; } } } /** * Add a transaction to the UP-Tree (for a local UP-Tree) * @param localPath the path to be inserted * @param pathUtility the path utility * @param pathCount the path count * @param mapMinimumItemUtility the map storing minimum item utility */ public void addLocalTransaction(List<Integer> localPath, int pathUtility, Map<Integer, Integer> mapMinimumItemUtility, int pathCount) { UPNode currentlocalNode = root; int i = 0; int RemainingUtility = 0; int size = localPath.size(); // For each item in the transaction for (i = 0; i < size; i++) { for (int k = i + 1; k < localPath.size(); k++) { int search = localPath.get(k); // remaining utility is 
calculated as sum of utilities of all // items behind current one RemainingUtility += mapMinimumItemUtility.get(search) * pathCount; } int item = localPath.get(i); // look if there is a node already in the UP-Tree UPNode child = currentlocalNode.getChildWithID(item); if (child == null) { int nodeUtility = (pathUtility - RemainingUtility); ; // Nodeutility= previous + (RTU - utility of // descendent items) RemainingUtility = 0; // reset RU for next item // there is no node, we create a new one currentlocalNode = insertNewNode(currentlocalNode, item, nodeUtility); } else { // there is a node already, we update it int currentNU = child.nodeUtility; // current node utility // Nodeutility= previous + (RTU - utility of // descendent items) int nodeUtility = currentNU + (pathUtility - RemainingUtility); RemainingUtility = 0; child.count++; child.nodeUtility = nodeUtility; currentlocalNode = child; } } } /** * Insert a new node in the UP-Tree as child of a parent node * @param currentlocalNode the parent node * @param item the item in the new node * @param nodeUtility the node utility of the new node * @return the new node */ private UPNode insertNewNode(UPNode currentlocalNode, int item, int nodeUtility) { // create the new node UPNode newNode = new UPNode(); newNode.itemID = item; newNode.nodeUtility = nodeUtility; newNode.count = 1; newNode.parent = currentlocalNode; // we link the new node to its parrent currentlocalNode.childs.add(newNode); // check if more than one path if (!hasMoreThanOnePath && currentlocalNode.childs.size() > 1) { hasMoreThanOnePath = true; } // We update the header table. // We check if there is already a node with this id in the // header table UPNode localheadernode = mapItemNodes.get(item); if (localheadernode == null) { // there is not mapItemNodes.put(item, newNode); mapItemLastNode.put(item, newNode); } else { // there is // we find the last node with this id. 
// get the latest node in the tree with this item UPNode lastNode = mapItemLastNode.get(item); // we add the new node to the node link of the last node lastNode.nodeLink = newNode; // Finally, we set the new node as the last node mapItemLastNode.put(item, newNode); } // we return this node as the current node for the next loop // iteration return newNode; } /** * Method for creating the list of items in the header table, in descending * order of TWU or path utility. * * @param mapItemToEstimatedUtility * the Utilities of each item (key: item value: TWU or path * utility) */ void createHeaderList(final Map<Integer, Integer> mapItemToEstimatedUtility) { // create an array to store the header list with // all the items stored in the map received as parameter headerList = new ArrayList<Integer>(mapItemNodes.keySet()); // sort the header table by decreasing order of utility Collections.sort(headerList, new Comparator<Integer>() { public int compare(Integer id1, Integer id2) { // compare the Utility int compare = mapItemToEstimatedUtility.get(id2) - mapItemToEstimatedUtility.get(id1); // if the same utility, we check the lexical ordering! if (compare == 0) { return (id1 - id2); } // otherwise we use the utility return compare; } }); } @Override public String toString() { String output = ""; output += "HEADER TABLE: " + mapItemNodes + " \n"; output += "hasMoreThanOnePath: " + hasMoreThanOnePath + " \n"; return output + toString("", root); } public String toString(String indent, UPNode node) { String output = indent + node.toString() + "\n"; String childsOutput = ""; for (UPNode child : node.childs) { childsOutput += toString(indent + " ", child); } return output + childsOutput; } }
/******************************************************************************* * Copyright (c) Intel Corporation * Copyright (c) 2017 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package org.osc.core.broker.rest.client.openstack.vmidc.notification.runner; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import javax.persistence.EntityManager; import org.osc.core.broker.model.entities.virtualization.SecurityGroup; import org.osc.core.broker.model.entities.virtualization.SecurityGroupMember; import org.osc.core.broker.model.entities.virtualization.SecurityGroupMemberType; import org.osc.core.broker.model.entities.virtualization.VirtualizationConnector; import org.osc.core.broker.rest.client.openstack.vmidc.notification.OsNotificationObjectType; import org.osc.core.broker.rest.client.openstack.vmidc.notification.OsNotificationUtil; import org.osc.core.broker.rest.client.openstack.vmidc.notification.listener.NotificationListenerFactory; import org.osc.core.broker.rest.client.openstack.vmidc.notification.listener.OsNotificationListener; import org.osc.core.broker.service.broadcast.BroadcastListener; import org.osc.core.broker.service.broadcast.BroadcastMessage; import org.osc.core.broker.service.broadcast.EventType; import org.osc.core.broker.service.exceptions.VmidcBrokerInvalidEntryException; import 
org.osc.core.broker.service.exceptions.VmidcBrokerValidationException; import org.osc.core.broker.service.exceptions.VmidcException; import org.osc.core.broker.service.persistence.OSCEntityManager; import org.osc.core.broker.service.persistence.SecurityGroupEntityMgr; import org.osc.core.broker.util.TransactionalBroadcastUtil; import org.osc.core.broker.util.db.DBConnectionManager; import org.slf4j.LoggerFactory; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import org.osgi.service.component.annotations.Activate; import org.osgi.service.component.annotations.Component; import org.osgi.service.component.annotations.Deactivate; import org.osgi.service.component.annotations.Reference; import org.osgi.service.component.annotations.ServiceScope; import org.osgi.service.transaction.control.ScopedWorkException; import org.slf4j.Logger; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; /** * * Class will be instantiated whenever server is started and will run forever until server shutdown. 
* */ @Component(scope=ServiceScope.PROTOTYPE, service=OsSecurityGroupNotificationRunner.class) public class OsSecurityGroupNotificationRunner implements BroadcastListener { @Reference private NotificationListenerFactory notificationListenerFactory; @Reference private TransactionalBroadcastUtil txBroadcastUtil; @Reference private DBConnectionManager dbConnectionManager; private final Multimap<Long, OsNotificationListener> sgToListenerMap = ArrayListMultimap.create(); private final HashMap<Long, VirtualizationConnector> sgToVCMap = new HashMap<Long, VirtualizationConnector>(); private static final Logger log = LoggerFactory.getLogger(OsSecurityGroupNotificationRunner.class); private ServiceRegistration<BroadcastListener> registration; @Activate void start(BundleContext ctx) throws InterruptedException, VmidcException { // This is not done automatically by DS as we do not want the broadcast whiteboard // to activate another instance of this component, only people getting the runner! this.registration = ctx.registerService(BroadcastListener.class, this, null); try { EntityManager em = this.dbConnectionManager.getTransactionalEntityManager(); this.dbConnectionManager.getTransactionControl().required(() -> { OSCEntityManager<SecurityGroup> sgEmgr = new OSCEntityManager<SecurityGroup>(SecurityGroup.class, em, this.txBroadcastUtil); for (SecurityGroup sg : sgEmgr.listAll()) { if (sg.getVirtualizationConnector().getVirtualizationType().isOpenstack()) { addListener(sg); } } return null; }); } catch (ScopedWorkException swe) { throw swe.asRuntimeException(); } } @Override public void receiveBroadcast(BroadcastMessage msg) { if (msg.getReceiver().equals("SecurityGroup")) { updateListenerMap(msg); } } @Deactivate void shutdown() { try { this.registration.unregister(); } catch (IllegalStateException ise) { // No problem - this means the service was // already unregistered (e.g. 
by bundle stop) } this.sgToListenerMap.clear(); this.sgToVCMap.clear(); } private void updateListenerMap(BroadcastMessage msg) { if (msg.getEventType() == EventType.DELETED) { removeListener(msg.getEntityId()); } else { try { EntityManager em = this.dbConnectionManager.getTransactionalEntityManager(); this.dbConnectionManager.getTransactionControl().required(() -> { SecurityGroup sg = SecurityGroupEntityMgr.findById(em, msg.getEntityId()); if (sg == null) { log.error("Processing " + msg.getEventType() + " notification for Security Group (" + msg.getEntityId() + ") but couldn't find it in the DB"); } else if (sg.getVirtualizationConnector().getVirtualizationType().isKubernetes()) { return null; } else if (msg.getEventType() == EventType.ADDED) { addListener(sg); } else if (msg.getEventType() == EventType.UPDATED) { updateListeners(sg); } return null; }); } catch (ScopedWorkException e) { log.error("An error occurred updating the Security Group Listeners", e.getCause()); throw e.asRuntimeException(); } catch (Exception e) { log.error("An error occurred updating the Security Group Listeners", e); throw new RuntimeException("Failed to consume a broadcast message", e); } } } /** * * This method will return a list of Open stack IDs based on given Security Group member type. 
If SGM type is null * then this will return a list of IDs for all SGM for the given SG * * @param sg * Security Group in context * @param type * Security Group member type * @return * List of open stack UUIDs */ private List<String> getMemberIdsFromSG(SecurityGroup sg, SecurityGroupMemberType type) { ArrayList<String> idList = new ArrayList<>(); for (SecurityGroupMember sgm : sg.getSecurityGroupMembers()) { try { if (!sgm.getMarkedForDeletion() && (type == null || sgm.getType().equals(type))) { idList.add(getMemberOpenstackId(sgm)); } } catch (VmidcBrokerValidationException ex) { log.error("Failed to add SGM id to list", ex); } } return idList; } private String getMemberOpenstackId(SecurityGroupMember sgm) throws VmidcBrokerValidationException { switch (sgm.getType()) { case VM: return sgm.getVm().getOpenstackId(); case NETWORK: return sgm.getNetwork().getOpenstackId(); case SUBNET: return sgm.getSubnet().getOpenstackId(); default: throw new VmidcBrokerValidationException("Region is not applicable for Members of type '" + sgm.getType() + "'"); } } private void addListener(SecurityGroup sg) { try { // create VM Listener addMemberListener(sg, OsNotificationObjectType.VM, SecurityGroupMemberType.VM); // create Network Listener addMemberListener(sg, OsNotificationObjectType.NETWORK, SecurityGroupMemberType.NETWORK); // create port listener for Subnets... 
addMemberListener(sg, OsNotificationObjectType.PORT, SecurityGroupMemberType.SUBNET); // create Deletion Project Listener addProjectDeletionListener(sg, OsNotificationObjectType.PROJECT); if (sg.isProtectAll()) { // create Port Listener with Project Id in context addPortToProjectListener(sg, OsNotificationObjectType.PORT); } else { // create Port Listener with Network ID in context addMemberListener(sg, OsNotificationObjectType.PORT, SecurityGroupMemberType.NETWORK); } // Add new entry in SG-to-VC map this.sgToVCMap.put(sg.getId(), sg.getVirtualizationConnector()); } catch (VmidcBrokerInvalidEntryException e) { log.error("Invalid Object Type requested to register this listener with", e); } } private void updateListeners(SecurityGroup sg) { for (OsNotificationListener listener : this.sgToListenerMap.get(sg.getId())) { if (listener.getObjectType().equals(OsNotificationObjectType.VM)) { // Updating VM listener OsNotificationUtil.updateListener(listener, sg, getMemberIdsFromSG(sg, SecurityGroupMemberType.VM)); } else if (listener.getObjectType().equals(OsNotificationObjectType.NETWORK)) { // Updating Network listener OsNotificationUtil .updateListener(listener, sg, getMemberIdsFromSG(sg, SecurityGroupMemberType.NETWORK)); } else if (listener.getObjectType().equals(OsNotificationObjectType.PORT)) { if (sg.isProtectAll()) { // type = protectALL /* * if SG is protectAll or is being changed by user to protectAll * Update Port Listener with Project ID instead of Network ID(s) */ OsNotificationUtil.updateListener(listener, sg, Arrays.asList(sg.getProjectId())); } else { // type = not protectALL /* * User changed SG from Protect All to VM/Network/Subnet. * Remove Project ID and add Network Id(s) for port listeners.. * or * SG is not protect all and SG type is not modified... 
Update Member ID(s) */ OsNotificationUtil.updateListener(listener, sg, getMemberIdsFromSG(sg, null)); } } } } private void addPortToProjectListener(SecurityGroup sg, OsNotificationObjectType type) throws VmidcBrokerInvalidEntryException { // Creating member change Notification Listener OsNotificationListener listener = this.notificationListenerFactory .createAndRegisterNotificationListener(sg.getVirtualizationConnector(), type, Arrays.asList(sg.getProjectId()), sg); // Register Member change listener this.sgToListenerMap.put(sg.getId(), listener); } private void addProjectDeletionListener(SecurityGroup sg, OsNotificationObjectType type) throws VmidcBrokerInvalidEntryException { OsNotificationListener listener = this.notificationListenerFactory .createAndRegisterNotificationListener(sg.getVirtualizationConnector(), type, Arrays.asList(sg.getProjectId()), sg); // Register Member change listener this.sgToListenerMap.put(sg.getId(), listener); } private void addMemberListener(SecurityGroup sg, OsNotificationObjectType type, SecurityGroupMemberType memberType) throws VmidcBrokerInvalidEntryException { OsNotificationListener listener = null; if (type == OsNotificationObjectType.PORT && !sg.isProtectAll()) { // Create Notification Listener listener = this.notificationListenerFactory.createAndRegisterNotificationListener( sg.getVirtualizationConnector(), type, getMemberIdsFromSG(sg, null), sg); } else { listener = this.notificationListenerFactory.createAndRegisterNotificationListener( sg.getVirtualizationConnector(), type, getMemberIdsFromSG(sg, memberType), sg); } // Register Member change listener this.sgToListenerMap.put(sg.getId(), listener); } private void removeListener(Long sgId) { for (OsNotificationListener listener : this.sgToListenerMap.get(sgId)) { listener.unRegister(this.sgToVCMap.get(sgId), listener.getObjectType()); } } }
package com.dev.thiago.ambientmonitoring.view.fragment;

import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.widget.ListView;
import android.widget.Toast;

import com.dev.thiago.ambientmonitoring.R;
import com.dev.thiago.ambientmonitoring.model.Room;
import com.dev.thiago.ambientmonitoring.service.RoomService;
import com.dev.thiago.ambientmonitoring.util.MeasurerUtils;
import com.dev.thiago.ambientmonitoring.util.RetrofitUtils;
import com.dev.thiago.ambientmonitoring.util.SessionUtils;
import com.dev.thiago.ambientmonitoring.view.MainActivity;
import com.dev.thiago.ambientmonitoring.view.adapter.RoomsListAdapter;

import org.androidannotations.annotations.AfterViews;
import org.androidannotations.annotations.Background;
import org.androidannotations.annotations.Bean;
import org.androidannotations.annotations.EFragment;
import org.androidannotations.annotations.ItemClick;
import org.androidannotations.annotations.UiThread;
import org.androidannotations.annotations.ViewById;

import java.io.IOException;
import java.util.List;

import io.realm.Realm;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;

/**
 * Lists the rooms of the logged-in user and lets the user attach this device to
 * one of them, optionally detaching another device that already tracks it.
 */
@EFragment(R.layout.fragment_rooms)
public class RoomsFragment extends GenericFragment implements DialogInterface.OnClickListener {

    @ViewById
    ListView roomsListView;

    ProgressDialog progressDialog;

    @Bean
    RoomsListAdapter adapter;

    MainActivity activity;

    // Room tapped by the user; consumed by the dialog's onClick callback.
    Room clickedRoom;

    @AfterViews
    void afterViews() {
        activity = (MainActivity) getActivity();

        setTitle("Choose a room to attach");

        roomsListView.setAdapter(adapter);

        requestRooms();
    }

    /** Fetches the user's rooms from the server and shows a progress dialog while empty. */
    private void requestRooms() {
        RoomService service = RetrofitUtils.getRetrofit().create(RoomService.class);

        String auth = SessionUtils.getAuthHeader(getActivity());

        Realm realm = Realm.getInstance(getActivity());

        Integer userId = SessionUtils.getLoggedUser(getActivity(), realm).getId();

        realm.close();

        Call<List<Room>> call = service.getRooms(auth, userId);

        progressDialog = new ProgressDialog(getActivity());

        progressDialog.setMessage("Loading rooms...");

        // Only block the UI when there is nothing cached to show yet.
        if (adapter == null || adapter.getCount() == 0) {
            progressDialog.show();
        }

        call.enqueue(new Callback<List<Room>>() {
            @Override
            public void onResponse(Call<List<Room>> call, Response<List<Room>> response) {
                if (response.isSuccess()) {
                    requestRoomsSuccessful(response);
                } else {
                    requestRoomsFailed(response);
                }
            }

            @Override
            public void onFailure(Call<List<Room>> call, Throwable t) {
                // Fixed: previously empty, which left the progress dialog
                // spinning forever on a network failure.
                progressDialog.hide();
            }
        });
    }

    /** Persists the fetched rooms into Realm and refreshes the list. */
    void requestRoomsSuccessful(Response<List<Room>> response) {
        progressDialog.hide();

        Realm realm = Realm.getInstance(getActivity());

        realm.beginTransaction();

        realm.copyToRealmOrUpdate(response.body());

        realm.commitTransaction();

        realm.close();

        adapter.notifyDataSetChanged();
    }

    void requestRoomsFailed(Response<List<Room>> response) {
        progressDialog.hide();
    }

    @ItemClick
    void roomsListView(Room room) {
        clickedRoom = room;

        String message;

        if (room.getIsTracked()) {
            message = room.getName() + " is already attached to another device. Do you wish to remove the other device and attach to the current one?";
        } else {
            message = "Do you want to start tracking " + room.getName() + "?";
        }

        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setMessage(message);
        builder.setNegativeButton("Cancel", this);
        builder.setPositiveButton("Yes", this);
        builder.show();
    }

    @Override
    public void onClick(DialogInterface dialog, int which) {
        switch (which) {
            case DialogInterface.BUTTON_POSITIVE:
                dialog.dismiss();
                progressDialog.setMessage("Waiting for server...");
                progressDialog.show();
                attach(clickedRoom.getIsTracked(), clickedRoom.getId());
                // Fixed: missing break caused fall-through into the
                // negative branch and a second dismiss() call.
                break;
            case DialogInterface.BUTTON_NEGATIVE:
                dialog.dismiss();
        }
    }

    /**
     * Attaches this device to the room, first detaching the other device when
     * requested. Runs on a background thread.
     *
     * @param detach whether another device must be detached first
     * @param roomId id of the room to track
     */
    @Background
    void attach(Boolean detach, Integer roomId) {
        final RoomService service = RetrofitUtils.getRetrofit().create(RoomService.class);

        final String auth = SessionUtils.getAuthHeader(getActivity());

        Realm realm = Realm.getInstance(getActivity());

        final Integer userId = SessionUtils.getLoggedUser(getActivity(), realm).getId();

        realm.close();

        if (detach) {
            final Boolean untrackIsSuccess = untrackRoom(service, auth, userId, roomId);

            if (!untrackIsSuccess) {
                requestFailed("Could not detach the other device from the room");
                return;
            }
        }

        if (trackRoom(service, auth, userId, roomId)) {
            showDashboardFragment();
        } else {
            requestFailed("Could not attach this device to the room");
        }
    }

    @UiThread
    void requestFailed(String message) {
        progressDialog.dismiss();

        if (message != null)
            Toast.makeText(activity, message, Toast.LENGTH_SHORT).show();
    }

    @UiThread
    void showDashboardFragment() {
        MeasurerUtils.setIsAttached(activity, true);
        MeasurerUtils.setTrackedRoomId(activity, clickedRoom.getId());

        progressDialog.dismiss();

        activity.showDashboardFragment(false);
    }

    /** Synchronously detaches the current tracker from the room; returns success. */
    private Boolean untrackRoom(RoomService service, String auth, Integer userId, Integer roomId) {
        Call<Void> call = service.untrackRoom(auth, userId, roomId);

        try {
            Response<Void> response = call.execute();

            return response.isSuccess();
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /** Synchronously attaches this device to the room; returns success. */
    private Boolean trackRoom(RoomService service, String auth, Integer userId, Integer roomId) {
        Call<Void> call = service.trackRoom(auth, userId, roomId);

        try {
            Response<Void> response = call.execute();

            return response.isSuccess();
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }
}
package com.github.andlyticsproject;

import android.accounts.Account;
import android.accounts.AccountAuthenticatorActivity;
import android.accounts.AccountManager;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.ContentResolver;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.provider.ContactsContract;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;

import com.github.andlyticsproject.admob.AdmobAccountAuthenticator;
import com.github.andlyticsproject.admob.AdmobAuthenticationUtilities;
import com.github.andlyticsproject.admob.AdmobRequest;

/**
 * Login screen for AdMob accounts. Collects username/password, authenticates on
 * a background thread, and stores the result via the Android AccountManager.
 * Handles two flows: creating a new account and re-confirming credentials for
 * an existing one (selected by the intent extras).
 */
public class AdmobAuthenticatorActivity extends AccountAuthenticatorActivity {

	// Intent extra keys used by the authenticator to parameterize this activity.
	public static final String PARAM_AUTHTOKEN_TYPE = "authtokenType";
	public static final String PARAM_USERNAME = "username";
	public static final String PARAM_CONFIRM_CREDENTIALS = "confirmCredentials";
	public static final String PARAM_PASSWORD = "password";

	private AccountManager mAccountManager;
	// Background thread performing the credential check; interrupted on dialog cancel.
	private Thread mAuthThread;
	private String mAuthtoken;
	private String mAuthtokenType;
	// true = only re-confirm an existing account's credentials, don't add a new one.
	private Boolean mConfirmCredentials = false;
	private final Handler mHandler = new Handler();
	private TextView mMessageView;
	private String mPassword;
	private String mUsername;
	private EditText mPasswordEdit;
	private EditText mUsernameEdit;
	// true when no username came in via the intent, i.e. a brand-new account.
	private boolean mRequestNewAccount = false;
	private View mOkButton;

	@Override
	public void onCreate(Bundle neato) {
		super.onCreate(neato);
		mAccountManager = AccountManager.get(this);
		final Intent intent = getIntent();
		mUsername = intent.getStringExtra(PARAM_USERNAME);
		mAuthtokenType = intent.getStringExtra(PARAM_AUTHTOKEN_TYPE);
		// No username in the intent means we are creating a new account.
		mRequestNewAccount = mUsername == null;
		mConfirmCredentials = intent.getBooleanExtra(PARAM_CONFIRM_CREDENTIALS, false);
		initLayout();
	}

	// Sets up the login form, prefilling the username from the selected
	// developer account when none was passed in.
	private void initLayout() {
		requestWindowFeature(Window.FEATURE_NO_TITLE);
		setContentView(R.layout.admob_login);
		mMessageView = (TextView) findViewById(R.id.admob_login_message);
		mUsernameEdit = (EditText) findViewById(R.id.admob_login_username_edit);
		mPasswordEdit = (EditText) findViewById(R.id.admob_login_password_edit);
		if (mUsername == null) {
			mUsername = DeveloperAccountManager.getInstance(this).getSelectedDeveloperAccount()
					.getName();
		}
		mUsernameEdit.setText(mUsername);
		if (mUsername != null) {
			// Username known: jump straight to the password field.
			mPasswordEdit.requestFocusFromTouch();
		}
		if (getMessage() != null) {
			mMessageView.setText(getMessage());
		}
		mOkButton = findViewById(R.id.admob_login_ok_button);
		mOkButton.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				handleLogin(v);
			}
		});
	}

	// Returns the prompt matching the first missing credential, or null when
	// both username and password are present.
	private CharSequence getMessage() {
		if (TextUtils.isEmpty(mUsername)) {
			CharSequence msg = getString(R.string.admob_data_prompt);
			return msg;
		}
		if (TextUtils.isEmpty(mPassword)) {
			return getString(R.string.admob_password_prompt);
		}
		return null;
	}

	// Indeterminate "authenticating" dialog; cancelling interrupts the auth
	// thread and closes the activity.
	@Override
	protected Dialog onCreateDialog(int id) {
		ProgressDialog dialog = new ProgressDialog(this);
		dialog.setMessage(getString(R.string.admob_authenticating));
		dialog.setIndeterminate(true);
		dialog.setCancelable(true);
		dialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
			@Override
			public void onCancel(DialogInterface dialog) {
				if (mAuthThread != null) {
					mAuthThread.interrupt();
					finish();
				}
			}
		});
		return dialog;
	}

	/**
	 * Validates the form and kicks off the background authentication attempt;
	 * re-shows the prompt message when a field is empty.
	 */
	public void handleLogin(View v) {
		if (mRequestNewAccount) {
			mUsername = mUsernameEdit.getText().toString();
		}
		mPassword = mPasswordEdit.getText().toString();
		if (TextUtils.isEmpty(mUsername) || TextUtils.isEmpty(mPassword)) {
			mMessageView.setText(getMessage());
		} else {
			showProgress();
			// Result is delivered back on mHandler via onAuthenticationResult().
			mAuthThread = AdmobAuthenticationUtilities.attemptAuth(mUsername, mPassword, mHandler,
					AdmobAuthenticatorActivity.this);
		}
	}

	// Finishes the confirm-credentials flow: stores the (re-verified) password
	// and reports the boolean outcome to the authenticator framework.
	private void finishConfirmCredentials(boolean result) {
		Account account = new Account(mUsername, AdmobAccountAuthenticator.ACCOUNT_TYPE_ADMOB);
		mAccountManager.setPassword(account, mPassword);
		Intent intent = new Intent();
		intent.putExtra(AccountManager.KEY_BOOLEAN_RESULT, result);
		setAccountAuthenticatorResult(intent.getExtras());
		setResult(RESULT_OK, intent);
		finish();
	}

	// Finishes the login flow: creates or updates the account and returns the
	// account (and auth token, when the requested type matches) to the caller.
	private void finishLogin() {
		Account account = new Account(mUsername, AdmobAccountAuthenticator.ACCOUNT_TYPE_ADMOB);

		if (mRequestNewAccount) {
			mAccountManager.addAccountExplicitly(account, mPassword, null);
			ContentResolver.setSyncAutomatically(account, ContactsContract.AUTHORITY, true);
		} else {
			mAccountManager.setPassword(account, mPassword);
		}
		Intent intent = new Intent();
		// NOTE(review): the password itself is used as the auth token here —
		// presumably intentional for the AdMob legacy API; verify.
		mAuthtoken = mPassword;
		intent.putExtra(AccountManager.KEY_ACCOUNT_NAME, mUsername);
		intent.putExtra(AccountManager.KEY_ACCOUNT_TYPE, AdmobAccountAuthenticator.ACCOUNT_TYPE_ADMOB);
		if (mAuthtokenType != null
				&& mAuthtokenType.equals(AdmobAccountAuthenticator.AUTHTOKEN_TYPE_ADMOB)) {
			intent.putExtra(AccountManager.KEY_AUTHTOKEN, mAuthtoken);
		}
		setAccountAuthenticatorResult(intent.getExtras());
		setResult(RESULT_OK, intent);
		finish();
	}

	@SuppressWarnings("deprecation")
	private void showProgress() {
		showDialog(0);
	}

	@SuppressWarnings("deprecation")
	private void hideProgress() {
		try {
			dismissDialog(0);
		} catch (IllegalArgumentException e) {
			// Dialog was never shown; nothing to dismiss.
			Log.e("AdMobAuthenticator", "dismissDialog without open", e);
		}
	}

	/**
	 * Callback invoked (on the UI thread, via mHandler) when the background
	 * authentication attempt completes. "true" means success; any other value
	 * is an error code that is mapped to a toast and/or inline message.
	 */
	public void onAuthenticationResult(String result) {
		hideProgress();
		if ("true".equalsIgnoreCase(result)) {
			if (!mConfirmCredentials) {
				finishLogin();
			} else {
				finishConfirmCredentials(true);
			}
		} else {
			if (AdmobRequest.ERROR_NETWORK_ERROR.equals(result)) {
				Toast.makeText(AdmobAuthenticatorActivity.this, getString(R.string.network_error),
						Toast.LENGTH_SHORT).show();
			}
			if (AdmobRequest.ERROR_RATE_LIMIT_EXCEEDED.equals(result)) {
				Toast.makeText(AdmobAuthenticatorActivity.this,
						getString(R.string.admob_ratelimit_error), Toast.LENGTH_LONG).show();
			}
			if (AdmobRequest.ERROR_REQUEST_INVALID.equals(result)) {
				Toast.makeText(AdmobAuthenticatorActivity.this,
						getString(R.string.admob_linked_accounts_error), Toast.LENGTH_LONG).show();
			}
			if (mRequestNewAccount) {
				mMessageView.setText(getString(R.string.admob_auth_failed));
			} else {
				mMessageView.setText(getString(R.string.admob_wrong_password));
			}
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.felix.ipojo.test.composite.provides;

import java.util.Properties;

import org.apache.felix.ipojo.ComponentFactory;
import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.Factory;
import org.apache.felix.ipojo.architecture.Architecture;
import org.apache.felix.ipojo.junit4osgi.OSGiTestCase;
import org.apache.felix.ipojo.test.composite.component.TotoProvider;
import org.apache.felix.ipojo.test.composite.service.Tota;
import org.apache.felix.ipojo.test.composite.util.Utils;
import org.osgi.framework.ServiceReference;

/**
 * Integration test for the "comp-2" composite: checks that the composite
 * instance ("under") becomes VALID only when both a Toto provider instance and
 * the tata factory are started, and that the delegated Tota service routes
 * invocations to the expected providers (observed via the TotoProvider
 * static counters).
 */
public class TestComp2 extends OSGiTestCase {

	private ComponentFactory tataFactory;
	private ComponentFactory totoFactory;
	// Two independent Toto provider instances used as alternating imports.
	private ComponentInstance totoProv, totoProv2;
	// The composite instance under test (factory "comp-2", name "ff").
	private ComponentInstance under;
	private ComponentFactory tataFactory2;

	// Creates the provider instances (stopped) and the composite under test;
	// tata factories start stopped so the test can control validity.
	public void setUp() {
		tataFactory = (ComponentFactory) Utils.getFactoryByName(getContext(), "tata");
		totoFactory = (ComponentFactory) Utils.getFactoryByName(getContext(), "toto");
		tataFactory2 = (ComponentFactory) Utils.getFactoryByName(getContext(), "comp-6");
		tataFactory2.stop();
		tataFactory.stop();
		Properties props = new Properties();
		props.put("instance.name","toto provider");
		try {
			totoProv = totoFactory.createComponentInstance(props);
		} catch(Exception e) {
			e.printStackTrace();
		}
		Properties props3 = new Properties();
		props3.put("instance.name","toto provider 2");
		try {
			totoProv2 = totoFactory.createComponentInstance(props3);
		} catch(Exception e) {
			e.printStackTrace();
		}
		totoProv.stop();
		totoProv2.stop();
		Factory factory = Utils.getFactoryByName(getContext(), "comp-2");
		Properties props2 = new Properties();
		props2.put("instance.name","ff");
		try {
			under = factory.createComponentInstance(props2);
		} catch(Exception e) {
			e.printStackTrace();
		}
	}

	// Restarts the factories, disposes the provider instances, and resets the
	// static invocation counters so tests stay independent.
	public void tearDown() {
		tataFactory.start();
		totoProv.dispose();
		totoProv = null;
		totoProv2.dispose();
		totoProv2 = null;
		tataFactory2.start();
		// Reset counters
		TotoProvider.toto = 0;
		TotoProvider.toto_2 = 0;
		TotoProvider.toto_3 = 0;
		TotoProvider.toto_4 = 0;
		TotoProvider.toto1 = 0;
	}

	public void testSimple() {
		// Neither factory nor instance
		assertTrue("Assert under state - 1", under.getState() == ComponentInstance.INVALID);
		assertNull("Assert no tota service - 1", getContext().getServiceReference(Tota.class.getName()));

		// Start the importer
		totoProv.start();
		// Still invalid: the tata factory has not been started yet.
		assertTrue("Assert under state - 2 ("+under.getState()+")", under.getState() == ComponentInstance.INVALID);
		assertNull("Assert no tota service - 2", getContext().getServiceReference(Tota.class.getName()));

		// Start the factory
		tataFactory.start();
		// Both dependencies satisfied: the composite becomes valid and
		// publishes the Tota service.
		assertTrue("Assert under state - 3", under.getState() == ComponentInstance.VALID);
		assertNotNull("Assert tota service - 3", getContext().getServiceReference(Tota.class.getName()));
		ServiceReference ref = getContext().getServiceReference(Tota.class.getName());
		Tota tota = (Tota) getContext().getService(ref);
		invokeAll(tota);
		// Check toto
		Properties props = tota.getProps();
		Integer toto = (Integer) props.get("toto");
		Integer toto_2 = (Integer) props.get("toto_2");
		Integer toto_3 = (Integer) props.get("toto_3");
		Integer toto_4 = (Integer) props.get("toto_4");
		Integer toto_1 = (Integer) props.get("toto1");
		assertEquals("Assert toto - 3 ("+toto.intValue()+")", toto.intValue(), 1);
		assertEquals("Assert toto_2 - 3", toto_2.intValue(), 1);
		assertEquals("Assert toto_3 - 3", toto_3.intValue(), 1);
		assertEquals("Assert toto_4 - 3", toto_4.intValue(), 0);
		assertEquals("Assert toto1 - 3 (" + toto_1.intValue() + ")", toto_1.intValue(), 1);
		//Check tata
		props = tota.getPropsTata();
		Integer tata = (Integer) props.get("tata");
		assertEquals("Assert tata - 3", tata.intValue(), 1);
		getContext().ungetService(ref);
		tota = null;

		// Start a second import
		totoProv2.start();
		assertTrue("Assert under state - 4", under.getState() == ComponentInstance.VALID);
		assertNotNull("Assert tota service - 4", getContext().getServiceReference(Tota.class.getName()));
		ref = getContext().getServiceReference(Tota.class.getName());
		tota = (Tota) getContext().getService(ref);
		invokeAll(tota);
		// Check toto
		props = tota.getProps();
		toto = (Integer) props.get("toto");
		toto_2 = (Integer) props.get("toto_2");
		toto_3 = (Integer) props.get("toto_3");
		toto_4 = (Integer) props.get("toto_4");
		toto_1 = (Integer) props.get("toto1");
		// Counters are cumulative across invocations; toto1 jumps to 3 because
		// both providers now receive the toto1 invocation.
		assertEquals("Assert toto - 4 ("+toto.intValue()+")", toto.intValue(), 2);
		assertEquals("Assert toto_2 - 4 ("+toto_2.intValue()+")", toto_2.intValue(), 2);
		assertEquals("Assert toto_3 - 4", toto_3.intValue(), 2);
		assertEquals("Assert toto_4 - 4", toto_4.intValue(), 0);
		assertEquals("Assert toto1 - 4", toto_1.intValue(), 3);
		//Check tata
		props = tota.getPropsTata();
		tata = (Integer) props.get("tata");
		assertEquals("Assert tata - 4", tata.intValue(), 2);
		getContext().ungetService(ref);
		tota = null;

		// Stopping the tata factory invalidates the composite and withdraws
		// the Tota service.
		tataFactory.stop();
		assertTrue("Assert under state - 5", under.getState() == ComponentInstance.INVALID);
		assertNull("Assert no tota service - 5", getContext().getServiceReference(Tota.class.getName()));

		totoProv2.stop();
		tataFactory.start();
		// Back to one provider + factory: valid again.
		assertTrue("Assert under state - 6", under.getState() == ComponentInstance.VALID);
		assertNotNull("Assert tota service - 6", getContext().getServiceReference(Tota.class.getName()));
		ref = getContext().getServiceReference(Tota.class.getName());
		tota = (Tota) getContext().getService(ref);
		invokeAll(tota);
		// Check toto
		props = tota.getProps();
		toto = (Integer) props.get("toto");
		toto_2 = (Integer) props.get("toto_2");
		toto_3 = (Integer) props.get("toto_3");
		toto_4 = (Integer) props.get("toto_4");
		toto_1 = (Integer) props.get("toto1");
		assertEquals("Assert toto - 6 ("+toto.intValue()+")", toto.intValue(), 3);
		assertEquals("Assert toto_2 - 6 ("+toto_2.intValue()+")", toto_2.intValue(), 3);
		assertEquals("Assert toto_3 - 6", toto_3.intValue(), 3);
		assertEquals("Assert toto_4 - 6", toto_4.intValue(), 0);
		assertEquals("Assert toto1 - 6", toto_1.intValue(), 4);
		//Check tata
		props = tota.getPropsTata();
		tata = (Integer) props.get("tata");
		// tata counter restarted at 1: the tata instance was re-created when
		// its factory was restarted.
		assertEquals("Assert tata - 6", tata.intValue(), 1);
		getContext().ungetService(ref);
		tota = null;

		// Is arch exposed
		assertNotNull("Test arch", Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "ff"));

		totoProv.stop();
		// The Architecture service stays registered even while the composite
		// is invalid.
		assertTrue("Assert under state - 7", under.getState() == ComponentInstance.INVALID);
		assertNotNull("Test arch-2", Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "ff"));
		assertNull("Assert no tota service - 7", getContext().getServiceReference(Tota.class.getName()));

		under.dispose();
		under = null;
	}

	// Exercises the "tata" half of the Tota interface across all primitive
	// parameter/return types.
	private void invoke(Tota tota) {
		tota.tata();
		assertEquals("Assert invoke tataint", tota.tataInt(2), 2);
		assertEquals("Assert invoke tataLong", tota.tataLong(2), 2);
		assertEquals("Assert invoke tataDouble", tota.tataDouble(2), 2);
		assertEquals("Assert invoke tataChar", tota.tataChar('a'), 'a');
		assertTrue("Assert invoke tataBoolean", tota.tataBoolean(true));
		assertEquals("Assert invoke tataByte", tota.tataByte((byte)2), 2);
		assertEquals("Assert invoke tataShort", tota.tataShort((short)5), 5);
		assertEquals("Assert invoke tataFloat", tota.tataFloat(5), 5);
	}

	// Exercises the "toto" half of the Tota interface (incrementing the
	// TotoProvider counters checked in testSimple).
	private void invokeToto(Tota tota) {
		tota.toto();
		assertEquals("Assert toto", tota.toto("foo"), "foo");
		tota.toto(1,2);
		tota.toto1("foo2");
	}

	// Invokes every delegated operation once.
	private void invokeAll(Tota tota) {
		invoke(tota);
		invokeToto(tota);
	}
}
/*
 * Copyright 2010-2014 Ning, Inc.
 * Copyright 2014-2020 Groupon, Inc
 * Copyright 2020-2020 Equinix, Inc
 * Copyright 2014-2021 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.plugin.notification.setup;

import java.io.IOException;
import java.sql.SQLException;
import java.util.UUID;

import org.apache.commons.mail.EmailException;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.account.api.AccountApiException;
import org.killbill.billing.account.api.AccountEmail;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.entitlement.api.Entitlement;
import org.killbill.billing.entitlement.api.EntitlementSpecifier;
import org.killbill.billing.entitlement.api.Subscription;
import org.killbill.billing.entitlement.api.SubscriptionApiException;
import org.killbill.billing.entitlement.api.SubscriptionEventType;
import org.killbill.billing.invoice.api.DryRunArguments;
import org.killbill.billing.invoice.api.DryRunType;
import org.killbill.billing.invoice.api.Invoice;
import org.killbill.billing.invoice.api.InvoiceApiException;
import org.killbill.billing.invoice.api.InvoicePayment;
import org.killbill.billing.notification.plugin.api.ExtBusEvent;
import org.killbill.billing.notification.plugin.api.ExtBusEventType;
import org.killbill.billing.notification.plugin.api.NotificationPluginApiRetryException;
import org.killbill.billing.osgi.libs.killbill.OSGIConfigPropertiesService;
import org.killbill.billing.osgi.libs.killbill.OSGIKillbillAPI;
import org.killbill.billing.osgi.libs.killbill.OSGIKillbillClock;
import org.killbill.billing.osgi.libs.killbill.OSGIKillbillDataSource;
import org.killbill.billing.osgi.libs.killbill.OSGIKillbillEventDispatcher;
import org.killbill.billing.payment.api.Payment;
import org.killbill.billing.payment.api.PaymentApiException;
import org.killbill.billing.payment.api.PaymentTransaction;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.billing.payment.api.TransactionStatus;
import org.killbill.billing.payment.api.TransactionType;
import org.killbill.billing.plugin.notification.api.InvoiceFormatterFactory;
import org.killbill.billing.plugin.notification.dao.ConfigurationDao;
import org.killbill.billing.plugin.notification.dao.gen.tables.pojos.EmailNotificationsConfiguration;
import org.killbill.billing.plugin.notification.email.EmailContent;
import org.killbill.billing.plugin.notification.email.EmailSender;
import org.killbill.billing.plugin.notification.exception.EmailNotificationException;
import org.killbill.billing.plugin.notification.generator.ResourceBundleFactory;
import org.killbill.billing.plugin.notification.generator.TemplateRenderer;
import org.killbill.billing.plugin.notification.templates.MustacheTemplateEngine;
import org.killbill.billing.tenant.api.TenantApiException;
import org.killbill.billing.util.callcontext.TenantContext;
import org.osgi.util.tracker.ServiceTracker;
import org.skife.config.TimeSpan;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.samskivert.mustache.MustacheException;

/**
 * Kill Bill OSGi event handler that renders and sends notification emails to
 * the account's configured address when selected billing events occur
 * (upcoming invoice, invoice creation, payment success/failure/refund,
 * subscription cancellation).
 *
 * <p>Only the event types listed in {@link #EVENTS_TO_CONSIDER} are processed,
 * and only if the event type is enabled for the tenant (or registered per
 * account) — see {@code isEventTypeAllowed}.
 */
public class EmailNotificationListener implements OSGIKillbillEventDispatcher.OSGIKillbillEventHandler {

    private static final Logger logger = LoggerFactory.getLogger(EmailNotificationListener.class);

    // System property holding the dry-run notification schedule (e.g. "7d"),
    // used to compute the target date of the upcoming-invoice dry run.
    private static final String INVOICE_DRY_RUN_TIME_PROPERTY = "org.killbill.invoice.dryRunNotificationSchedule";

    // Shared no-op DryRunArguments instance (all getters return null).
    private static final NullDryRunArguments NULL_DRY_RUN_ARGUMENTS = new NullDryRunArguments();

    private final OSGIKillbillAPI osgiKillbillAPI;
    private final TemplateRenderer templateRenderer;
    private final OSGIConfigPropertiesService configProperties;
    private final EmailSender emailSender;
    private final OSGIKillbillClock clock;
    private final ConfigurationDao dao;
    private final EmailNotificationConfigurationHandler emailNotificationConfigurationHandler;

    // Event types this listener reacts to; everything else is ignored early.
    // NOTE(review): raw ImmutableList.Builder — could be Builder<ExtBusEventType>.
    public static final ImmutableList<ExtBusEventType> EVENTS_TO_CONSIDER = new ImmutableList.Builder()
            .add(ExtBusEventType.INVOICE_NOTIFICATION)
            .add(ExtBusEventType.INVOICE_CREATION)
            .add(ExtBusEventType.INVOICE_PAYMENT_SUCCESS)
            .add(ExtBusEventType.INVOICE_PAYMENT_FAILED)
            .add(ExtBusEventType.SUBSCRIPTION_CANCEL)
            .build();

    /**
     * Wires the listener with the Kill Bill APIs, the template renderer and
     * the per-account configuration DAO.
     *
     * @throws SQLException if the configuration DAO cannot be created from the data source
     */
    public EmailNotificationListener(final OSGIKillbillClock clock,
                                     final OSGIKillbillAPI killbillAPI,
                                     final OSGIConfigPropertiesService configProperties,
                                     OSGIKillbillDataSource dataSource,
                                     EmailNotificationConfigurationHandler emailNotificationConfigurationHandler,
                                     final ServiceTracker<InvoiceFormatterFactory, InvoiceFormatterFactory> invoiceFormatterTracker) throws SQLException {
        this.osgiKillbillAPI = killbillAPI;
        this.configProperties = configProperties;
        this.clock = clock;
        this.emailSender = new EmailSender(configProperties);
        this.templateRenderer = new TemplateRenderer(new MustacheTemplateEngine(), new ResourceBundleFactory(killbillAPI.getTenantUserApi()), killbillAPI.getTenantUserApi());
        this.templateRenderer.setInvoiceFormatterTracker(invoiceFormatterTracker);
        this.dao = new ConfigurationDao(dataSource.getDataSource());
        this.emailNotificationConfigurationHandler = emailNotificationConfigurationHandler;
    }

    /**
     * Entry point called by the Kill Bill event dispatcher. Filters events,
     * resolves the account, and dispatches to the per-event-type email sender.
     *
     * <p>All failures are logged and swallowed, except {@link EmailException},
     * which is rethrown as {@link NotificationPluginApiRetryException} so the
     * platform retries the delivery.
     */
    @Override
    public void handleKillbillEvent(final ExtBusEvent killbillEvent) {
        if (!EVENTS_TO_CONSIDER.contains(killbillEvent.getEventType())) {
            return;
        }

        // Skip events that are neither enabled tenant-wide nor registered per account.
        if(!isEventTypeAllowed(killbillEvent.getAccountId(),killbillEvent.getTenantId(),killbillEvent.getEventType())) {
            return;
        }

        // TODO see https://github.com/killbill/killbill-platform/issues/5
        // Swap the context classloader so template/resource lookups resolve
        // against this bundle; restored in the finally block.
        final ClassLoader previousClassLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());

        try {
            final Account account = osgiKillbillAPI.getAccountUserApi().getAccountById(killbillEvent.getAccountId(), new EmailNotificationContext(killbillEvent.getAccountId(), killbillEvent.getTenantId()));

            // Without a destination address there is nothing to do.
            final String to = account.getEmail();
            if (to == null) {
                logger.info("Account {} does not have an email address configured, skip...", account.getId());
                return;
            }

            final EmailNotificationContext context = new EmailNotificationContext(killbillEvent.getAccountId(), killbillEvent.getTenantId());
            switch (killbillEvent.getEventType()) {
                case INVOICE_NOTIFICATION:
                    sendEmailForUpComingInvoice(account, killbillEvent, context);
                    break;

                case INVOICE_PAYMENT_SUCCESS:
                case INVOICE_PAYMENT_FAILED:
                    sendEmailForPayment(account, killbillEvent, context);
                    break;

                case SUBSCRIPTION_CANCEL:
                    sendEmailForCancelledSubscription(account, killbillEvent, context);
                    break;

                case INVOICE_CREATION:
                    sendEmailForInvoiceCreation(account, killbillEvent, context);
                    break;

                default:
                    break;
            }

            logger.info("Received event {} for object type = {}, id = {}",
                    killbillEvent.getEventType(), killbillEvent.getObjectType(), killbillEvent.getObjectId());
        } catch (final EmailNotificationException e) {
            logger.warn(e.getMessage(), e);
        } catch (final AccountApiException e) {
            logger.warn("Unable to find account: {}", killbillEvent.getAccountId(), e);
        } catch (InvoiceApiException e) {
            logger.warn("Fail to retrieve invoice for account {}", killbillEvent.getAccountId(), e);
        } catch (SubscriptionApiException e) {
            logger.warn("Fail to retrieve subscription for account {}", killbillEvent.getAccountId(), e);
        } catch (PaymentApiException e) {
            logger.warn("Fail to send email for account {}", killbillEvent.getAccountId(), e);
        } catch (EmailException e) {
            logger.warn("Fail to send email for account {}", killbillEvent.getAccountId(), e);
            // Attempt a retry
            throw new NotificationPluginApiRetryException(e);
        } catch (IOException e) {
            logger.warn("Fail to send email for account {}", killbillEvent.getAccountId(), e);
        } catch (TenantApiException e) {
            logger.warn("Fail to send email for account {}", killbillEvent.getAccountId(), e);
        } catch (IllegalArgumentException e) {
            logger.warn(e.getMessage(), e);
        } catch (MustacheException e) {
            logger.warn(e.getMessage(), e);
        } finally {
            Thread.currentThread().setContextClassLoader(previousClassLoader);
        }
    }

    /**
     * Returns true if the event type is enabled in the tenant-level
     * configuration, or failing that, registered for this specific account in
     * the plugin's database. SQL errors are logged and treated as "not allowed".
     */
    private boolean isEventTypeAllowed(final UUID kbAccountId, final UUID kbTenantId, final ExtBusEventType eventType) {
        final EmailNotificationsConfiguration registeredEventType;
        final EmailNotificationConfiguration configuration = emailNotificationConfigurationHandler.getConfigurable(kbTenantId);

        // Tenant-wide opt-in takes precedence; no DB lookup needed.
        if (configuration.getEventTypes().contains(eventType.toString())) {
            return true;
        }

        try {
            registeredEventType = this.dao.getEventTypePerAccount(kbAccountId,kbTenantId,eventType);
        } catch (SQLException e) {
            logger.error("Error retrieving email notification event registry", e);
            return false;
        }

        if (registeredEventType == null) {
            logger.warn("Registration of event {} is not available for account {}", eventType, kbAccountId);
            return false;
        }

        return true;
    }

    /**
     * Handles INVOICE_NOTIFICATION: runs a dry-run invoice generation at
     * "now + dryRunNotificationSchedule" and emails the resulting upcoming
     * invoice, if any.
     */
    private void sendEmailForUpComingInvoice(final Account account, final ExtBusEvent killbillEvent, final TenantContext context) throws IOException, InvoiceApiException, EmailException, TenantApiException, EmailNotificationException {
        Preconditions.checkArgument(killbillEvent.getEventType() == ExtBusEventType.INVOICE_NOTIFICATION, String.format("Unexpected event %s", killbillEvent.getEventType()));

        final String dryRunTimePropValue = configProperties.getString(INVOICE_DRY_RUN_TIME_PROPERTY);
        Preconditions.checkArgument(dryRunTimePropValue != null, String.format("Cannot find property %s", INVOICE_DRY_RUN_TIME_PROPERTY));

        final TimeSpan span = new TimeSpan(dryRunTimePropValue);
        final DateTime now = clock.getClock().getUTCNow();
        final DateTime targetDateTime = now.plus(span.getMillis());

        final PluginCallContext callContext = new PluginCallContext(EmailNotificationActivator.PLUGIN_NAME, now, context.getAccountId(), context.getTenantId());
        // Target date is converted to the account's own time zone.
        final Invoice invoice = osgiKillbillAPI.getInvoiceUserApi().triggerDryRunInvoiceGeneration(account.getId(), new LocalDate(targetDateTime, account.getTimeZone()), NULL_DRY_RUN_ARGUMENTS, callContext);

        if (invoice != null) {
            final EmailContent emailContent = templateRenderer.generateEmailForUpComingInvoice(account, invoice, context);
            sendEmail(account, emailContent, context);
        }
    }

    /**
     * Handles SUBSCRIPTION_CANCEL: sends either the "cancellation effective"
     * or "cancellation requested" template depending on the subscription's
     * current entitlement state.
     */
    private void sendEmailForCancelledSubscription(final Account account, final ExtBusEvent killbillEvent, final TenantContext context) throws SubscriptionApiException, IOException, EmailException, TenantApiException, EmailNotificationException {
        Preconditions.checkArgument(killbillEvent.getEventType() == ExtBusEventType.SUBSCRIPTION_CANCEL, String.format("Unexpected event %s", killbillEvent.getEventType()));

        final UUID subscriptionId = killbillEvent.getObjectId();
        final Subscription subscription = osgiKillbillAPI.getSubscriptionApi().getSubscriptionForEntitlementId(subscriptionId, context);
        if (subscription != null) {
            final EmailContent emailContent = subscription.getState() == Entitlement.EntitlementState.CANCELLED ?
                    templateRenderer.generateEmailForSubscriptionCancellationEffective(account, subscription, context) :
                    templateRenderer.generateEmailForSubscriptionCancellationRequested(account, subscription, context);
            sendEmail(account, emailContent, context);
        }
    }

    /**
     * Handles INVOICE_PAYMENT_SUCCESS / INVOICE_PAYMENT_FAILED: inspects the
     * latest transaction of the invoice's latest payment and sends the
     * matching template (successful payment, failed payment, or refund).
     * Transaction types other than PURCHASE/REFUND are ignored.
     */
    private void sendEmailForPayment(final Account account, final ExtBusEvent killbillEvent, final TenantContext context) throws InvoiceApiException, IOException, EmailException, PaymentApiException, TenantApiException, EmailNotificationException {
        final UUID invoiceId = killbillEvent.getObjectId();
        if (invoiceId == null) {
            return;
        }

        Preconditions.checkArgument(killbillEvent.getEventType() == ExtBusEventType.INVOICE_PAYMENT_FAILED || killbillEvent.getEventType() == ExtBusEventType.INVOICE_PAYMENT_SUCCESS, String.format("Unexpected event %s", killbillEvent.getEventType()));

        final Invoice invoice = osgiKillbillAPI.getInvoiceUserApi().getInvoice(invoiceId, context);
        if (invoice.getNumberOfPayments() == 0) {
            // Aborted payment? Maybe no default payment method...
            return;
        }

        // Only the most recent payment and its most recent transaction are considered.
        final InvoicePayment invoicePayment = invoice.getPayments().get(invoice.getNumberOfPayments() - 1);
        final Payment payment = osgiKillbillAPI.getPaymentApi().getPayment(invoicePayment.getPaymentId(), false, false, ImmutableList.<PluginProperty>of(), context);
        final PaymentTransaction lastTransaction = payment.getTransactions().get(payment.getTransactions().size() - 1);

        if (lastTransaction.getTransactionType() != TransactionType.PURCHASE &&
                lastTransaction.getTransactionType() != TransactionType.REFUND) {
            // Ignore for now, but this is easy to add...
            return;
        }

        EmailContent emailContent = null;
        if (lastTransaction.getTransactionType() == TransactionType.REFUND && lastTransaction.getTransactionStatus() == TransactionStatus.SUCCESS) {
            emailContent = templateRenderer.generateEmailForPaymentRefund(account, lastTransaction, context);
        } else {
            if (lastTransaction.getTransactionType() == TransactionType.PURCHASE && lastTransaction.getTransactionStatus() == TransactionStatus.SUCCESS) {
                emailContent = templateRenderer.generateEmailForSuccessfulPayment(account, invoice, context);
            } else if (lastTransaction.getTransactionType() == TransactionType.PURCHASE && lastTransaction.getTransactionStatus() == TransactionStatus.PAYMENT_FAILURE) {
                emailContent = templateRenderer.generateEmailForFailedPayment(account, invoice, context);
            }
        }
        // emailContent stays null for unhandled status combinations (e.g. PENDING).
        if (emailContent != null) {
            sendEmail(account, emailContent, context);
        }
    }

    /**
     * Handles INVOICE_CREATION: emails the newly-created invoice.
     */
    private void sendEmailForInvoiceCreation(final Account account, final ExtBusEvent killbillEvent, final TenantContext context) throws InvoiceApiException, IOException, TenantApiException, EmailException, EmailNotificationException {
        Preconditions.checkArgument(killbillEvent.getEventType() == ExtBusEventType.INVOICE_CREATION, String.format("Unexpected event %s", killbillEvent.getEventType()));

        final Invoice invoice = osgiKillbillAPI.getInvoiceUserApi().getInvoice(killbillEvent.getObjectId(), context);

        if (invoice != null) {
            final EmailContent emailContent = templateRenderer.generateEmailForInvoiceCreation(account, invoice, context);
            sendEmail(account, emailContent, context);
        } else {
            logger.warn("Fail to send email for account {}. Invoice not found for object {}", killbillEvent.getAccountId().toString(), killbillEvent.getObjectId().toString());
        }
    }

    /**
     * Sends the rendered email to the account address, CC'ing every
     * additional email registered on the account. HTML vs plain-text is
     * decided by the tenant configuration.
     */
    private void sendEmail(final Account account, final EmailContent emailContent, final TenantContext context) throws IOException, EmailException, EmailNotificationException {
        final Iterable<String> cc = Iterables.transform(osgiKillbillAPI.getAccountUserApi().getEmails(account.getId(), context), new Function<AccountEmail, String>() {
            @Override
            public String apply(final AccountEmail input) {
                return input == null ? null : input.getEmail();
            }
        });

        final EmailNotificationConfiguration emailNotificationConfiguration = getConfiguration(context);

        if (emailNotificationConfiguration.sendHTMLEmail()) {
            emailSender.sendHTMLEmail(ImmutableList.of(account.getEmail()), ImmutableList.copyOf(cc), emailContent.getSubject(), emailContent.getBody(), emailNotificationConfiguration.getSmtp());
        } else {
            emailSender.sendPlainTextEmail(ImmutableList.of(account.getEmail()), ImmutableList.copyOf(cc), emailContent.getSubject(), emailContent.getBody(), emailNotificationConfiguration.getSmtp());
        }
    }

    /**
     * Minimal TenantContext carrying only the account and tenant ids, used
     * for the read-only API calls made by this listener.
     */
    private static final class EmailNotificationContext implements TenantContext {

        private final UUID accountId;
        private final UUID tenantId;

        private EmailNotificationContext(final UUID accountId, final UUID tenantId) {
            this.accountId = accountId;
            this.tenantId = tenantId;
        }

        @Override
        public UUID getAccountId() {
            return accountId;
        }

        @Override
        public UUID getTenantId() {
            return tenantId;
        }
    }

    /**
     * DryRunArguments implementation with no constraints: every getter
     * returns null, letting the invoice system pick its defaults.
     */
    private final static class NullDryRunArguments implements DryRunArguments {

        @Override
        public DryRunType getDryRunType() {
            return null;
        }

        @Override
        public EntitlementSpecifier getEntitlementSpecifier() {
            return null;
        }

        @Override
        public SubscriptionEventType getAction() {
            return null;
        }

        @Override
        public UUID getSubscriptionId() {
            return null;
        }

        @Override
        public LocalDate getEffectiveDate() {
            return null;
        }

        @Override
        public UUID getBundleId() {
            return null;
        }

        @Override
        public BillingActionPolicy getBillingActionPolicy() {
            return null;
        }
    }

    // Resolves the per-tenant plugin configuration for the given context.
    private EmailNotificationConfiguration getConfiguration(final TenantContext context){
        return emailNotificationConfigurationHandler.getConfigurable(context.getTenantId());
    }
}
package jdepend.swingui;

import jdepend.framework.*;

import javax.swing.*;
import javax.swing.border.BevelBorder;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.InputEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.IOException;
import java.util.*;
import java.util.List;

/**
 * The <code>JDepend</code> class analyzes directories of Java class files,
 * generates metrics for each Java package, and reports the metrics in a Swing
 * tree.
 *
 * @author <b>Mike Clark</b>
 * @author Clarkware Consulting, Inc.
 */
public class JDepend implements ParserListener {

    // Underlying (non-GUI) analyzer that parses classes and computes metrics.
    private jdepend.framework.JDepend analyzer;

    private JFrame frame;
    private StatusPanel statusPanel;
    private JTextField statusField;
    private JProgressBar progressBar;

    // Two tree views over the same results: incoming (afferent) and
    // outgoing (efferent) package dependencies.
    private DependTree afferentTree;
    private DependTree efferentTree;

    // Maps a menu key to its space-separated child item keys (see createMenubar).
    private Hashtable<String, String> resourceStrings;
    // Maps an action command name to its Swing Action.
    private Hashtable<String, Action> actions;

    private static Font BOLD_FONT = new Font("dialog", Font.BOLD, 12);

    /**
     * Constructs a <code>JDepend</code> instance.
     */
    public JDepend() {

        analyzer = new jdepend.framework.JDepend();

        // Receive per-class parse callbacks to drive the progress bar.
        analyzer.addParseListener(this);

        //
        // Force the cross platform L&F.
        //
        try {
            UIManager.setLookAndFeel(UIManager
                    .getCrossPlatformLookAndFeelClassName());
            //UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (Exception e) {
            e.printStackTrace();
        }

        //
        // Install the resource string table.
        //
        resourceStrings = new Hashtable<String, String>();
        resourceStrings.put("menubar", "File");
        resourceStrings.put("File", "About Exit");

        //
        // Install the action table.
        //
        actions = new Hashtable<String, Action>();
        actions.put("About", new AboutAction());
        actions.put("Exit", new ExitAction());
    }

    /**
     * Adds the specified directory name to the collection of directories to be
     * analyzed.
     *
     * @param name Directory name.
     * @throws IOException If the directory does not exist.
     */
    public void addDirectory(String name) throws IOException {
        analyzer.addDirectory(name);
    }

    /**
     * Sets the package filter.
     *
     * @param filter Package filter.
     */
    public void setFilter(PackageFilter filter) {
        analyzer.setFilter(filter);
    }

    /**
     * Sets the comma-separated list of components.
     */
    public void setComponents(String components) {
        analyzer.setComponents(components);
    }

    /**
     * Analyzes the registered directories, generates metrics for each Java
     * package, and reports the metrics in a graphical format.
     */
    public void analyze() {

        display();

        startProgressMonitor(analyzer.countClasses());

        List<JavaPackage> packages = new ArrayList<JavaPackage>(analyzer.analyze());

        Collections.sort(packages, new PackageComparator(PackageComparator
                .byName()));

        stopProgressMonitor();

        updateTree(packages);
    }

    /**
     * Called whenever a Java source file is parsed into the specified
     * <code>JavaClass</code> instance.
     *
     * @param jClass Parsed Java class.
     */
    public void onParsedJavaClass(final JavaClass jClass) {
        // Parse callbacks arrive off the EDT; marshal the progress update.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                getProgressBar().setValue(getProgressBar().getValue() + 1);
            }
        });
    }

    // Builds and shows the main window.
    private void display() {
        frame = createUI();
        frame.setVisible(true);
    }

    // Rebuilds both dependency trees around a synthetic "root" package whose
    // afferents/efferents are the full analyzed package list.
    private void updateTree(List<JavaPackage> packages) {

        JavaPackage jPackage = new JavaPackage("root");
        jPackage.setAfferents(packages);
        jPackage.setEfferents(packages);

        AfferentNode ah = new AfferentNode(null, jPackage);
        getAfferentTree().setModel(new DependTreeModel(ah));

        EfferentNode eh = new EfferentNode(null, jPackage);
        getEfferentTree().setModel(new DependTreeModel(eh));
    }

    // Swaps the progress bar into the status panel and sizes it to maxValue.
    private void startProgressMonitor(final int maxValue) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                getProgressBar().setMinimum(0);
                getProgressBar().setMaximum(maxValue);
                getStatusPanel().setStatusComponent(getProgressBar());
            }
        });
    }

    // Swaps the text field back into the status panel and reports totals.
    private void stopProgressMonitor() {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                getStatusPanel().setStatusComponent(getStatusField());
                int classCount = analyzer.countClasses();
                int packageCount = analyzer.countPackages();
                showStatusMessage("Analyzed " + packageCount + " packages ("
                        + classCount + " classes).");
            }
        });
    }

    // Assembles the main frame: menu bar, tree panel, status panel; centers
    // it on screen at 700x500.
    private JFrame createUI() {

        JFrame frame = createFrame("JDepend");

        JMenuBar menuBar = createMenubar();
        frame.setJMenuBar(menuBar);

        JPanel treePanel = createTreePanel();
        StatusPanel statusPanel = getStatusPanel();

        frame.getContentPane().add("Center", treePanel);
        frame.getContentPane().add("South", statusPanel);
        frame.pack();

        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        int width = 700;
        int height = 500;
        int x = (screenSize.width - width) / 2;
        int y = (screenSize.height - height) / 2;
        frame.setBounds(x, y, width, height);
        frame.setSize(width, height);

        return frame;
    }

    // Creates the top-level frame; closing the window triggers ExitAction.
    private JFrame createFrame(String title) {

        JFrame frame = new JFrame(title);

        frame.getContentPane().setLayout(new BorderLayout());
        frame.setBackground(SystemColor.control);

        frame.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                new ExitAction().actionPerformed(null);
            }
        });

        return frame;
    }

    // Stacks the efferent tree above the afferent tree in a 2x1 grid.
    private JPanel createTreePanel() {

        JPanel panel = new JPanel();

        panel.setLayout(new GridLayout(2, 1));
        panel.add(getEfferentTree());
        panel.add(getAfferentTree());

        /*
         * panel.setLayout(new GridLayout(1,1)); JSplitPane splitPane = new
         * JSplitPane(JSplitPane.VERTICAL_SPLIT);
         * splitPane.setOneTouchExpandable(true);
         * splitPane.setTopComponent(getEfferentTree());
         * splitPane.setBottomComponent(getAfferentTree());
         * panel.add(splitPane);
         */

        return panel;
    }

    private StatusPanel createStatusPanel() {
        StatusPanel panel = new StatusPanel();
        panel.setStatusComponent(getStatusField());
        return panel;
    }

    private JProgressBar createProgressBar() {
        JProgressBar bar = new JProgressBar();
        bar.setStringPainted(true);
        return bar;
    }

    // Read-only, beveled status line used for messages and errors.
    private JTextField createStatusField() {
        JTextField statusField = new JTextField();
        statusField.setFont(BOLD_FONT);
        statusField.setEditable(false);
        statusField.setForeground(Color.black);
        statusField.setBorder(BorderFactory
                .createBevelBorder(BevelBorder.LOWERED));

        Insets insets = new Insets(5, 5, 5, 5);
        statusField.setMargin(insets);

        return statusField;
    }

    // Builds the menu bar from the "menubar" resource string entry.
    private JMenuBar createMenubar() {

        JMenuBar menuBar = new JMenuBar();

        String[] menuKeys = tokenize(resourceStrings.get("menubar"));
        for (String menuKey : menuKeys) {
            JMenu m = createMenu(menuKey);
            if (m != null) {
                menuBar.add(m);
            }
        }

        return menuBar;
    }

    // Builds one menu; "-" item keys become separators. The menu's mnemonic
    // is its key's first character.
    private JMenu createMenu(String key) {

        String[] itemKeys = tokenize(resourceStrings.get(key));
        JMenu menu = new JMenu(key);
        for (String itemKey : itemKeys) {
            if (itemKey.equals("-")) {
                menu.addSeparator();
            } else {
                JMenuItem mi = createMenuItem(itemKey);
                menu.add(mi);
            }
        }

        char mnemonic = key.charAt(0);
        menu.setMnemonic(mnemonic);

        return menu;
    }

    // Builds one menu item; mnemonic and Ctrl-accelerator both come from the
    // key's first character. Items without a registered Action are disabled.
    // NOTE(review): InputEvent.CTRL_MASK is deprecated in favor of
    // CTRL_DOWN_MASK on modern JDKs.
    private JMenuItem createMenuItem(String key) {

        JMenuItem mi = new JMenuItem(key);

        char mnemonic = key.charAt(0);
        mi.setMnemonic(mnemonic);

        char accelerator = key.charAt(0);
        mi.setAccelerator(KeyStroke.getKeyStroke(accelerator,
                InputEvent.CTRL_MASK));

        mi.setActionCommand(key);

        Action a = getActionForCommand(key);
        if (a != null) {
            mi.addActionListener(a);
            mi.setEnabled(a.isEnabled());
        } else {
            mi.setEnabled(false);
        }

        return mi;
    }

    // Shows an informational message (black) in the status field.
    private void showStatusMessage(final String message) {
        getStatusField().setFont(BOLD_FONT);
        getStatusField().setForeground(Color.black);
        getStatusField().setText(" " + message);
    }

    // Shows an error message (red) in the status field.
    private void showStatusError(final String message) {
        getStatusField().setFont(BOLD_FONT);
        getStatusField().setForeground(Color.red);
        getStatusField().setText(" " + message);
    }

    // Lazily-created accessors for the UI components below.

    private DependTree getAfferentTree() {
        if (afferentTree == null) {
            afferentTree = new DependTree();
            afferentTree.addTreeSelectionListener(new TreeListener());
        }
        return afferentTree;
    }

    private DependTree getEfferentTree() {
        if (efferentTree == null) {
            efferentTree = new DependTree();
            efferentTree.addTreeSelectionListener(new TreeListener());
        }
        return efferentTree;
    }

    private StatusPanel getStatusPanel() {
        if (statusPanel == null) {
            statusPanel = createStatusPanel();
        }
        return statusPanel;
    }

    private JProgressBar getProgressBar() {
        if (progressBar == null) {
            progressBar = createProgressBar();
        }
        return progressBar;
    }

    private JTextField getStatusField() {
        if (statusField == null) {
            statusField = createStatusField();
        }
        return statusField;
    }

    private Action getActionForCommand(String command) {
        return actions.get(command);
    }

    /*
     * Parses the specified string into an array of strings on whitespace
     * boundaries. @param input String to tokenize. @return Strings.
     */
    private String[] tokenize(String input) {

        Vector<String> v = new Vector<String>();
        StringTokenizer t = new StringTokenizer(input);

        while (t.hasMoreTokens()) {
            v.addElement(t.nextToken());
        }

        String cmd[] = new String[v.size()];

        for (int i = 0; i < cmd.length; i++) {
            cmd[i] = v.elementAt(i);
        }

        return cmd;
    }

    // EDT-safe variants of the show* methods, usable from any thread.

    private void postStatusMessage(final String message) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                showStatusMessage(message);
            }
        });
    }

    private void postStatusError(final String message) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                showStatusError(message);
            }
        });
    }

    //
    // Tree selection handler.
    //
    private class TreeListener implements TreeSelectionListener {

        /**
         * Constructs a <code>TreeListener</code> instance.
         */
        TreeListener() {
        }

        /**
         * Callback method triggered whenever the value of the tree selection
         * changes. Shows the selected package's metrics in the status line.
         *
         * @param te Event that characterizes the change.
         */
        public void valueChanged(TreeSelectionEvent te) {

            TreePath path = te.getNewLeadSelectionPath();

            if (path != null) {
                PackageNode node = (PackageNode) path.getLastPathComponent();
                showStatusMessage(node.toMetricsString());
            }
        }
    }

    //
    // About action handler.
    //
    private class AboutAction extends AbstractAction {

        /**
         * Constructs an <code>AboutAction</code> instance.
         */
        AboutAction() {
            super("About");
        }

        /**
         * Handles the action.
         */
        // NOTE(review): Dialog.show() is deprecated; setVisible(true) is the
        // modern equivalent.
        public void actionPerformed(ActionEvent e) {
            AboutDialog d = new AboutDialog(frame);
            d.setModal(true);
            d.setLocation(300, 300);
            d.show();
        }
    }

    //
    // Exit action handler.
    //
    private class ExitAction extends AbstractAction {

        /**
         * Constructs an <code>ExitAction</code> instance.
         */
        ExitAction() {
            super("Exit");
        }

        /**
         * Handles the action. Disposes the frame and terminates the JVM.
         */
        public void actionPerformed(ActionEvent e) {
            frame.dispose();
            System.exit(0);
        }
    }

    // Prints an optional error message plus the command-line usage, then exits
    // with status 1.
    private void usage(String message) {
        if (message != null) {
            System.err.println("\n" + message);
        }

        String baseUsage = "\nJDepend ";

        System.err.println("");
        System.err.println("usage: ");
        System.err.println(baseUsage + "-components <components> "
                + "<directory> [directory2 [directory 3] ...]");
        System.exit(1);
    }

    // Parses command-line arguments (-components plus one or more directories)
    // and runs the analysis.
    private void instanceMain(String[] args) {

        if (args.length < 1) {
            usage("Must specify at least one directory.");
        }

        int directoryCount = 0;

        for (int i = 0; i < args.length; i++) {
            if (args[i].startsWith("-")) {
                if (args[i].equalsIgnoreCase("-components")) {
                    if (args.length <= i + 1) {
                        usage("Components not specified.");
                    }
                    setComponents(args[++i]);
                } else {
                    usage("Invalid argument: " + args[i]);
                }
            } else {
                try {
                    addDirectory(args[i]);
                    directoryCount++;
                } catch (IOException ioe) {
                    usage("Directory does not exist: " + args[i]);
                }
            }
        }

        if (directoryCount == 0) {
            usage("Must specify at least one directory.");
        }

        analyze();
    }

    public static void main(String[] args) {
        new JDepend().instanceMain(args);
    }
}
/* Copyright (c) 2014, Colorado State University All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. This software is provided by the copyright holders and contributors "as is" and any express or implied warranties, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose are disclaimed. In no event shall the copyright holder or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. 
*/
package galileo.event;

import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;

import galileo.net.GalileoMessage;
import galileo.net.MessageListener;
import galileo.net.NetworkDestination;
import galileo.serialization.SerializationException;

/**
 * Implements the reactor pattern for processing incoming events
 * ({@link GalileoMessage} instances): messages arriving via
 * {@link #onMessage(GalileoMessage)} are queued, and
 * {@link #processNextEvent()} dispatches each one to the handler method
 * (annotated with {@link EventHandler}) registered for its event class.
 *
 * @author malensek
 */
public class EventReactor implements MessageListener {

    private static final Logger logger = Logger.getLogger("galileo");

    /** Capacity bound for the incoming-message queue. */
    private static final int DEFAULT_QUEUE_SZ = 100000;

    private Class<?> handlerClass;
    private Object handlerObject;

    private EventWrapper eventWrapper;

    /** Maps event classes to the handler method that processes them. */
    private Map<Class<?>, Method> classToMethod = new HashMap<>();

    /*
     * Assigned (bounded) in every constructor. NOTE: the previous version
     * also created a throwaway unbounded queue in a field initializer; that
     * dead allocation has been removed.
     */
    private BlockingQueue<GalileoMessage> messageQueue;

    /**
     * Creates an EventReactor with the default {@link BasicEventWrapper}
     * EventWrapper implementation.
     *
     * @param handlerObject an Object instance that contains the implementations
     * for event handlers, denoted by the {@link EventHandler} annotation.
     * @param eventMap a EventMap implementation that provides a mapping from
     * integer identification numbers to specific classes that represent an
     * event.
     */
    public EventReactor(Object handlerObject, EventMap eventMap) {
        this.handlerClass = handlerObject.getClass();
        this.handlerObject = handlerObject;
        this.eventWrapper = new BasicEventWrapper(eventMap);
        linkEventHandlers();

        messageQueue = new LinkedBlockingQueue<>(DEFAULT_QUEUE_SZ);
    }

    /**
     * Creates an EventReactor with a custom EventWrapper implementation.
     *
     * @param handlerObject an Object instance that contains the implementations
     * for event handlers, denoted by the {@link EventHandler} annotation.
     * @param wrapper A problem-specific {@link EventWrapper} implementation.
     */
    public EventReactor(Object handlerObject, EventWrapper wrapper) {
        this.handlerClass = handlerObject.getClass();
        this.handlerObject = handlerObject;
        this.eventWrapper = wrapper;
        linkEventHandlers();

        messageQueue = new LinkedBlockingQueue<>(DEFAULT_QUEUE_SZ);
    }

    /**
     * This method links incoming event types to their relevant event handlers
     * found in the handlerObject.  A valid handler method carries the
     * {@link EventHandler} annotation and takes exactly two parameters:
     * the event class itself, and an {@link EventContext}.
     */
    protected void linkEventHandlers() {
        classToMethod.clear();

        for (Method m : handlerClass.getMethods()) {
            for (Annotation a : m.getAnnotations()) {
                if (a.annotationType().equals(EventHandler.class)) {
                    /* This method is an event handler */
                    logger.log(Level.FINE, "Found EventHandler annotation on "
                            + "method: {0}", m.getName());

                    Class<?>[] params = m.getParameterTypes();
                    if (params.length != 2) {
                        logger.log(Level.WARNING, "Incorrect number of method "
                                + "parameters found.  Ignoring method.");
                        break;
                    }

                    if (params[1].equals(EventContext.class) == false) {
                        logger.log(Level.WARNING, "Second method parameter must"
                                + " be EventContext.  Ignoring method.");
                        break;
                    }

                    Class<?> eventClass;
                    try {
                        eventClass = extractEventClass(params);
                    } catch (EventException e) {
                        logger.log(Level.WARNING, "Could not determine type of "
                                + "event handled by method: " + m, e);
                        break;
                    }

                    logger.log(Level.FINE,
                            "Linking handler method [{0}] to class [{1}]",
                            new Object[] { m.getName(), eventClass.getName() });
                    classToMethod.put(eventClass, m);
                    break;
                }
            }
        }
    }

    /**
     * Determines the class responsible for encapsulating an Event.  This is
     * achieved by providing a list of parameter types, where the first
     * parameter will be the the class that represents the event.
     *
     * @param parameters A list of method parameters
     *
     * @throws EventException if the list is empty or the first parameter does
     * not implement the {@link Event} interface
     */
    private Class<?> extractEventClass(Class<?>[] parameters)
    throws EventException {
        if (parameters.length <= 0) {
            throw new EventException(
                    "Event handler method does not have any parameters");
        }

        List<Class<?>> interfaces
            = Arrays.asList(parameters[0].getInterfaces());
        if (interfaces.contains(Event.class) == false) {
            throw new EventException("EventHandler parameter does not "
                    + "implement the Event interface");
        }

        return parameters[0];
    }

    /**
     * Retrieves the next message from the queue, and calls the appropriate
     * event handler method to process the message.  If no message is present
     * in the queue, this method will block until one becomes available.
     *
     * @throws EventException when the incoming event is unknown, or errors
     * occur while trying to call the appropriate handler method
     * @throws InterruptedException if the calling thread is interrupted while
     * waiting for a new message to arrive
     */
    public void processNextEvent() throws EventException, IOException,
            InterruptedException, SerializationException {

        GalileoMessage message = messageQueue.take();

        try {
            Event event = eventWrapper.unwrap(message);
            Method method = classToMethod.get(event.getClass());
            if (method == null) {
                /* Previously this fell through as a NullPointerException and
                 * was reported as a generic processing error; report the
                 * documented "unknown event" condition explicitly instead. */
                throw new EventException("No handler is registered for "
                        + "incoming event type: " + event.getClass().getName());
            }
            EventContext context = new EventContext(message, eventWrapper);
            method.invoke(handlerObject, event, context);
        } catch (InvocationTargetException e) {
            throw new EventException("Unhandled exception in invoked "
                    + "event handler method", e);
        } catch (EventException | IOException | SerializationException e) {
            /* Already meaningful; propagate without re-wrapping. */
            throw e;
        } catch (Exception e) {
            /* Propagating all the possible reflection-related exceptions up to
             * clients seemed undesirable from a usability perspective here, so
             * we wrap this up in a catch-all exception. */
            throw new EventException("Error processing event!", e);
        }
    }

    @Override
    public void onConnect(NetworkDestination endpoint) {
        //TODO full implementation
    }

    @Override
    public void onDisconnect(NetworkDestination endpoint) {
        //TODO full implementation
    }

    @Override
    public void onMessage(GalileoMessage message) {
        try {
            messageQueue.put(message);
        } catch (InterruptedException e) {
            logger.warning("Interrupted during onMessage delivery");
            /* Preserve the caller's interrupt status. */
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Convenience function for wrapping an outgoing event with this
     * EventReactor's {@link EventWrapper} implementation.
     */
    public GalileoMessage wrapEvent(Event e)
    throws IOException {
        return eventWrapper.wrap(e);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.shims;

import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URI;
import java.nio.ByteBuffer;
import java.security.AccessControlException;
import java.security.NoSuchAlgorithmException;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.TreeMap;

import com.google.common.annotations.VisibleForTesting;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobProfile;
import org.apache.hadoop.mapred.JobStatus;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Progressable;

/**
 * In order to be compatible with multiple versions of Hadoop, all parts
 * of the Hadoop interface that are not cross-version compatible are
 * encapsulated in an implementation of this class. Users should use
 * the ShimLoader class as a factory to obtain an implementation of
 * HadoopShims corresponding to the version of Hadoop currently on the
 * classpath.
 */
public interface HadoopShims {

  /**
   * Constructs and returns the TaskAttempt logger URL,
   * or null if the TaskLogServlet is not available.
   *
   * @return TaskAttempt logger URL, or null
   */
  String getTaskAttemptLogUrl(JobConf conf,
    String taskTrackerHttpAddress, String taskAttemptId)
    throws MalformedURLException;

  /**
   * Returns a shim to wrap MiniMrCluster.
   */
  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
                                     String nameNode, int numDir) throws IOException;

  /** Returns a shim to wrap a mini Tez cluster for testing. */
  public MiniMrShim getMiniTezCluster(Configuration conf, int numberOfTaskTrackers,
                                      String nameNode, boolean usingLlap) throws IOException;

  /** Returns a shim to wrap a local (in-process) mini Tez cluster for testing. */
  public MiniMrShim getLocalMiniTezCluster(Configuration conf, boolean usingLlap);

  /** Returns a shim to wrap a mini Spark cluster for testing. */
  public MiniMrShim getMiniSparkCluster(Configuration conf, int numberOfTaskTrackers,
                                        String nameNode, int numDir) throws IOException;

  /**
   * Shim for MiniMrCluster
   */
  public interface MiniMrShim {
    public int getJobTrackerPort() throws UnsupportedOperationException;
    public void shutdown() throws IOException;
    public void setupConfiguration(Configuration conf);
  }

  /**
   * Returns a shim to wrap MiniDFSCluster. This is necessary since this class
   * was moved from org.apache.hadoop.dfs to org.apache.hadoop.hdfs
   */
  MiniDFSShim getMiniDfs(Configuration conf,
      int numDataNodes,
      boolean format,
      String[] racks) throws IOException;

  /** Same as the other getMiniDfs, with an extra flag to enable HA. */
  MiniDFSShim getMiniDfs(Configuration conf,
      int numDataNodes,
      boolean format,
      String[] racks,
      boolean isHA) throws IOException;

  /**
   * Shim around the functions in MiniDFSCluster that Hive uses.
   */
  public interface MiniDFSShim {
    FileSystem getFileSystem() throws IOException;

    void shutdown() throws IOException;
  }

  CombineFileInputFormatShim getCombineFileInputFormat();

  /** Version-independent equivalent of the job tracker's run state. */
  enum JobTrackerState { INITIALIZING, RUNNING };

  /**
   * Convert the ClusterStatus to its Thrift equivalent: JobTrackerState.
   * See MAPREDUCE-2455 for why this is a part of the shim.
   * @param clusterStatus cluster status to convert
   * @return the matching JobTrackerState
   * @throws Exception if no equivalent JobTrackerState exists
   */
  public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception;

  public TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable);

  public TaskAttemptID newTaskAttemptID(JobID jobId, boolean isMap, int taskId, int id);

  public JobContext newJobContext(Job job);

  /**
   * Check whether MR is configured to run in local-mode
   * @param conf configuration to inspect
   * @return true if MR runs in local-mode
   */
  public boolean isLocalMode(Configuration conf);

  /**
   * All retrieval of jobtracker/resource manager rpc address
   * in the configuration should be done through this shim
   * @param conf configuration to read from
   * @return the jobtracker/resource manager rpc address
   */
  public String getJobLauncherRpcAddress(Configuration conf);

  /**
   * All updates to jobtracker/resource manager rpc address
   * in the configuration should be done through this shim
   * @param conf configuration to update
   * @param val new rpc address value
   */
  public void setJobLauncherRpcAddress(Configuration conf, String val);

  /**
   * All references to jobtracker/resource manager http address
   * in the configuration should be done through this shim
   * @param conf configuration to read from
   * @return the jobtracker/resource manager http address
   */
  public String getJobLauncherHttpAddress(Configuration conf);

  /**
   * Get the default block size for the path. FileSystem alone is not sufficient to
   * determine the same, as in case of CSMT the underlying file system determines that.
   * @param fs file system
   * @param path path used to locate the actual file system
   * @return default block size in bytes
   */
  public long getDefaultBlockSize(FileSystem fs, Path path);

  /**
   * Get the default replication for a path. In case of CSMT the given path will be used to
   * locate the actual filesystem.
   * @param fs file system
   * @param path path used to locate the actual file system
   * @return default replication factor
   */
  public short getDefaultReplication(FileSystem fs, Path path);

  /**
   * Reset the default fair scheduler queue mapping to end user.
   *
   * @param conf configuration to update
   * @param userName end user name
   */
  public void refreshDefaultQueue(Configuration conf, String userName)
      throws IOException;

  /**
   * The method to set the partition file has a different signature between
   * hadoop versions, hence it is part of the shim.
   * @param jobConf job configuration
   * @param partition path of the partition file
   */
  void setTotalOrderPartitionFile(JobConf jobConf, Path partition);

  Comparator<LongWritable> getLongComparator();

  /**
   * CombineFileInputFormatShim.
   *
   * @param <K>
   * @param <V>
   */
  interface CombineFileInputFormatShim<K, V> {
    Path[] getInputPathsShim(JobConf conf);

    void createPool(JobConf conf, PathFilter... filters);

    CombineFileSplit[] getSplits(JobConf job, int numSplits) throws IOException;

    CombineFileSplit getInputSplitShim() throws IOException;

    RecordReader getRecordReader(JobConf job, CombineFileSplit split, Reporter reporter,
        Class<RecordReader<K, V>> rrClass) throws IOException;
  }

  List<HdfsFileStatusWithId> listLocatedHdfsStatus(
      FileSystem fs, Path path, PathFilter filter) throws IOException;

  /**
   * For file status returned by listLocatedStatus, convert them into a list
   * of block locations.
   * @param fs the file system
   * @param status the file information
   * @return the block locations of the file
   * @throws IOException
   */
  BlockLocation[] getLocations(FileSystem fs,
      FileStatus status) throws IOException;

  /**
   * For the block locations returned by getLocations() convert them into a Treemap
   * &lt;Offset,blockLocation&gt; by iterating over the list of blockLocation.
   * Using TreeMap from offset to blockLocation, makes it O(logn) to get a particular
   * block based upon offset.
   * @param fs the file system
   * @param status the file information
   * @return TreeMap from offset to BlockLocation
   * @throws IOException
   */
  TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,
      FileStatus status) throws IOException;

  /**
   * Flush and make visible to other users the changes to the given stream.
   * @param stream the stream to hflush.
   * @throws IOException
   */
  public void hflush(FSDataOutputStream stream) throws IOException;

  /** File status paired with an (optional) file ID. */
  public interface HdfsFileStatusWithId {
    public FileStatus getFileStatus();
    public Long getFileId();
  }

  public HCatHadoopShims getHCatShim();

  public interface HCatHadoopShims {

    enum PropertyName {CACHE_ARCHIVES, CACHE_FILES, CACHE_SYMLINK, CLASSPATH_ARCHIVES, CLASSPATH_FILES}

    public TaskID createTaskID();

    public TaskAttemptID createTaskAttemptID();

    public org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf,
        TaskAttemptID taskId);

    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(JobConf conf,
        org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable);

    public JobContext createJobContext(Configuration conf, JobID jobId);

    public org.apache.hadoop.mapred.JobContext createJobContext(JobConf conf, JobID jobId,
        Progressable progressable);

    public void commitJob(OutputFormat outputFormat, Job job) throws IOException;

    public void abortJob(OutputFormat outputFormat, Job job) throws IOException;

    /* Referring to job tracker in 0.20 and resource manager in 0.23 */
    public InetSocketAddress getResourceManagerAddress(Configuration conf);

    public String getPropertyName(PropertyName name);

    /**
     * Checks if file is in HDFS filesystem.
     *
     * @param fs file system to check
     * @param path path to check
     * @return true if the file is in HDFS, false if the file is in other file systems.
     */
    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException;
  }

  /**
   * Provides a Hadoop JobTracker shim.
   * @param conf not {@code null}
   */
  public WebHCatJTShim getWebHCatShim(Configuration conf, UserGroupInformation ugi) throws IOException;

  public interface WebHCatJTShim {
    /**
     * Grab a handle to a job that is already known to the JobTracker.
     *
     * @return Profile of the job, or null if not found.
     */
    public JobProfile getJobProfile(org.apache.hadoop.mapred.JobID jobid) throws IOException;

    /**
     * Grab a handle to a job that is already known to the JobTracker.
     *
     * @return Status of the job, or null if not found.
     */
    public JobStatus getJobStatus(org.apache.hadoop.mapred.JobID jobid) throws IOException;

    /**
     * Kill a job.
     */
    public void killJob(org.apache.hadoop.mapred.JobID jobid) throws IOException;

    /**
     * Get all the jobs submitted.
     */
    public JobStatus[] getAllJobs() throws IOException;

    /**
     * Close the connection to the Job Tracker.
     */
    public void close();

    /**
     * Does exactly what org.apache.hadoop.mapreduce.Job#addCacheFile(URI) in Hadoop 2.
     * Assumes that both parameters are not {@code null}.
     */
    public void addCacheFile(URI uri, Job job);

    /**
     * Kills all jobs tagged with the given tag that have been started after the
     * given timestamp.
     */
    public void killJobs(String tag, long timestamp);

    /**
     * Returns all jobs tagged with the given tag that have been started after the
     * given timestamp. Returned jobIds are MapReduce JobIds.
     */
    public Set<String> getJobs(String tag, long timestamp);
  }

  /**
   * Create a proxy file system that can serve a given scheme/authority using some
   * other file system.
   */
  public FileSystem createProxyFileSystem(FileSystem fs, URI uri);

  /**
   * Create a shim for DFS storage policy.
   */
  public enum StoragePolicyValue {
    MEMORY, /* 1-replica memory */
    SSD, /* 3-replica ssd */
    DEFAULT /* system defaults (usually 3-replica disk) */;

    public static StoragePolicyValue lookup(String name) {
      if (name == null) {
        return DEFAULT;
      }
      return StoragePolicyValue.valueOf(name.toUpperCase().trim());
    }
  };

  public interface StoragePolicyShim {
    void setStoragePolicy(Path path, StoragePolicyValue policy) throws IOException;
  }

  /**
   * obtain a storage policy shim associated with the filesystem.
   * Returns null when the filesystem has no storage policies.
   */
  public StoragePolicyShim getStoragePolicyShim(FileSystem fs);

  /**
   * Get configuration from JobContext
   */
  public Configuration getConfiguration(JobContext context);

  /**
   * Get job conf from the old style JobContext.
   * @param context job context
   * @return job conf
   */
  public JobConf getJobConf(org.apache.hadoop.mapred.JobContext context);

  public FileSystem getNonCachedFileSystem(URI uri, Configuration conf) throws IOException;

  public void getMergedCredentials(JobConf jobConf) throws IOException;

  public void mergeCredentials(JobConf dest, JobConf src) throws IOException;

  /**
   * Check if the configured UGI has access to the path for the given file system action.
   * Method will return successfully if action is permitted. AccessControlException will
   * be thrown if user does not have access to perform the action. Other exceptions may
   * be thrown for non-access related errors.
   * @param fs file system
   * @param status file to check access against
   * @param action action to verify
   * @throws IOException
   * @throws AccessControlException
   * @throws Exception
   */
  public void checkFileAccess(FileSystem fs, FileStatus status, FsAction action)
      throws IOException, AccessControlException, Exception;

  /**
   * Use password API (if available) to fetch credentials/password
   * @param conf configuration to read from
   * @param name credential alias to look up
   * @return the resolved password, or null if unavailable
   */
  public String getPassword(Configuration conf, String name) throws IOException;

  /**
   * check whether current hadoop supports sticky bit
   * @return true if sticky bit is supported
   */
  boolean supportStickyBit();

  /**
   * Check sticky bit in the permission
   * @param permission permission to inspect
   * @return sticky bit
   */
  boolean hasStickyBit(FsPermission permission);

  /**
   * @return True if the current hadoop supports trash feature.
   */
  boolean supportTrashFeature();

  /**
   * @return Path to HDFS trash, if current hadoop supports trash feature. Null otherwise.
   */
  Path getCurrentTrashPath(Configuration conf, FileSystem fs);

  /**
   * Check whether file is directory.
   */
  boolean isDirectory(FileStatus fileStatus);

  /**
   * Returns a shim to wrap KerberosName
   */
  public KerberosNameShim getKerberosNameShim(String name) throws IOException;

  /**
   * Shim for KerberosName
   */
  public interface KerberosNameShim {
    public String getDefaultRealm();
    public String getServiceName();
    public String getHostName();
    public String getRealm();
    public String getShortName() throws IOException;
  }

  /**
   * Copies a source dir/file to a destination by orchestrating the copy between hdfs nodes.
   * This distributed process is meant to copy huge files that could take some time if a single
   * copy is done.
   *
   * @param src Path to the source file or directory to copy
   * @param dst Path to the destination file or directory
   * @param conf The hadoop configuration object
   * @return True if it is successful; False otherwise.
   */
  public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException;

  /**
   * This interface encapsulates methods used to get encryption information from
   * HDFS paths.
   */
  public interface HdfsEncryptionShim {
    /**
     * Checks if a given HDFS path is encrypted.
     *
     * @param path Path to HDFS file system
     * @return True if it is encrypted; False otherwise.
     * @throws IOException If an error occurred attempting to get encryption information
     */
    public boolean isPathEncrypted(Path path) throws IOException;

    /**
     * Checks if two HDFS paths are on the same encrypted or unencrypted zone.
     *
     * @param path1 Path to HDFS file system
     * @param path2 Path to HDFS file system
     * @return True if both paths are in the same zone; False otherwise.
     * @throws IOException If an error occurred attempting to get encryption information
     */
    public boolean arePathsOnSameEncryptionZone(Path path1, Path path2) throws IOException;

    /**
     * Checks if two HDFS paths are on the same encrypted or unencrypted zone.
     *
     * @param path1 Path to HDFS file system
     * @param path2 Path to HDFS file system
     * @param encryptionShim2 The encryption-shim corresponding to path2.
     * @return True if both paths are in the same zone; False otherwise.
     * @throws IOException If an error occurred attempting to get encryption information
     */
    public boolean arePathsOnSameEncryptionZone(Path path1, Path path2,
                                                HdfsEncryptionShim encryptionShim2) throws IOException;

    /**
     * Compares two encrypted path strengths.
     *
     * @param path1 HDFS path to compare.
     * @param path2 HDFS path to compare.
     * @return 1 if path1 is stronger; 0 if paths are equals; -1 if path1 is weaker.
     * @throws IOException If an error occurred attempting to get encryption/key metadata
     */
    public int comparePathKeyStrength(Path path1, Path path2) throws IOException;

    /**
     * create encryption zone by path and keyname
     * @param path HDFS path to create encryption zone
     * @param keyName keyname
     * @throws IOException
     */
    @VisibleForTesting
    public void createEncryptionZone(Path path, String keyName) throws IOException;

    /**
     * Creates an encryption key.
     *
     * @param keyName Name of the key
     * @param bitLength Key encryption length in bits (128 or 256).
     * @throws IOException If an error occurs while creating the encryption key
     * @throws NoSuchAlgorithmException If cipher algorithm is invalid.
     */
    @VisibleForTesting
    public void createKey(String keyName, int bitLength)
      throws IOException, NoSuchAlgorithmException;

    @VisibleForTesting
    public void deleteKey(String keyName) throws IOException;

    @VisibleForTesting
    public List<String> getKeys() throws IOException;
  }

  /**
   * This is a dummy class used when the hadoop version does not support hdfs encryption.
   */
  public static class NoopHdfsEncryptionShim implements HdfsEncryptionShim {
    @Override
    public boolean isPathEncrypted(Path path) throws IOException {
    /* not supported */
      return false;
    }

    @Override
    public boolean arePathsOnSameEncryptionZone(Path path1, Path path2) throws IOException {
    /* not supported */
      return true;
    }

    @Override
    public boolean arePathsOnSameEncryptionZone(Path path1, Path path2,
                                                HdfsEncryptionShim encryptionShim2)
      throws IOException {
      // Not supported.
      return true;
    }

    @Override
    public int comparePathKeyStrength(Path path1, Path path2) throws IOException {
    /* not supported */
      return 0;
    }

    @Override
    public void createEncryptionZone(Path path, String keyName) {
    /* not supported */
    }

    @Override
    public void createKey(String keyName, int bitLength) {
    /* not supported */
    }

    @Override
    public void deleteKey(String keyName) throws IOException {
    /* not supported */
    }

    @Override
    public List<String> getKeys() throws IOException{
    /* not supported */
      return null;
    }
  }

  /**
   * Returns a new instance of the HdfsEncryption shim.
   *
   * @param fs A FileSystem object to HDFS
   * @param conf A Configuration object
   * @return A new instance of the HdfsEncryption shim.
   * @throws IOException If an error occurred while creating the instance.
   */
  public HdfsEncryptionShim createHdfsEncryptionShim(FileSystem fs, Configuration conf) throws IOException;

  public Path getPathWithoutSchemeAndAuthority(Path path);

  /**
   * Reads data into ByteBuffer.
   * @param file File.
   * @param dest Buffer.
   * @return Number of bytes read, just like file.read. If any bytes were read, dest position
   *         will be set to old position + number of bytes read.
   */
  int readByteBuffer(FSDataInputStream file, ByteBuffer dest) throws IOException;

  /**
   * Get Delegation token and add it to Credential.
   * @param fs FileSystem object to HDFS
   * @param cred Credentials object to add the token to.
   * @param uname user name.
   * @throws IOException If an error occurred on adding the token.
   */
  public void addDelegationTokens(FileSystem fs, Credentials cred, String uname) throws IOException;

  /**
   * Gets file ID. Only supported on hadoop-2.
   * @return inode ID of the file.
   */
  long getFileId(FileSystem fs, String path) throws IOException;

  /** Clones the UGI and the Subject. */
  UserGroupInformation cloneUgi(UserGroupInformation baseUgi) throws IOException;
}
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.template.soy.parsepasses.contextautoesc; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.template.soy.base.SourceLocation; import com.google.template.soy.base.internal.IdGenerator; import com.google.template.soy.error.ExplodingErrorReporter; import com.google.template.soy.exprtree.VarRefNode; import com.google.template.soy.parsepasses.contextautoesc.Context.State; import com.google.template.soy.parsepasses.contextautoesc.SlicedRawTextNode.RawTextSlice; import com.google.template.soy.soytree.ExprUnion; import com.google.template.soy.soytree.IfCondNode; import com.google.template.soy.soytree.IfNode; import com.google.template.soy.soytree.PrintNode; import com.google.template.soy.soytree.RawTextNode; import com.google.template.soy.soytree.SoyFileSetNode; import com.google.template.soy.soytree.SoyNode; import com.google.template.soy.soytree.defn.InjectedParam; import com.google.template.soy.types.primitive.StringType; import java.util.Collections; import java.util.List; /** * Inserts attributes into templates to bless inline {@code <script>} and {@code <style>} elements * and inline event handler and style attributes so that the browser can distinguish scripts * specified by the template author from ones injected via XSS. 
* * This class converts templates by adding {@code nonce="..."} to {@code <script>} and * {@code <style>} elements, so * <blockquote> * {@code <script>...</script>} * </blockquote> * becomes * <blockquote> * {@code <script{if $ij.csp_nonce} nonce="{$ij.csp_nonce}"{/if}>...</script>} * </blockquote> * which authorize scripts in HTML pages that are governed by the <i>Content Security Policy</i>. * * <p> * This class assumes that the value of {@code $ij.csp_nonce} will either be null or a valid * <a href="//dvcs.w3.org/hg/content-security-policy/raw-file/tip/csp-specification.dev.html#dfn-a-valid-nonce" * >CSP-style "nonce"</a>, an unguessable string consisting of Latin Alpha-numeric characters, * plus ({@code '+'}), and solidus ({@code '/'}). * <blockquote> * {@code nonce-value = 1*( ALPHA / DIGIT / "+" / "/" )} * </blockquote> * * <h3>Dependencies</h3> * <p> * If inline event handlers or styles are used, then the page should also load * {@code security.CspVerifier} which verifies event handler values. * * <h3>Caveats</h3> * <p> * This class does not add any {@code <meta http-equiv="content-security-policy" ...>} elements to * the template. The application developer must specify the CSP policy headers and include the * nonce there. * * <p> * Nonces should be of sufficient length, and from a crypto-strong source of randomness. * The stock <code>java.util.Random</code> is not strong enough, though a properly seeded * <code>SecureRandom</code> is ok. * */ public final class ContentSecurityPolicyPass { private ContentSecurityPolicyPass() { // Not instantiable. } /** The unprefixed name of the injected variable that holds the CSP nonce value for the page. */ public static final String CSP_NONCE_VARIABLE_NAME = "csp_nonce"; /** The name of the CSP nonce attribute, equals sign, and opening double quote. */ private static final String NONCE_ATTR_BEFORE_VALUE = " nonce=\""; /** The closing double quote that appears after an attribute value. 
*/ private static final String ATTR_AFTER_VALUE = "\""; /** * A variable definition for {@code $ij.csp_nonce}. * Since this pass implicitly blesses scripts that appear in the template text, authors should not * explicitly mention {@code $id.csp_nonce} in their template signatures, so we do not look for a * declared variable definition. */ private static final InjectedParam IMPLICIT_CSP_NONCE_DEFN = new InjectedParam(CSP_NONCE_VARIABLE_NAME, StringType.getInstance()); // --------------------------------------------------------------------------------------------- // Predicates used to identify HTML element and attribute boundaries in templates. // --------------------------------------------------------------------------------------------- /** * True for any context that occurs within a {@code <script>} or {@code <style>} open tag. * {@code [START]} and {@code [END]} mark ranges of positions for which this predicate is true. * {@code <script[START] src=[END]foo[START]>[END]body()</script>}. */ private static final Predicate<? super Context> IN_SCRIPT_OR_STYLE_TAG_PREDICATE = new Predicate<Context>() { public boolean apply(Context c) { return ( // In a script tag or style, (c.elType == Context.ElementType.SCRIPT || c.elType == Context.ElementType.STYLE) && c.state == Context.State.HTML_TAG // but not in an attribute && c.attrType == Context.AttributeType.NONE ); } }; /** * True between the end of a {@code <script>} or {@code <style>}tag and the start of its end tag. * {@code [START]} and {@code [END]} mark ranges of positions for which this predicate is true. * {@code <script src=foo]>[START]body()[END]</script>}. */ private static final Predicate<? super Context> IN_SCRIPT_OR_STYLE_BODY_PREDICATE = new Predicate<Context>() { public boolean apply(Context c) { return ( // If we're not in an attribute, c.attrType == Context.AttributeType.NONE // but we're in JS or CSS, then we must be in a script or style body. 
&& (c.state == Context.State.JS || c.state == Context.State.CSS) ); } }; /** * True immediately before an HTML attribute value. */ public static final Predicate<? super Context> HTML_BEFORE_ATTRIBUTE_VALUE = new Predicate<Context>() { @Override public boolean apply(Context c) { return c.state == State.HTML_BEFORE_ATTRIBUTE_VALUE; } }; /** * True inside an inline event handler value or style attribute. * {@code [START]} and {@code [END]} mark ranges of positions for which this predicate is true. * {@code <a onclick="[START]foo()"[END]>}. * Any close quote is part of the attribute value though the open quote is excluded as it is in * the BEFORE_ATTRIBUTE_VALUE state. */ private static final Predicate<? super Context> IN_SCRIPT_OR_STYLE_ATTR_VALUE = new Predicate<Context>() { public boolean apply(Context c) { return c.elType != Context.ElementType.NONE && (isScriptAttr(c) || isStyleAttr(c)); } private boolean isScriptAttr(Context c) { return c.attrType == Context.AttributeType.SCRIPT && c.state == Context.State.JS; } private boolean isStyleAttr(Context c) { return c.attrType == Context.AttributeType.STYLE && c.state == Context.State.CSS; } }; // --------------------------------------------------------------------------------------------- // Generators for Soy nodes that mark JS as safe to run. // --------------------------------------------------------------------------------------------- /** * Generates Soy nodes to inject at a specific location in a raw text node. */ private abstract static class InjectedSoyGenerator implements Comparable<InjectedSoyGenerator> { /** The raw text node into which to inject nodes. */ final RawTextNode rawTextNode; /** The offset into rawTextNode's text at which to inject the nodes. */ final int offset; /** * @param rawTextNode The raw text node into which to inject nodes. * @param offset the offset into rawTextNode's text at which to inject the nodes. 
*/ InjectedSoyGenerator(RawTextNode rawTextNode, int offset) { Preconditions.checkElementIndex(offset, rawTextNode.getRawText().length(), "text offset"); this.rawTextNode = rawTextNode; this.offset = offset; } /** * Generates standalone Soy nodes to inject at {@link #offset} in {@link #rawTextNode} and adds * them to out. * * @param idGenerator generates IDs for newly created nodes. * @param out receives nodes to add in the order they should be added. */ abstract void addNodesToInject( IdGenerator idGenerator, ImmutableList.Builder<? super SoyNode.StandaloneNode> out); /** Order first by raw text node ID and then by offset within the text node. */ public final int compareTo(InjectedSoyGenerator other) { int delta = this.rawTextNode.getId() - other.rawTextNode.getId(); if (delta == 0) { delta = this.offset - other.offset; } return delta; } } private static final class NonceAttrGenerator extends InjectedSoyGenerator { NonceAttrGenerator(RawTextNode rawTextNode, int offset) { super(rawTextNode, offset); } /** Adds `<code> nonce="{$ij.csp_nonce}"</code>`. */ @Override void addNodesToInject( IdGenerator idGenerator, ImmutableList.Builder<? super SoyNode.StandaloneNode> out) { out.add( new RawTextNode( idGenerator.genId(), NONCE_ATTR_BEFORE_VALUE, rawTextNode.getSourceLocation())); out.add(makeInjectedCspNoncePrintNode(idGenerator)); out.add( new RawTextNode(idGenerator.genId(), ATTR_AFTER_VALUE, rawTextNode.getSourceLocation())); } } private static final class InlineContentPrefixGenerator extends InjectedSoyGenerator { InlineContentPrefixGenerator(RawTextNode rawTextNode, int offset) { super(rawTextNode, offset); } /** * Adds `<code>/*{$ij.csp_verifier}*\/</code>` at the start of an event handler or style * attribute so that {@code template/security/csp_verify.js} can use a policy violation event * handler to lazily mark them safe to execute by prefix checking. 
     * <p>
     * We use a block comment instead of several alternatives:
     * <ol>
     * <li>A statement label prefix: {@code onclick="nonce:event_handler()"}</li>
     * <li>A second attribute:
     *     {@code onclick="event_handler()" csp-safe="nonce event_handler()"}</li>
     * <li>A cryptographic hash of the attribute value:
     *     {@code onclick="event_handler()" onclick-hash="A%^09t..."}</li>
     * </ol>
     * because each of these has at least one of these undesirable properties:
     * <ol>
     * <li>Changes the meaning of {@code onclick="use strict; doStuff()"} because an event handler
     *     is a JS FunctionBody production, and only first statement of a FunctionBody may be a
     *     DirectivePrologue like a {@code "use strict"} directive, and that directive must be
     *     unlabelled.</li>
     * <li>Requires translating nonces into JS identifiers or restricting nonces to a subset of
     *     the published grammar.</li>
     * <li>Expands code size by duplicating large event handlers.</li>
     * <li>Requires shipping large libraries like {@code goog.crypto}.</li>
     * <li>Requires a separate mechanism for blessing inline styles.</li>
     * </ol>
     */
    @Override
    void addNodesToInject(
        IdGenerator idGenerator, ImmutableList.Builder<? super SoyNode.StandaloneNode> out) {
      // We re-use the CSP nonce as the inline-event-handler secret.
      out.add(new RawTextNode(idGenerator.genId(), "/*", rawTextNode.getSourceLocation()));
      out.add(makeInjectedCspNoncePrintNode(idGenerator));
      // Nonces may contain '/' but not '*' so the nonce will not be truncated as long as the nonce
      // generator produces valid nonces instead of arbitrary ASCII.
      out.add(new RawTextNode(idGenerator.genId(), "*/", rawTextNode.getSourceLocation()));
    }
  }

  /**
   * A group of InjectedSoyGenerators with the same raw text node and offset.
   */
  private static final class GroupOfInjectedSoyGenerator extends InjectedSoyGenerator {
    // All generators in the group; they share this group's rawTextNode and offset.
    final ImmutableList<InjectedSoyGenerator> members;

    /**
     * @param group InjectedSoyGenerator with the same raw text node and offset; must be non-empty
     *     (the first member supplies the anchor node and offset for the whole group).
     */
    GroupOfInjectedSoyGenerator(List<? extends InjectedSoyGenerator> group) {
      super(group.get(0).rawTextNode, group.get(0).offset);
      members = ImmutableList.copyOf(group);
      // Defensive check: every member must share the same anchor location, otherwise the
      // group could not be injected as a unit at a single split point.
      for (InjectedSoyGenerator member : members) {
        if (member.rawTextNode != rawTextNode || member.offset != offset) {
          throw new IllegalArgumentException("Invalid group member");
        }
      }
    }

    /** delegates to each member in-order to add nodes to out. */
    @Override
    void addNodesToInject(
        IdGenerator idGenerator, ImmutableList.Builder<? super SoyNode.StandaloneNode> out) {
      for (InjectedSoyGenerator member : members) {
        member.addNodesToInject(idGenerator, out);
      }
    }
  }

  // ---------------------------------------------------------------------------------------------
  // Soy tree traversal that injects Soy nodes to mark JS in templates as safe to run.
  // ---------------------------------------------------------------------------------------------

  /**
   * Add attributes to author-specified scripts and styles so that they will continue to run even
   * though the browser's CSP policy blocks injected scripts and styles.
   */
  public static void blessAuthorSpecifiedScripts(
      Iterable<? extends SlicedRawTextNode> slicedRawTextNodes) {
    // Given
    //    <script type="text/javascript">
    //      alert(1337)
    //    </script>
    // we want to produce
    //    <script type="text/javascript"{if $ij.csp_nonce} nonce="{$ij.csp_nonce}"{/if}>
    //      alert(1337)
    //    </script>
    // We need the nonce value to be unguessable which means not reliably reusing the same value
    // from one page render to the next.
    //
    // We do this in several steps.
    // 1. Identify the start of the value of each inline event handler and style:
    //      <a onclick="foobar(this)">
    //                  ^-- start
    // 2. Create an InlineContentPrefixGenerator instance that injects a prefix that can be used
    //    by javascript/security/csp_verifier.js to allow the event handler.
    // 3. Identify the end of each <script> and <style> tag.
    //      <script type="text/javascript">alert(1337)</script>
    //                                    ^-- Can insert more attributes here
    //    We use the contexts from the contextual auto-escaper to identify the boundary between
    //    the tag that starts a script element and its body.
    // 4. Walk backwards over ">" and "/>" to find a place where it is safe to insert attributes.
    // 5. Create an InjectedSoyGenerator instance that encapsulates the content to insert.
    //      <script type="text/javascript">alert(1337)</script>
    //      ^-- Remember this location.
    // 6. Group InjectedSoyGenerators at the same location so that we could inject multiple chunks
    //    of content at the same slice offset.
    // 7. Create a conditional check at each unique location, {if $ij.csp_nonce}...{/if}, so that
    //    we don't insert CSP attributes when the template is applied without a secret.
    // 8. Create Soy nodes to fill out the {if}
    //      <script>  ->  <script{if $ij.csp_nonce} nonce="{$ij.csp_nonce}"{/if}>
    ImmutableList.Builder<InjectedSoyGenerator> injectedSoyGenerators = ImmutableList.builder();
    // We look for the end of attributes before the end of tags so that the stable sort we use to
    // group generators leaves any at attribute ends before the ones at the end of a tag.
    findCompleteInlineEventHandlers(slicedRawTextNodes, injectedSoyGenerators);
    findNonceAttrLocations(slicedRawTextNodes, injectedSoyGenerators);
    List<InjectedSoyGenerator> groupedInjectedAttrs = sortAndGroup(injectedSoyGenerators.build());
    generateAndInsertSoyNodesWrappedInIfNode(groupedInjectedAttrs);
  }

  /**
   * Handles steps 1 and 2 by finding event handler attributes that appear entirely within
   * a raw text node, and queueing an InlineContentPrefixGenerator for each one found.
   */
  private static void findCompleteInlineEventHandlers(
      Iterable<? extends SlicedRawTextNode> slicedRawTextNodes,
      ImmutableList.Builder<InjectedSoyGenerator> out) {
    Iterable<RawTextSlice> valueSlices = SlicedRawTextNode.find(
        slicedRawTextNodes,
        HTML_BEFORE_ATTRIBUTE_VALUE, IN_SCRIPT_OR_STYLE_ATTR_VALUE,
        null /* nextContextPredicate */);
    // Step 1: identify the beginning of an inline event handler.
    for (SlicedRawTextNode.RawTextSlice valueSlice : valueSlices) {
      Context.AttributeEndDelimiter delimType = valueSlice.context.delimType;
      if (delimType != Context.AttributeEndDelimiter.DOUBLE_QUOTE
          && delimType != Context.AttributeEndDelimiter.SINGLE_QUOTE) {
        // Bail on unquoted event handlers since we might accidentally
        // bless an untrusted suffix as in
        //    <button onclick=foo(){if $c} bar={$c} {/if}{$d|noescape}>
        // where $d might merge into the content of onclick unnoticed if $c is almost always true.
        // If $d were ";doEvil()" then it would result in an injection.
        continue;
      }
      // Step 2: add a prefix after the open quote, which happens to be at the beginning of the
      // slice.
      out.add(new InlineContentPrefixGenerator(
          valueSlice.slicedRawTextNode.getRawTextNode(), valueSlice.getStartOffset()));
    }
  }

  /**
   * Handles steps 3-5 by creating a NonceAttrGenerator for each location at the ^ in
   * {@code <script foo=bar^>} immediately after the run of attributes in a script tag.
   */
  private static void findNonceAttrLocations(
      Iterable<? extends SlicedRawTextNode> slicedRawTextNodes,
      ImmutableList.Builder<InjectedSoyGenerator> out) {
    // Step 3: identify slices that end a <script> element so we can find a location where it is
    // safe to insert an attribute.
    for (SlicedRawTextNode.RawTextSlice slice : SlicedRawTextNode.find(
        slicedRawTextNodes,
        null, IN_SCRIPT_OR_STYLE_TAG_PREDICATE, IN_SCRIPT_OR_STYLE_BODY_PREDICATE)) {
      String rawText = slice.getRawText();
      int rawTextLen = rawText.length();
      // Step 4: find a safe place to insert attributes.
      // The slice transitions from tag context to body context, so it must end with '>'.
      if (rawText.charAt(rawTextLen - 1) != '>') {
        throw new IllegalStateException("Invalid tag end: " + rawText);
      }
      int insertionPoint = rawTextLen - 1;
      // We can't put an attribute in the middle of an XML-style "/>" tag terminator.
      if (insertionPoint - 1 >= 0 && rawText.charAt(insertionPoint - 1) == '/') {
        --insertionPoint;
      }
      // Step 5: create a generator for the CSP nonce attribute.
      out.add(new NonceAttrGenerator(
          slice.slicedRawTextNode.getRawTextNode(), slice.getStartOffset() + insertionPoint));
    }
  }

  /**
   * Handles step 6 by converting a list of InjectedSoyGenerators into an equivalent list where
   * there is only one per text node and offset, and where the list is sorted by text node ID and
   * offset.
   */
  private static List<InjectedSoyGenerator> sortAndGroup(List<InjectedSoyGenerator> ungrouped) {
    // Sort by node ID & offset.  Copy first since the input may be immutable.
    ungrouped = Lists.newArrayList(ungrouped);
    Collections.sort(ungrouped);
    // Walk over list grouping members with the same raw text node and offset.
    ImmutableList.Builder<InjectedSoyGenerator> grouped = ImmutableList.builder();
    int n = ungrouped.size();
    for (int i = 0, end; i < n; i = end) {
      InjectedSoyGenerator firstGroupMember = ungrouped.get(i);
      end = i + 1;
      // Advance end past every generator anchored at the same (node, offset) pair.
      while (end < n
             && ungrouped.get(end).rawTextNode == firstGroupMember.rawTextNode
             && ungrouped.get(end).offset == firstGroupMember.offset) {
        ++end;
      }
      grouped.add(new GroupOfInjectedSoyGenerator(ungrouped.subList(i, end)));
    }
    return grouped.build();
  }

  /**
   * Handles steps 7 and 8 by applying the generators to create Soy nodes and injects them at the
   * location in the template specified by {@link InjectedSoyGenerator#rawTextNode} and
   * {@link InjectedSoyGenerator#offset}, splitting and replacing text nodes as necessary.
   *
   * <p>
   * {@link RawTextNode}'s text cannot be changed, so generators with the same {@link RawTextNode}
   * cannot be applied separately. This method takes a list of generators, so it can apply them in
   * a batch and avoid conflicts.
   *
   * @param injectedSoyGenerators A sorted, grouped, list of generators.
   */
  private static void generateAndInsertSoyNodesWrappedInIfNode(
      List<? extends InjectedSoyGenerator> injectedSoyGenerators) {
    int n = injectedSoyGenerators.size();
    for (int i = 0, end; i < n; i = end) {
      // Group by RawTextNode: [i, end) is a run of generators anchored in the same text node.
      end = i + 1;
      InjectedSoyGenerator first = injectedSoyGenerators.get(i);
      while (end < n) {
        InjectedSoyGenerator atEnd = injectedSoyGenerators.get(end);
        if (first.rawTextNode == atEnd.rawTextNode) {
          ++end;
        } else {
          break;
        }
      }
      // Find the text node that we're going to split and inject into.
      RawTextNode rawTextNode = first.rawTextNode;
      String rawText = rawTextNode.getRawText();
      SoyNode.BlockNode parent = rawTextNode.getParent();
      IdGenerator idGenerator
          = parent.getNearestAncestor(SoyFileSetNode.class).getNodeIdGenerator();
      // Split rawTextNode on the offsets, and at each split, insert a nonce value.
      // The original node is removed and replaced by the alternating sequence
      // (text chunk, {if} node, text chunk, ...) built below.
      int textStart = 0;
      int childIndex = parent.getChildIndex(rawTextNode);
      parent.removeChild(rawTextNode);
      for (InjectedSoyGenerator generator : injectedSoyGenerators.subList(i, end)) {
        int offset = generator.offset;
        if (offset != textStart) {
          // Re-add the raw text between the previous injection point and this one.
          RawTextNode textBefore = new RawTextNode(
              idGenerator.genId(),
              rawText.substring(textStart, offset),
              rawTextNode.getSourceLocation());
          parent.addChild(childIndex, textBefore);
          ++childIndex;
          textStart = offset;
        }
        // Step 7: add an {if $ij.csp_nonce}...{/if} to prevent generation of CSP nonce when the
        // template is applied without a secret.
        IfNode ifNode = new IfNode(idGenerator.genId(), rawTextNode.getSourceLocation());
        IfCondNode ifCondNode = new IfCondNode(
            idGenerator.genId(),
            rawTextNode.getSourceLocation(),
            "if",
            new ExprUnion(makeReferenceToInjectedCspNonce()));
        parent.addChild(childIndex, ifNode);
        ++childIndex;
        ifNode.addChild(ifCondNode);
        // Step 8: inject Soy nodes into the {if}.
        ImmutableList.Builder<SoyNode.StandaloneNode> newChildren = ImmutableList.builder();
        generator.addNodesToInject(idGenerator, newChildren);
        ifCondNode.addChildren(newChildren.build());
      }
      if (textStart != rawText.length()) {
        // Re-add any raw text remaining after the last injection point.
        RawTextNode textTail = new RawTextNode(
            idGenerator.genId(), rawText.substring(textStart), rawTextNode.getSourceLocation());
        parent.addChild(childIndex, textTail);
      }
    }
  }

  // ---------------------------------------------------------------------------------------------
  // Methods to programmatically create Soy commands and expressions.
  // ---------------------------------------------------------------------------------------------

  /**
   * Builds the Soy expression {@code $ij.csp_nonce} with an appropriate type.
   */
  private static VarRefNode makeReferenceToInjectedCspNonce() {
    return new VarRefNode(
        CSP_NONCE_VARIABLE_NAME,
        SourceLocation.UNKNOWN,
        true /*injected*/,
        IMPLICIT_CSP_NONCE_DEFN);
  }

  /**
   * Builds the Soy command {@code {$ij.csp_nonce}}.
   */
  private static PrintNode makeInjectedCspNoncePrintNode(IdGenerator idGenerator) {
    return new PrintNode.Builder(
        idGenerator.genId(),
        true,  // Implicit. {$ij.csp_nonce} not {print $ij.csp_nonce}
        SourceLocation.UNKNOWN)
        .exprUnion(new ExprUnion(makeReferenceToInjectedCspNonce()))
        .build(ExplodingErrorReporter.get());
  }
}
/* * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.awt.image; import java.awt.GraphicsConfiguration; import java.awt.GraphicsEnvironment; import java.awt.Image; import java.awt.ImageCapabilities; import java.awt.image.BufferedImage; import java.awt.image.VolatileImage; import java.util.Iterator; import java.util.concurrent.ConcurrentHashMap; import sun.java2d.InvalidPipeException; import sun.java2d.SurfaceData; import sun.java2d.SurfaceDataProxy; /** * The abstract base class that manages the various SurfaceData objects that * represent an Image's contents. Subclasses can customize how the surfaces * are organized, whether to cache the original contents in an accelerated * surface, and so on. 
* <p> * The SurfaceManager also maintains an arbitrary "cache" mechanism which * allows other agents to store data in it specific to their use of this * image. The most common use of the caching mechanism is for destination * SurfaceData objects to store cached copies of the source image. */ public abstract class SurfaceManager { public abstract static class ImageAccessor { public abstract SurfaceManager getSurfaceManager(Image img); public abstract void setSurfaceManager(Image img, SurfaceManager mgr); } private static ImageAccessor imgaccessor; public static void setImageAccessor(ImageAccessor ia) { if (imgaccessor != null) { throw new InternalError("Attempt to set ImageAccessor twice"); } imgaccessor = ia; } /** * Returns the SurfaceManager object contained within the given Image. */ public static SurfaceManager getManager(Image img) { SurfaceManager sMgr = imgaccessor.getSurfaceManager(img); if (sMgr == null) { /* * In practice only a BufferedImage will get here. */ try { BufferedImage bi = (BufferedImage) img; sMgr = new BufImgSurfaceManager(bi); setManager(bi, sMgr); } catch (ClassCastException e) { throw new InvalidPipeException("Invalid Image variant"); } } return sMgr; } public static void setManager(Image img, SurfaceManager mgr) { imgaccessor.setSurfaceManager(img, mgr); } private ConcurrentHashMap<Object,Object> cacheMap; /** * Return an arbitrary cached object for an arbitrary cache key. * Other objects can use this mechanism to store cached data about * the source image that will let them save time when using or * manipulating the image in the future. * <p> * Note that the cache is maintained as a simple Map with no * attempts to keep it up to date or invalidate it so any data * stored here must either not be dependent on the state of the * image or it must be individually tracked to see if it is * outdated or obsolete. 
* <p> * The SurfaceData object of the primary (destination) surface * has a StateTracker mechanism which can help track the validity * and "currentness" of any data stored here. * For convenience and expediency an object stored as cached * data may implement the FlushableCacheData interface specified * below so that it may be notified immediately if the flush() * method is ever called. */ public Object getCacheData(Object key) { return (cacheMap == null) ? null : cacheMap.get(key); } /** * Store an arbitrary cached object for an arbitrary cache key. * See the getCacheData() method for notes on tracking the * validity of data stored using this mechanism. */ public void setCacheData(Object key, Object value) { if (cacheMap == null) { synchronized (this) { if (cacheMap == null) { cacheMap = new ConcurrentHashMap<>(2); } } } cacheMap.put(key, value); } /** * Returns the main SurfaceData object that "owns" the pixels for * this SurfaceManager. This SurfaceData is used as the destination * surface in a rendering operation and is the most authoritative * storage for the current state of the pixels, though other * versions might be cached in other locations for efficiency. */ public abstract SurfaceData getPrimarySurfaceData(); /** * Restores the primary surface being managed, and then returns the * replacement surface. This is called when an accelerated surface has * been "lost", in an attempt to auto-restore its contents. */ public abstract SurfaceData restoreContents(); /** * Notification that any accelerated surfaces associated with this manager * have been "lost", which might mean that they need to be manually * restored or recreated. * * The default implementation does nothing, but platform-specific * variants which have accelerated surfaces should perform any necessary * actions. */ public void acceleratedSurfaceLost() {} /** * Returns an ImageCapabilities object which can be * inquired as to the specific capabilities of this * Image. 
The capabilities object will return true for * isAccelerated() if the image has a current and valid * SurfaceDataProxy object cached for the specified * GraphicsConfiguration parameter. * <p> * This class provides a default implementation of the * ImageCapabilities that will try to determine if there * is an associated SurfaceDataProxy object and if it is * up to date, but only works for GraphicsConfiguration * objects which implement the ProxiedGraphicsConfig * interface defined below. In practice, all configs * which can be accelerated are currently implementing * that interface. * <p> * A null GraphicsConfiguration returns a value based on whether the * image is currently accelerated on its default GraphicsConfiguration. * * @see java.awt.Image#getCapabilities * @since 1.5 */ public ImageCapabilities getCapabilities(GraphicsConfiguration gc) { return new ImageCapabilitiesGc(gc); } class ImageCapabilitiesGc extends ImageCapabilities { GraphicsConfiguration gc; public ImageCapabilitiesGc(GraphicsConfiguration gc) { super(false); this.gc = gc; } public boolean isAccelerated() { // Note that when img.getAccelerationPriority() gets set to 0 // we remove SurfaceDataProxy objects from the cache and the // answer will be false. GraphicsConfiguration tmpGc = gc; if (tmpGc == null) { tmpGc = GraphicsEnvironment.getLocalGraphicsEnvironment(). getDefaultScreenDevice().getDefaultConfiguration(); } if (tmpGc instanceof ProxiedGraphicsConfig) { Object proxyKey = ((ProxiedGraphicsConfig) tmpGc).getProxyKey(); if (proxyKey != null) { SurfaceDataProxy sdp = (SurfaceDataProxy) getCacheData(proxyKey); return (sdp != null && sdp.isAccelerated()); } } return false; } } /** * An interface for GraphicsConfiguration objects to implement if * their surfaces accelerate images using SurfaceDataProxy objects. * * Implementing this interface facilitates the default * implementation of getImageCapabilities() above. 
*/ public static interface ProxiedGraphicsConfig { /** * Return the key that destination surfaces created on the * given GraphicsConfiguration use to store SurfaceDataProxy * objects for their cached copies. */ public Object getProxyKey(); } /** * Releases system resources in use by ancillary SurfaceData objects, * such as surfaces cached in accelerated memory. Subclasses should * override to release any of their flushable data. * <p> * The default implementation will visit all of the value objects * in the cacheMap and flush them if they implement the * FlushableCacheData interface. */ public synchronized void flush() { flush(false); } synchronized void flush(boolean deaccelerate) { if (cacheMap != null) { Iterator<Object> i = cacheMap.values().iterator(); while (i.hasNext()) { Object o = i.next(); if (o instanceof FlushableCacheData) { if (((FlushableCacheData) o).flush(deaccelerate)) { i.remove(); } } } } } /** * An interface for Objects used in the SurfaceManager cache * to implement if they have data that should be flushed when * the Image is flushed. */ public static interface FlushableCacheData { /** * Flush all cached resources. * The deaccelerated parameter indicates if the flush is * happening because the associated surface is no longer * being accelerated (for instance the acceleration priority * is set below the threshold needed for acceleration). * Returns a boolean that indicates if the cached object is * no longer needed and should be removed from the cache. */ public boolean flush(boolean deaccelerated); } /** * Called when image's acceleration priority is changed. * <p> * The default implementation will visit all of the value objects * in the cacheMap when the priority gets set to 0.0 and flush them * if they implement the FlushableCacheData interface. */ public void setAccelerationPriority(float priority) { if (priority == 0.0f) { flush(true); } } /** * Returns a horizontal scale factor of the image. 
This is utility method, * which fetches information from the SurfaceData of the image. * * @see SurfaceData#getDefaultScaleX */ public static double getImageScaleX(final Image img) { if (!(img instanceof VolatileImage)) { return 1; } final SurfaceManager sm = getManager(img); return sm.getPrimarySurfaceData().getDefaultScaleX(); } /** * Returns a vertical scale factor of the image. This is utility method, * which fetches information from the SurfaceData of the image. * * @see SurfaceData#getDefaultScaleY */ public static double getImageScaleY(final Image img) { if (!(img instanceof VolatileImage)) { return 1; } final SurfaceManager sm = getManager(img); return sm.getPrimarySurfaceData().getDefaultScaleY(); } }
/*
 * Copyright 2010-2013 Ning, Inc.
 *
 * Ning licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.subscription.api.migration;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;

import org.joda.time.DateTime;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.catalog.api.CatalogApiException;
import org.killbill.billing.catalog.api.CatalogService;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.clock.Clock;
import org.killbill.billing.subscription.alignment.MigrationPlanAligner;
import org.killbill.billing.subscription.alignment.TimedMigration;
import org.killbill.billing.subscription.api.SubscriptionApiBase;
import org.killbill.billing.subscription.api.SubscriptionBaseApiService;
import org.killbill.billing.subscription.api.migration.AccountMigrationData.BundleMigrationData;
import org.killbill.billing.subscription.api.migration.AccountMigrationData.SubscriptionMigrationData;
import org.killbill.billing.subscription.api.user.DefaultSubscriptionBase;
import org.killbill.billing.subscription.api.user.DefaultSubscriptionBaseBundle;
import org.killbill.billing.subscription.api.user.SubscriptionBuilder;
import org.killbill.billing.subscription.engine.dao.SubscriptionDao;
import org.killbill.billing.subscription.events.SubscriptionBaseEvent;
import org.killbill.billing.subscription.events.SubscriptionBaseEvent.EventType;
import org.killbill.billing.subscription.events.phase.PhaseEvent;
import org.killbill.billing.subscription.events.phase.PhaseEventData;
import org.killbill.billing.subscription.events.user.ApiEvent;
import org.killbill.billing.subscription.events.user.ApiEventBuilder;
import org.killbill.billing.subscription.events.user.ApiEventCancel;
import org.killbill.billing.subscription.events.user.ApiEventChange;
import org.killbill.billing.subscription.events.user.ApiEventMigrateBilling;
import org.killbill.billing.subscription.events.user.ApiEventMigrateSubscription;
import org.killbill.billing.subscription.events.user.ApiEventType;
import org.killbill.billing.subscription.exceptions.SubscriptionBaseError;
import org.killbill.billing.util.UUIDs;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.InternalCallContextFactory;

import com.google.common.collect.Lists;
import com.google.inject.Inject;

/**
 * Default implementation of {@link SubscriptionBaseMigrationApi}: converts externally supplied
 * account migration payloads into subscription bundles, subscriptions and their event streams,
 * and persists them through the {@link SubscriptionDao}.
 */
public class DefaultSubscriptionBaseMigrationApi extends SubscriptionApiBase implements SubscriptionBaseMigrationApi {

    private final MigrationPlanAligner migrationAligner;
    private final InternalCallContextFactory internalCallContextFactory;

    @Inject
    public DefaultSubscriptionBaseMigrationApi(final MigrationPlanAligner migrationAligner,
                                               final SubscriptionBaseApiService apiService,
                                               final CatalogService catalogService,
                                               final SubscriptionDao dao,
                                               final Clock clock,
                                               final InternalCallContextFactory internalCallContextFactory) {
        super(dao, apiService, clock, catalogService);
        this.migrationAligner = migrationAligner;
        this.internalCallContextFactory = internalCallContextFactory;
    }

    @Override
    public void migrate(final AccountMigration toBeMigrated, final CallContext context) throws SubscriptionBaseMigrationApiException {
        final AccountMigrationData accountMigrationData = createAccountMigrationData(toBeMigrated, context);
        dao.migrate(toBeMigrated.getAccountKey(), accountMigrationData,
                    internalCallContextFactory.createInternalCallContext(toBeMigrated.getAccountKey(), context));
    }

    /**
     * Builds the full migration data (bundles and their subscriptions) for one account.
     *
     * @param toBeMigrated incoming migration payload
     * @param context      caller context
     * @return data ready to be persisted by the DAO
     * @throws SubscriptionBaseMigrationApiException if event alignment fails for any subscription
     */
    private AccountMigrationData createAccountMigrationData(final AccountMigration toBeMigrated, final CallContext context) throws SubscriptionBaseMigrationApiException {
        final UUID accountId = toBeMigrated.getAccountKey();
        final DateTime now = clock.getUTCNow();

        final List<BundleMigrationData> accountBundleData = new LinkedList<BundleMigrationData>();

        for (final BundleMigration curBundle : toBeMigrated.getBundles()) {

            final DefaultSubscriptionBaseBundle bundleData = new DefaultSubscriptionBaseBundle(curBundle.getBundleKey(), accountId, now, now, now, now);
            final List<SubscriptionMigrationData> bundleSubscriptionData = new LinkedList<AccountMigrationData.SubscriptionMigrationData>();

            final List<SubscriptionMigration> sortedSubscriptions = Lists.newArrayList(curBundle.getSubscriptions());
            // Make sure we have first BASE or STANDALONE, then ADDON and for each category order by CED
            Collections.sort(sortedSubscriptions, new Comparator<SubscriptionMigration>() {
                @Override
                public int compare(final SubscriptionMigration o1, final SubscriptionMigration o2) {
                    if (o1.getCategory().equals(o2.getCategory())) {
                        // Same category: order by the effective date of the first subscription case.
                        return o1.getSubscriptionCases()[0].getEffectiveDate().compareTo(o2.getSubscriptionCases()[0].getEffectiveDate());
                    } else {
                        // Different categories: ADD_ON always sorts after anything else.
                        // (The original if/else-if chain here ended in an unreachable
                        // 'return 0' — the two conditions were exact complements; this
                        // ternary is behaviorally identical.)
                        return o1.getCategory().name().equalsIgnoreCase("ADD_ON") ? 1 : -1;
                    }
                }
            });

            DateTime bundleStartDate = null;
            for (final SubscriptionMigration curSub : sortedSubscriptions) {
                SubscriptionMigrationData data = null;
                if (bundleStartDate == null) {
                    // First (BASE/STANDALONE) subscription anchors the bundle start date.
                    data = createInitialSubscription(accountId, bundleData.getId(), curSub.getCategory(), curSub.getSubscriptionCases(), now, curSub.getChargedThroughDate(), context);
                    bundleStartDate = data.getInitialEvents().get(0).getEffectiveDate();
                } else {
                    data = createSubscriptionMigrationDataWithBundleDate(accountId, bundleData.getId(), curSub.getCategory(), curSub.getSubscriptionCases(), now,
                                                                         bundleStartDate, curSub.getChargedThroughDate(), context);
                }
                if (data != null) {
                    bundleSubscriptionData.add(data);
                }
            }
            final BundleMigrationData bundleMigrationData = new BundleMigrationData(bundleData, bundleSubscriptionData);
            accountBundleData.add(bundleMigrationData);
        }

        return new AccountMigrationData(accountBundleData);
    }

    /**
     * Creates the first subscription of a bundle; its aligned start date becomes the
     * bundle start date for all subsequent subscriptions.
     */
    private SubscriptionMigrationData createInitialSubscription(final UUID accountId, final UUID bundleId, final ProductCategory productCategory,
                                                                final SubscriptionMigrationCase[] input, final DateTime now, final DateTime ctd, final CallContext context)
            throws SubscriptionBaseMigrationApiException {
        final InternalCallContext internalCallContext = internalCallContextFactory.createInternalCallContext(accountId, context);
        final TimedMigration[] events = migrationAligner.getEventsMigration(input, now, internalCallContext);
        final DateTime migrationStartDate = events[0].getEventTime();
        final List<SubscriptionBaseEvent> emptyEvents = Collections.emptyList();
        final DefaultSubscriptionBase defaultSubscriptionBase;
        try {
            defaultSubscriptionBase = createSubscriptionForApiUse(new SubscriptionBuilder()
                                                                          .setId(UUIDs.randomUUID())
                                                                          .setBundleId(bundleId)
                                                                          .setCategory(productCategory)
                                                                          .setBundleStartDate(migrationStartDate)
                                                                          .setAlignStartDate(migrationStartDate),
                                                                  emptyEvents, internalCallContext);
            return new SubscriptionMigrationData(defaultSubscriptionBase, toEvents(defaultSubscriptionBase, now, ctd, events, context), ctd);
        } catch (CatalogApiException e) {
            throw new SubscriptionBaseMigrationApiException(e);
        }
    }

    /**
     * Creates a follow-up subscription (e.g. an ADD_ON) whose bundle start date is fixed
     * by the bundle's initial subscription, while its alignment date comes from its own
     * migration events.
     */
    private SubscriptionMigrationData createSubscriptionMigrationDataWithBundleDate(final UUID accountId, final UUID bundleId, final ProductCategory productCategory,
                                                                                    final SubscriptionMigrationCase[] input, final DateTime now, final DateTime bundleStartDate,
                                                                                    final DateTime ctd, final CallContext context)
            throws SubscriptionBaseMigrationApiException {
        final InternalCallContext internalCallContext = internalCallContextFactory.createInternalCallContext(accountId, context);
        final TimedMigration[] events = migrationAligner.getEventsMigration(input, now, internalCallContext);
        final DateTime migrationStartDate = events[0].getEventTime();
        final List<SubscriptionBaseEvent> emptyEvents = Collections.emptyList();
        final DefaultSubscriptionBase defaultSubscriptionBase;
        try {
            defaultSubscriptionBase = createSubscriptionForApiUse(new SubscriptionBuilder()
                                                                          .setId(UUIDs.randomUUID())
                                                                          .setBundleId(bundleId)
                                                                          .setCategory(productCategory)
                                                                          .setBundleStartDate(bundleStartDate)
                                                                          .setAlignStartDate(migrationStartDate),
                                                                  emptyEvents, internalCallContext);
            return new SubscriptionMigrationData(defaultSubscriptionBase, toEvents(defaultSubscriptionBase, now, ctd, events, context), ctd);
        } catch (CatalogApiException e) {
            throw new SubscriptionBaseMigrationApiException(e);
        }
    }

    /**
     * Converts the aligned {@link TimedMigration} stream into persistable subscription events,
     * and appends a MIGRATE_BILLING event at the charged-through date (CTD) reflecting the last
     * state seen before the CTD — unless the subscription was cancelled at or before the CTD.
     *
     * @throws SubscriptionBaseError if ctd is null or an unexpected event type is encountered
     */
    private List<SubscriptionBaseEvent> toEvents(final DefaultSubscriptionBase defaultSubscriptionBase, final DateTime now, final DateTime ctd, final TimedMigration[] migrationEvents, final CallContext context) {
        if (ctd == null) {
            throw new SubscriptionBaseError(String.format("Could not create migration billing event ctd = %s", ctd));
        }

        final List<SubscriptionBaseEvent> events = new ArrayList<SubscriptionBaseEvent>(migrationEvents.length);

        ApiEventMigrateBilling apiEventMigrateBilling = null;
        boolean isCancelledSubscriptionPriorOrAtCTD = false;

        for (final TimedMigration cur : migrationEvents) {
            final ApiEventBuilder builder = new ApiEventBuilder()
                    .setSubscriptionId(defaultSubscriptionBase.getId())
                    .setEventPlan((cur.getPlan() != null) ? cur.getPlan().getName() : null)
                    .setEventPlanPhase((cur.getPhase() != null) ? cur.getPhase().getName() : null)
                    .setEventPriceList(cur.getPriceList())
                    .setActiveVersion(defaultSubscriptionBase.getActiveVersion())
                    .setEffectiveDate(cur.getEventTime())
                    .setRequestedDate(now)
                    .setFromDisk(true);

            if (cur.getEventType() == EventType.PHASE) {
                final PhaseEvent nextPhaseEvent = PhaseEventData.createNextPhaseEvent(defaultSubscriptionBase.getId(),
                                                                                      defaultSubscriptionBase.getActiveVersion(), cur.getPhase().getName(), now, cur.getEventTime());
                events.add(nextPhaseEvent);
            } else if (cur.getEventType() == EventType.API_USER) {
                switch (cur.getApiEventType()) {
                    case MIGRATE_ENTITLEMENT:
                        final ApiEventMigrateSubscription creationEvent = new ApiEventMigrateSubscription(builder);
                        events.add(creationEvent);
                        break;
                    case CHANGE:
                        events.add(new ApiEventChange(builder));
                        break;
                    case CANCEL:
                        // A cancellation at or before the CTD suppresses the MIGRATE_BILLING event.
                        isCancelledSubscriptionPriorOrAtCTD = !cur.getEventTime().isAfter(ctd);
                        events.add(new ApiEventCancel(builder));
                        break;
                    default:
                        throw new SubscriptionBaseError(String.format("Unexpected type of api migration event %s", cur.getApiEventType()));
                }
            } else {
                throw new SubscriptionBaseError(String.format("Unexpected type of migration event %s", cur.getEventType()));
            }

            // create the MIGRATE_BILLING based on the current state of the last event.
            // Re-built on each iteration so the final one reflects the latest state prior to CTD.
            if (!cur.getEventTime().isAfter(ctd)) {
                builder.setEffectiveDate(ctd);
                builder.setUuid(UUIDs.randomUUID());
                apiEventMigrateBilling = new ApiEventMigrateBilling(builder);
            }
        }

        // Always ADD MIGRATE BILLING which is constructed from latest state seen in the stream prior to CTD
        if (apiEventMigrateBilling != null && !isCancelledSubscriptionPriorOrAtCTD) {
            events.add(apiEventMigrateBilling);
        }

        // Sort by effective date; on ties MIGRATE_ENTITLEMENT sorts first, then MIGRATE_BILLING.
        Collections.sort(events, new Comparator<SubscriptionBaseEvent>() {

            int compForApiType(final SubscriptionBaseEvent o1, final SubscriptionBaseEvent o2, final ApiEventType type) {
                ApiEventType apiO1 = null;
                if (o1.getType() == EventType.API_USER) {
                    apiO1 = ((ApiEvent) o1).getApiEventType();
                }
                ApiEventType apiO2 = null;
                if (o2.getType() == EventType.API_USER) {
                    apiO2 = ((ApiEvent) o2).getApiEventType();
                }

                if (apiO1 != null && apiO1.equals(type)) {
                    return -1;
                } else if (apiO2 != null && apiO2.equals(type)) {
                    return 1;
                } else {
                    return 0;
                }
            }

            @Override
            public int compare(final SubscriptionBaseEvent o1, final SubscriptionBaseEvent o2) {

                int comp = o1.getEffectiveDate().compareTo(o2.getEffectiveDate());
                if (comp == 0) {
                    comp = compForApiType(o1, o2, ApiEventType.MIGRATE_ENTITLEMENT);
                }
                if (comp == 0) {
                    comp = compForApiType(o1, o2, ApiEventType.MIGRATE_BILLING);
                }
                return comp;
            }
        });

        return events;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.codegen;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import org.apache.ignite.internal.GridDirectCollection;
import org.apache.ignite.internal.GridDirectMap;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.IgniteCodeGeneratingFail;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.jetbrains.annotations.Nullable;

import static java.lang.reflect.Modifier.isStatic;
import static java.lang.reflect.Modifier.isTransient;

/**
 * Direct marshallable code generator.
 * <p>
 * Generates the bodies of {@code writeTo}, {@code readFrom} and {@code fieldsCount}
 * for {@link Message} implementations and splices them into the corresponding
 * source files under the configured source directory.
 */
public class MessageCodeGenerator {
    /** Orders serializable fields alphabetically so generated state indices are stable. */
    private static final Comparator<Field> FIELD_CMP = new Comparator<Field>() {
        @Override public int compare(Field f1, Field f2) {
            return f1.getName().compareTo(f2.getName());
        }
    };

    /** Default source directory (core module). */
    private static final String DFLT_SRC_DIR = U.getIgniteHome() + "/modules/core/src/main/java";

    /** Source directory of the indexing module (for manual runs against query messages). */
    private static final String INDEXING_SRC_DIR = U.getIgniteHome() + "/modules/indexing/src/main/java";

    /** Base class all generated messages must implement. */
    private static final Class<?> BASE_CLS = Message.class;

    /** */
    private static final String EMPTY = "";

    /** One level of indentation in generated source. */
    private static final String TAB = "    ";

    /** Name of the buffer variable in generated code. */
    private static final String BUF_VAR = "buf";

    /** Mapping from Java types to collection item type constants used by the wire format. */
    private static final Map<Class<?>, MessageCollectionItemType> TYPES = U.newHashMap(30);

    static {
        TYPES.put(byte.class, MessageCollectionItemType.BYTE);
        TYPES.put(Byte.class, MessageCollectionItemType.BYTE);
        TYPES.put(short.class, MessageCollectionItemType.SHORT);
        TYPES.put(Short.class, MessageCollectionItemType.SHORT);
        TYPES.put(int.class, MessageCollectionItemType.INT);
        TYPES.put(Integer.class, MessageCollectionItemType.INT);
        TYPES.put(long.class, MessageCollectionItemType.LONG);
        TYPES.put(Long.class, MessageCollectionItemType.LONG);
        TYPES.put(float.class, MessageCollectionItemType.FLOAT);
        TYPES.put(Float.class, MessageCollectionItemType.FLOAT);
        TYPES.put(double.class, MessageCollectionItemType.DOUBLE);
        TYPES.put(Double.class, MessageCollectionItemType.DOUBLE);
        TYPES.put(char.class, MessageCollectionItemType.CHAR);
        TYPES.put(Character.class, MessageCollectionItemType.CHAR);
        TYPES.put(boolean.class, MessageCollectionItemType.BOOLEAN);
        TYPES.put(Boolean.class, MessageCollectionItemType.BOOLEAN);
        TYPES.put(byte[].class, MessageCollectionItemType.BYTE_ARR);
        TYPES.put(short[].class, MessageCollectionItemType.SHORT_ARR);
        TYPES.put(int[].class, MessageCollectionItemType.INT_ARR);
        TYPES.put(long[].class, MessageCollectionItemType.LONG_ARR);
        TYPES.put(float[].class, MessageCollectionItemType.FLOAT_ARR);
        TYPES.put(double[].class, MessageCollectionItemType.DOUBLE_ARR);
        TYPES.put(char[].class, MessageCollectionItemType.CHAR_ARR);
        TYPES.put(boolean[].class, MessageCollectionItemType.BOOLEAN_ARR);
        TYPES.put(String.class, MessageCollectionItemType.STRING);
        TYPES.put(BitSet.class, MessageCollectionItemType.BIT_SET);
        TYPES.put(UUID.class, MessageCollectionItemType.UUID);
        TYPES.put(IgniteUuid.class, MessageCollectionItemType.IGNITE_UUID);
    }

    /**
     * @param cls Class.
     * @return Type enum value ({@code MSG} for any {@link Message} subtype not in the table).
     */
    private static MessageCollectionItemType typeEnum(Class<?> cls) {
        MessageCollectionItemType type = TYPES.get(cls);

        if (type == null) {
            assert Message.class.isAssignableFrom(cls) : cls;

            type = MessageCollectionItemType.MSG;
        }

        return type;
    }

    /** Generated lines of the {@code writeTo} body for the class being processed. */
    private final Collection<String> write = new ArrayList<>();

    /** Generated lines of the {@code readFrom} body for the class being processed. */
    private final Collection<String> read = new ArrayList<>();

    /** Cache of inherited field counts keyed by superclass. */
    private final Map<Class<?>, Integer> fieldCnt = new HashMap<>();

    /** Source directory the generated code is written to. */
    private final String srcDir;

    /** Total field count (inherited + declared) of the class being processed. */
    private int totalFieldCnt;

    /** Serializable declared fields of the class being processed, in {@link #FIELD_CMP} order. */
    private List<Field> fields;

    /** Current indentation level for generated lines. */
    private int indent;

    /**
     * @param args Arguments. Optional first argument overrides the source directory.
     * @throws Exception In case of error.
     */
    public static void main(String[] args) throws Exception {
        String srcDir = DFLT_SRC_DIR;

        if (args != null && args.length > 0)
            srcDir = args[0];

        MessageCodeGenerator gen = new MessageCodeGenerator(srcDir);

//        gen.generateAll(true);

//        gen.generateAndWrite(GridNearAtomicUpdateRequest.class);

//        gen.generateAndWrite(GridMessageCollection.class);
//        gen.generateAndWrite(DataStreamerEntry.class);

//        gen.generateAndWrite(GridDistributedLockRequest.class);
//        gen.generateAndWrite(GridDistributedLockResponse.class);
//        gen.generateAndWrite(GridNearLockRequest.class);
//        gen.generateAndWrite(GridNearLockResponse.class);
//        gen.generateAndWrite(GridDhtLockRequest.class);
//        gen.generateAndWrite(GridDhtLockResponse.class);
//
//        gen.generateAndWrite(GridDistributedTxPrepareRequest.class);
//        gen.generateAndWrite(GridDistributedTxPrepareResponse.class);
//        gen.generateAndWrite(GridNearTxPrepareRequest.class);
//        gen.generateAndWrite(GridNearTxPrepareResponse.class);
//        gen.generateAndWrite(GridDhtTxPrepareRequest.class);
//        gen.generateAndWrite(GridDhtTxPrepareResponse.class);
//
//        gen.generateAndWrite(GridDistributedTxFinishRequest.class);
//        gen.generateAndWrite(GridDistributedTxFinishResponse.class);
//        gen.generateAndWrite(GridNearTxFinishRequest.class);
//        gen.generateAndWrite(GridNearTxFinishResponse.class);
//        gen.generateAndWrite(GridDhtTxFinishRequest.class);
//        gen.generateAndWrite(GridDhtTxFinishResponse.class);
//
//        gen.generateAndWrite(GridCacheTxRecoveryRequest.class);
//        gen.generateAndWrite(GridCacheTxRecoveryResponse.class);

//        gen.generateAndWrite(GridQueryCancelRequest.class);
//        gen.generateAndWrite(GridQueryFailResponse.class);
//        gen.generateAndWrite(GridQueryNextPageRequest.class);
//        gen.generateAndWrite(GridQueryNextPageResponse.class);
//        gen.generateAndWrite(GridQueryRequest.class);
//        gen.generateAndWrite(GridCacheSqlQuery.class);

//        gen.generateAndWrite(GridH2Null.class);
//        gen.generateAndWrite(GridH2Boolean.class);
//        gen.generateAndWrite(GridH2Byte.class);
//        gen.generateAndWrite(GridH2Short.class);
//        gen.generateAndWrite(GridH2Integer.class);
//        gen.generateAndWrite(GridH2Long.class);
//        gen.generateAndWrite(GridH2Decimal.class);
//        gen.generateAndWrite(GridH2Double.class);
//        gen.generateAndWrite(GridH2Float.class);
//        gen.generateAndWrite(GridH2Time.class);
//        gen.generateAndWrite(GridH2Date.class);
//        gen.generateAndWrite(GridH2Timestamp.class);
//        gen.generateAndWrite(GridH2Bytes.class);
//        gen.generateAndWrite(GridH2String.class);
//        gen.generateAndWrite(GridH2Array.class);
//        gen.generateAndWrite(GridH2JavaObject.class);
//        gen.generateAndWrite(GridH2Uuid.class);
//        gen.generateAndWrite(GridH2Geometry.class);
//        gen.generateAndWrite(GridH2CacheObject.class);
    }

    /**
     * @param srcDir Source directory.
     */
    public MessageCodeGenerator(String srcDir) {
        this.srcDir = srcDir;
    }

    /**
     * Generates code for all classes.
     *
     * @param write Whether to write to file.
     * @throws Exception In case of error.
     */
    public void generateAll(boolean write) throws Exception {
        Collection<Class<? extends Message>> classes = classes();

        for (Class<? extends Message> cls : classes) {
            try {
                boolean isAbstract = Modifier.isAbstract(cls.getModifiers());

                System.out.println("Processing class: " + cls.getName() + (isAbstract ? " (abstract)" : ""));

                if (write)
                    generateAndWrite(cls);
                else
                    generate(cls);
            }
            catch (IllegalStateException e) {
                System.out.println("Will skip class generation [cls=" + cls + ", err=" + e.getMessage() + ']');
            }
        }
    }

    /**
     * Generates code for provided class and writes it to source file.
     * Note: this method must be called only from {@code generateAll(boolean)}
     * and only with updating {@code CLASSES_ORDER_FILE} and other auto generated files.
     *
     * @param cls Class.
     * @throws Exception In case of error.
     */
    @SuppressWarnings("ConstantConditions")
    public void generateAndWrite(Class<? extends Message> cls) throws Exception {
        assert cls != null;

        generate(cls);

        File file = new File(srcDir, cls.getName().replace('.', File.separatorChar) + ".java");

        if (!file.exists() || !file.isFile()) {
            System.out.println("Source file not found: " + file.getPath());

            return;
        }

        Collection<String> src = new ArrayList<>();

        // try-with-resources guarantees the reader is closed even if parsing throws.
        try (BufferedReader rdr = new BufferedReader(new FileReader(file))) {
            String line;

            boolean skip = false;

            boolean writeFound = false;
            boolean readFound = false;
            boolean fieldCntFound = false;

            while ((line = rdr.readLine()) != null) {
                if (!skip) {
                    src.add(line);

                    // When a generated-method signature is found, inject the fresh body
                    // and skip the stale body lines until the method's closing brace.
                    if (line.contains("public boolean writeTo(ByteBuffer buf, MessageWriter writer)")) {
                        src.addAll(write);

                        skip = true;

                        writeFound = true;
                    }
                    else if (line.contains("public boolean readFrom(ByteBuffer buf, MessageReader reader)")) {
                        src.addAll(read);

                        skip = true;

                        readFound = true;
                    }
                    else if (line.contains("public byte fieldsCount()")) {
                        src.add(TAB + TAB + "return " + totalFieldCnt + ";");

                        skip = true;

                        fieldCntFound = true;
                    }
                }
                else if (line.startsWith(TAB + "}")) {
                    src.add(line);

                    skip = false;
                }
            }

            if (!writeFound)
                System.out.println("    writeTo method doesn't exist.");

            if (!readFound)
                System.out.println("    readFrom method doesn't exist.");

            if (!fieldCntFound)
                System.out.println("    fieldCount method doesn't exist.");
        }

        // try-with-resources guarantees the writer is closed (and flushed) even on error.
        try (BufferedWriter wr = new BufferedWriter(new FileWriter(file))) {
            for (String line : src)
                wr.write(line + '\n');
        }
    }

    /**
     * Generates code for provided class.
     *
     * @param cls Class.
     * @throws Exception In case of error.
     */
    private void generate(Class<? extends Message> cls) throws Exception {
        assert cls != null;

        if (cls.isInterface())
            return;

        if (cls.isAnnotationPresent(IgniteCodeGeneratingFail.class))
            throw new IllegalStateException("@IgniteCodeGeneratingFail is provided for class: " + cls.getName());

        write.clear();
        read.clear();

        fields = new ArrayList<>();

        Field[] declaredFields = cls.getDeclaredFields();

        for (Field field : declaredFields) {
            int mod = field.getModifiers();

            // Static, transient and @GridDirectTransient fields are not part of the wire format.
            if (!isStatic(mod) && !isTransient(mod) && !field.isAnnotationPresent(GridDirectTransient.class))
                fields.add(field);
        }

        Collections.sort(fields, FIELD_CMP);

        int state = startState(cls);

        totalFieldCnt = state + fields.size();

        indent = 2;

        boolean hasSuper = cls.getSuperclass() != Object.class;

        start(write, hasSuper ? "writeTo" : null, true);
        start(read, hasSuper ? "readFrom" : null, false);

        indent++;

        for (Field field : fields)
            processField(field, state++);

        indent--;

        finish(write, null);
        finish(read, cls.getSimpleName());
    }

    /**
     * @param cls Message class.
     * @return Start state (number of serializable fields inherited from all superclasses).
     */
    private int startState(Class<?> cls) {
        assert cls != null;

        Class<?> superCls = cls.getSuperclass();

        Integer state = fieldCnt.get(superCls);

        if (state != null)
            return state;

        state = 0;

        while (cls.getSuperclass() != Object.class) {
            cls = cls.getSuperclass();

            for (Field field : cls.getDeclaredFields()) {
                int mod = field.getModifiers();

                if (!isStatic(mod) && !isTransient(mod) && !field.isAnnotationPresent(GridDirectTransient.class))
                    state++;
            }
        }

        fieldCnt.put(superCls, state);

        return state;
    }

    /**
     * Emits the method prologue (buffer binding, super-call, header writing, state switch).
     *
     * @param code Code lines.
     * @param superMtd Super class method name ({@code null} if the class has no message superclass).
     * @param write Whether write code is generated.
     */
    private void start(Collection<String> code, @Nullable String superMtd, boolean write) {
        assert code != null;

        code.add(builder().a(write ? "writer" : "reader").a(".setBuffer(").a(BUF_VAR).a(");").toString());
        code.add(EMPTY);

        if (!write) {
            code.add(builder().a("if (!reader.beforeMessageRead())").toString());

            indent++;

            code.add(builder().a("return false;").toString());
            code.add(EMPTY);

            indent--;
        }

        if (superMtd != null) {
            if (write)
                returnFalseIfFailed(code, "super." + superMtd, BUF_VAR, "writer");
            else
                returnFalseIfFailed(code, "super." + superMtd, BUF_VAR, "reader");

            code.add(EMPTY);
        }

        if (write) {
            code.add(builder().a("if (!writer.isHeaderWritten()) {").toString());

            indent++;

            returnFalseIfFailed(code, "writer.writeHeader", "directType()", "fieldsCount()");

            code.add(EMPTY);
            code.add(builder().a("writer.onHeaderWritten();").toString());

            indent--;

            code.add(builder().a("}").toString());
            code.add(EMPTY);
        }

        if (!fields.isEmpty())
            code.add(builder().a("switch (").a(write ? "writer.state()" : "reader.state()").a(") {").toString());
    }

    /**
     * Emits the method epilogue (closing brace of the state switch and the return statement).
     *
     * @param code Code lines.
     * @param readClsName Class name for {@code afterMessageRead}, or {@code null} for the write path.
     */
    private void finish(Collection<String> code, String readClsName) {
        assert code != null;

        if (!fields.isEmpty()) {
            code.add(builder().a("}").toString());
            code.add(EMPTY);
        }

        if (readClsName == null)
            code.add(builder().a("return true;").toString());
        else
            code.add(builder().a("return reader.afterMessageRead(").a(readClsName).a(".class);").toString());
    }

    /**
     * @param field Field.
     * @param opt Case option.
     */
    private void processField(Field field, int opt) {
        assert field != null;
        assert opt >= 0;

        GridDirectCollection colAnn = field.getAnnotation(GridDirectCollection.class);
        GridDirectMap mapAnn = field.getAnnotation(GridDirectMap.class);

        if (colAnn == null && Collection.class.isAssignableFrom(field.getType()))
            throw new IllegalStateException("@GridDirectCollection annotation is not provided for field: " +
                field.getName());

        if (mapAnn == null && Map.class.isAssignableFrom(field.getType()))
            throw new IllegalStateException("@GridDirectMap annotation is not provided for field: " +
                field.getName());

        writeField(field, opt, colAnn, mapAnn);
        readField(field, opt, colAnn, mapAnn);
    }

    /**
     * @param field Field.
     * @param opt Case option.
     * @param colAnn Collection annotation.
     * @param mapAnn Map annotation.
     */
    private void writeField(Field field, int opt, @Nullable GridDirectCollection colAnn,
        @Nullable GridDirectMap mapAnn) {
        assert field != null;
        assert opt >= 0;

        write.add(builder().a("case ").a(opt).a(":").toString());

        indent++;

        returnFalseIfWriteFailed(field.getType(), field.getName(), colAnn != null ? colAnn.value() : null,
            mapAnn != null ? mapAnn.keyType() : null, mapAnn != null ? mapAnn.valueType() : null, false);

        write.add(EMPTY);
        write.add(builder().a("writer.incrementState();").toString());
        write.add(EMPTY);

        indent--;
    }

    /**
     * @param field Field.
     * @param opt Case option.
     * @param colAnn Collection annotation.
     * @param mapAnn Map annotation.
     */
    private void readField(Field field, int opt, @Nullable GridDirectCollection colAnn,
        @Nullable GridDirectMap mapAnn) {
        assert field != null;
        assert opt >= 0;

        read.add(builder().a("case ").a(opt).a(":").toString());

        indent++;

        returnFalseIfReadFailed(field.getType(), field.getName(), colAnn != null ? colAnn.value() : null,
            mapAnn != null ? mapAnn.keyType() : null, mapAnn != null ? mapAnn.valueType() : null);

        read.add(EMPTY);
        read.add(builder().a("reader.incrementState();").toString());
        read.add(EMPTY);

        indent--;
    }

    /**
     * @param type Field type.
     * @param name Field name.
     * @param colItemType Collection item type.
     * @param mapKeyType Map key type.
     * @param mapValType Map key value.
     * @param raw Raw write flag.
     */
    private void returnFalseIfWriteFailed(Class<?> type, String name, @Nullable Class<?> colItemType,
        @Nullable Class<?> mapKeyType, @Nullable Class<?> mapValType, boolean raw) {
        assert type != null;
        assert name != null;

        String field = raw ? "null" : '"' + name + '"';

        if (type == byte.class)
            returnFalseIfFailed(write, "writer.writeByte", field, name);
        else if (type == short.class)
            returnFalseIfFailed(write, "writer.writeShort", field, name);
        else if (type == int.class)
            returnFalseIfFailed(write, "writer.writeInt", field, name);
        else if (type == long.class)
            returnFalseIfFailed(write, "writer.writeLong", field, name);
        else if (type == float.class)
            returnFalseIfFailed(write, "writer.writeFloat", field, name);
        else if (type == double.class)
            returnFalseIfFailed(write, "writer.writeDouble", field, name);
        else if (type == char.class)
            returnFalseIfFailed(write, "writer.writeChar", field, name);
        else if (type == boolean.class)
            returnFalseIfFailed(write, "writer.writeBoolean", field, name);
        else if (type == byte[].class)
            returnFalseIfFailed(write, "writer.writeByteArray", field, name);
        else if (type == short[].class)
            returnFalseIfFailed(write, "writer.writeShortArray", field, name);
        else if (type == int[].class)
            returnFalseIfFailed(write, "writer.writeIntArray", field, name);
        else if (type == long[].class)
            returnFalseIfFailed(write, "writer.writeLongArray", field, name);
        else if (type == float[].class)
            returnFalseIfFailed(write, "writer.writeFloatArray", field, name);
        else if (type == double[].class)
            returnFalseIfFailed(write, "writer.writeDoubleArray", field, name);
        else if (type == char[].class)
            returnFalseIfFailed(write, "writer.writeCharArray", field, name);
        else if (type == boolean[].class)
            returnFalseIfFailed(write, "writer.writeBooleanArray", field, name);
        else if (type == String.class)
            returnFalseIfFailed(write, "writer.writeString", field, name);
        else if (type == BitSet.class)
            returnFalseIfFailed(write, "writer.writeBitSet", field, name);
        else if (type == UUID.class)
            returnFalseIfFailed(write, "writer.writeUuid", field, name);
        else if (type == IgniteUuid.class)
            returnFalseIfFailed(write, "writer.writeIgniteUuid", field, name);
        else if (type.isEnum()) {
            // Enums travel as their ordinal byte; -1 encodes null.
            String arg = name + " != null ? (byte)" + name + ".ordinal() : -1";

            returnFalseIfFailed(write, "writer.writeByte", field, arg);
        }
        else if (BASE_CLS.isAssignableFrom(type))
            returnFalseIfFailed(write, "writer.writeMessage", field, name);
        else if (type.isArray()) {
            returnFalseIfFailed(write, "writer.writeObjectArray", field, name,
                "MessageCollectionItemType." + typeEnum(type.getComponentType()));
        }
        else if (Collection.class.isAssignableFrom(type) && !Set.class.isAssignableFrom(type)) {
            assert colItemType != null;

            returnFalseIfFailed(write, "writer.writeCollection", field, name,
                "MessageCollectionItemType." + typeEnum(colItemType));
        }
        else if (Map.class.isAssignableFrom(type)) {
            assert mapKeyType != null;
            assert mapValType != null;

            returnFalseIfFailed(write, "writer.writeMap", field, name,
                "MessageCollectionItemType." + typeEnum(mapKeyType),
                "MessageCollectionItemType." + typeEnum(mapValType));
        }
        else
            throw new IllegalStateException("Unsupported type: " + type);
    }

    /**
     * @param type Field type.
     * @param name Field name.
     * @param colItemType Collection item type.
     * @param mapKeyType Map key type.
     * @param mapValType Map value type.
     */
    private void returnFalseIfReadFailed(Class<?> type, @Nullable String name, @Nullable Class<?> colItemType,
        @Nullable Class<?> mapKeyType, @Nullable Class<?> mapValType) {
        assert type != null;

        String field = '"' + name + '"';

        if (type == byte.class)
            returnFalseIfReadFailed(name, "reader.readByte", field);
        else if (type == short.class)
            returnFalseIfReadFailed(name, "reader.readShort", field);
        else if (type == int.class)
            returnFalseIfReadFailed(name, "reader.readInt", field);
        else if (type == long.class)
            returnFalseIfReadFailed(name, "reader.readLong", field);
        else if (type == float.class)
            returnFalseIfReadFailed(name, "reader.readFloat", field);
        else if (type == double.class)
            returnFalseIfReadFailed(name, "reader.readDouble", field);
        else if (type == char.class)
            returnFalseIfReadFailed(name, "reader.readChar", field);
        else if (type == boolean.class)
            returnFalseIfReadFailed(name, "reader.readBoolean", field);
        else if (type == byte[].class)
            returnFalseIfReadFailed(name, "reader.readByteArray", field);
        else if (type == short[].class)
            returnFalseIfReadFailed(name, "reader.readShortArray", field);
        else if (type == int[].class)
            returnFalseIfReadFailed(name, "reader.readIntArray", field);
        else if (type == long[].class)
            returnFalseIfReadFailed(name, "reader.readLongArray", field);
        else if (type == float[].class)
            returnFalseIfReadFailed(name, "reader.readFloatArray", field);
        else if (type == double[].class)
            returnFalseIfReadFailed(name, "reader.readDoubleArray", field);
        else if (type == char[].class)
            returnFalseIfReadFailed(name, "reader.readCharArray", field);
        else if (type == boolean[].class)
            returnFalseIfReadFailed(name, "reader.readBooleanArray", field);
        else if (type == String.class)
            returnFalseIfReadFailed(name, "reader.readString", field);
        else if (type == BitSet.class)
            returnFalseIfReadFailed(name, "reader.readBitSet", field);
        else if (type == UUID.class)
            returnFalseIfReadFailed(name, "reader.readUuid", field);
        else if (type == IgniteUuid.class)
            returnFalseIfReadFailed(name, "reader.readIgniteUuid", field);
        else if (type.isEnum()) {
            // Read the ordinal byte into a local, then map it back via fromOrdinal().
            String loc = name + "Ord";

            read.add(builder().a("byte ").a(loc).a(";").toString());
            read.add(EMPTY);

            returnFalseIfReadFailed(loc, "reader.readByte", field);

            read.add(EMPTY);
            read.add(builder().a(name).a(" = ").a(type.getSimpleName()).a(".fromOrdinal(").a(loc).a(");").toString());
        }
        else if (BASE_CLS.isAssignableFrom(type))
            returnFalseIfReadFailed(name, "reader.readMessage", field);
        else if (type.isArray()) {
            Class<?> compType = type.getComponentType();

            returnFalseIfReadFailed(name, "reader.readObjectArray", field,
                "MessageCollectionItemType." + typeEnum(compType), compType.getSimpleName() + ".class");
        }
        else if (Collection.class.isAssignableFrom(type) && !Set.class.isAssignableFrom(type)) {
            assert colItemType != null;

            returnFalseIfReadFailed(name, "reader.readCollection", field,
                "MessageCollectionItemType." + typeEnum(colItemType));
        }
        else if (Map.class.isAssignableFrom(type)) {
            assert mapKeyType != null;
            assert mapValType != null;

            boolean linked = type.equals(LinkedHashMap.class);

            returnFalseIfReadFailed(name, "reader.readMap", field,
                "MessageCollectionItemType." + typeEnum(mapKeyType),
                "MessageCollectionItemType." + typeEnum(mapValType),
                linked ? "true" : "false");
        }
        else
            throw new IllegalStateException("Unsupported type: " + type);
    }

    /**
     * @param var Variable name.
     * @param mtd Method name.
     * @param args Method arguments.
     */
    private void returnFalseIfReadFailed(String var, String mtd, @Nullable String... args) {
        assert mtd != null;

        String argsStr = "";

        if (args != null && args.length > 0) {
            for (String arg : args)
                argsStr += arg + ", ";

            argsStr = argsStr.substring(0, argsStr.length() - 2);
        }

        read.add(builder().a(var).a(" = ").a(mtd).a("(").a(argsStr).a(");").toString());
        read.add(EMPTY);
        read.add(builder().a("if (!reader.isLastRead())").toString());

        indent++;

        read.add(builder().a("return false;").toString());

        indent--;
    }

    /**
     * @param code Code lines.
     * @param accessor Field or method name.
     * @param args Method arguments.
     */
    private void returnFalseIfFailed(Collection<String> code, String accessor, @Nullable String... args) {
        assert code != null;
        assert accessor != null;

        String argsStr = "";

        if (args != null && args.length > 0) {
            for (String arg : args)
                argsStr += arg + ", ";

            argsStr = argsStr.substring(0, argsStr.length() - 2);
        }

        code.add(builder().a("if (!").a(accessor).a("(").a(argsStr).a("))").toString());

        indent++;

        code.add(builder().a("return false;").toString());

        indent--;
    }

    /**
     * Creates new builder with correct indent.
     *
     * @return Builder.
     */
    private SB builder() {
        assert indent > 0;

        SB sb = new SB();

        for (int i = 0; i < indent; i++)
            sb.a(TAB);

        return sb;
    }

    /**
     * Gets all direct marshallable classes.
     * First classes will be classes from {@code classesOrder} with same order
     * as ordered values. Other classes will be at the end and ordered by name
     * (with package prefix). That orders need for saving {@code directType} value.
     *
     * @return Classes.
     * @throws Exception In case of error.
     */
    private Collection<Class<? extends Message>> classes() throws Exception {
        Collection<Class<? extends Message>> col = new TreeSet<>(
            new Comparator<Class<? extends Message>>() {
                @Override public int compare(Class<? extends Message> c1, Class<? extends Message> c2) {
                    return c1.getName().compareTo(c2.getName());
                }
            });

        // NOTE(review): this cast assumes the application classloader is a URLClassLoader,
        // which holds on Java 8 but not on Java 9+ — confirm the tool's target JDK.
        URLClassLoader ldr = (URLClassLoader)getClass().getClassLoader();

        for (URL url : ldr.getURLs()) {
            File file = new File(url.toURI());

            int prefixLen = file.getPath().length() + 1;

            processFile(file, ldr, prefixLen, col);
        }

        return col;
    }

    /**
     * Recursively process provided file or directory.
     *
     * @param file File.
     * @param ldr Class loader.
     * @param prefixLen Path prefix length.
     * @param col Classes.
     * @throws Exception In case of error.
     */
    @SuppressWarnings("unchecked")
    private void processFile(File file, ClassLoader ldr, int prefixLen,
        Collection<Class<? extends Message>> col) throws Exception {
        assert file != null;
        assert ldr != null;
        assert prefixLen > 0;
        assert col != null;

        if (!file.exists())
            throw new FileNotFoundException("File doesn't exist: " + file);

        if (file.isDirectory()) {
            // listFiles() returns null on I/O error; fail loudly instead of throwing NPE.
            File[] children = file.listFiles();

            if (children == null)
                throw new IOException("Failed to list directory contents: " + file);

            for (File f : children)
                processFile(f, ldr, prefixLen, col);
        }
        else {
            assert file.isFile();

            String path = file.getPath();

            if (path.endsWith(".class")) {
                String clsName = path.substring(prefixLen, path.length() - 6).replace(File.separatorChar, '.');

                Class<?> cls = Class.forName(clsName, false, ldr);

                if (cls.getDeclaringClass() == null && cls.getEnclosingClass() == null &&
                    !BASE_CLS.equals(cls) && BASE_CLS.isAssignableFrom(cls))
                    col.add((Class<? extends Message>)cls);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.blob; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.JobID; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.configuration.BlobServerOptions; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.runtime.net.SSLUtils; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.FileUtils; import org.apache.flink.util.NetUtils; import org.apache.flink.util.ShutdownHookUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import javax.net.ServerSocketFactory; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.security.MessageDigest; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Timer; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; 
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import static org.apache.flink.runtime.blob.BlobKey.BlobType.PERMANENT_BLOB; import static org.apache.flink.runtime.blob.BlobKey.BlobType.TRANSIENT_BLOB; import static org.apache.flink.runtime.blob.BlobServerProtocol.BUFFER_SIZE; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; /** * This class implements the BLOB server. The BLOB server is responsible for listening for incoming * requests and spawning threads to handle these requests. Furthermore, it takes care of creating * the directory structure to store the BLOBs or temporarily cache them. */ public class BlobServer extends Thread implements BlobService, BlobWriter, PermanentBlobService, TransientBlobService { /** The log object used for debugging. */ private static final Logger LOG = LoggerFactory.getLogger(BlobServer.class); /** Counter to generate unique names for temporary files. */ private final AtomicLong tempFileCounter = new AtomicLong(0); /** The server socket listening for incoming connections. */ private final ServerSocket serverSocket; /** Blob Server configuration. */ private final Configuration blobServiceConfiguration; /** Indicates whether a shutdown of server component has been requested. */ private final AtomicBoolean shutdownRequested = new AtomicBoolean(); /** Root directory for local file storage. */ private final File storageDir; /** Blob store for distributed file storage, e.g. in HA. */ private final BlobStore blobStore; /** Set of currently running threads. */ private final Set<BlobServerConnection> activeConnections = new HashSet<>(); /** The maximum number of concurrent connections. */ private final int maxConnections; /** Lock guarding concurrent file accesses. 
*/ private final ReadWriteLock readWriteLock; /** Shutdown hook thread to ensure deletion of the local storage directory. */ private final Thread shutdownHook; // -------------------------------------------------------------------------------------------- /** * Map to store the TTL of each element stored in the local storage, i.e. via one of the {@link * #getFile} methods. */ private final ConcurrentHashMap<Tuple2<JobID, TransientBlobKey>, Long> blobExpiryTimes = new ConcurrentHashMap<>(); /** Time interval (ms) to run the cleanup task; also used as the default TTL. */ private final long cleanupInterval; /** Timer task to execute the cleanup at regular intervals. */ private final Timer cleanupTimer; /** * Instantiates a new BLOB server and binds it to a free network port. * * @param config Configuration to be used to instantiate the BlobServer * @param blobStore BlobStore to store blobs persistently * @throws IOException thrown if the BLOB server cannot bind to a free network port or if the * (local or distributed) file storage cannot be created or is not usable */ public BlobServer(Configuration config, BlobStore blobStore) throws IOException { this.blobServiceConfiguration = checkNotNull(config); this.blobStore = checkNotNull(blobStore); this.readWriteLock = new ReentrantReadWriteLock(); // configure and create the storage directory this.storageDir = BlobUtils.initLocalStorageDirectory(config); LOG.info("Created BLOB server storage directory {}", storageDir); // configure the maximum number of concurrent connections final int maxConnections = config.getInteger(BlobServerOptions.FETCH_CONCURRENT); if (maxConnections >= 1) { this.maxConnections = maxConnections; } else { LOG.warn( "Invalid value for maximum connections in BLOB server: {}. 
Using default value of {}", maxConnections, BlobServerOptions.FETCH_CONCURRENT.defaultValue()); this.maxConnections = BlobServerOptions.FETCH_CONCURRENT.defaultValue(); } // configure the backlog of connections int backlog = config.getInteger(BlobServerOptions.FETCH_BACKLOG); if (backlog < 1) { LOG.warn( "Invalid value for BLOB connection backlog: {}. Using default value of {}", backlog, BlobServerOptions.FETCH_BACKLOG.defaultValue()); backlog = BlobServerOptions.FETCH_BACKLOG.defaultValue(); } // Initializing the clean up task this.cleanupTimer = new Timer(true); this.cleanupInterval = config.getLong(BlobServerOptions.CLEANUP_INTERVAL) * 1000; this.cleanupTimer.schedule( new TransientBlobCleanupTask( blobExpiryTimes, readWriteLock.writeLock(), storageDir, LOG), cleanupInterval, cleanupInterval); this.shutdownHook = ShutdownHookUtil.addShutdownHook(this, getClass().getSimpleName(), LOG); // ----------------------- start the server ------------------- final String serverPortRange = config.getString(BlobServerOptions.PORT); final Iterator<Integer> ports = NetUtils.getPortRangeFromString(serverPortRange); final ServerSocketFactory socketFactory; if (SSLUtils.isInternalSSLEnabled(config) && config.getBoolean(BlobServerOptions.SSL_ENABLED)) { try { socketFactory = SSLUtils.createSSLServerSocketFactory(config); } catch (Exception e) { throw new IOException("Failed to initialize SSL for the blob server", e); } } else { socketFactory = ServerSocketFactory.getDefault(); } final int finalBacklog = backlog; final String bindHost = config.getOptional(JobManagerOptions.BIND_HOST) .orElseGet(NetUtils::getWildcardIPAddress); this.serverSocket = NetUtils.createSocketFromPorts( ports, (port) -> socketFactory.createServerSocket( port, finalBacklog, InetAddress.getByName(bindHost))); if (serverSocket == null) { throw new IOException( "Unable to open BLOB Server in specified port range: " + serverPortRange); } // start the server thread setName("BLOB Server listener at " + getPort()); 
setDaemon(true); if (LOG.isInfoEnabled()) { LOG.info( "Started BLOB server at {}:{} - max concurrent requests: {} - max backlog: {}", serverSocket.getInetAddress().getHostAddress(), getPort(), maxConnections, backlog); } } // -------------------------------------------------------------------------------------------- // Path Accessors // -------------------------------------------------------------------------------------------- public File getStorageDir() { return storageDir; } /** * Returns a file handle to the file associated with the given blob key on the blob server. * * <p><strong>This is only called from {@link BlobServerConnection} or unit tests.</strong> * * @param jobId ID of the job this blob belongs to (or <tt>null</tt> if job-unrelated) * @param key identifying the file * @return file handle to the file * @throws IOException if creating the directory fails */ @VisibleForTesting public File getStorageLocation(@Nullable JobID jobId, BlobKey key) throws IOException { return BlobUtils.getStorageLocation(storageDir, jobId, key); } /** * Returns a temporary file inside the BLOB server's incoming directory. * * @return a temporary file inside the BLOB server's incoming directory * @throws IOException if creating the directory fails */ File createTemporaryFilename() throws IOException { return new File( BlobUtils.getIncomingDirectory(storageDir), String.format("temp-%08d", tempFileCounter.getAndIncrement())); } /** Returns the lock used to guard file accesses. 
     */
    ReadWriteLock getReadWriteLock() {
        return readWriteLock;
    }

    /**
     * Accept loop of the BLOB server thread.
     *
     * <p>Runs until {@code shutdownRequested} is set: accepts a socket, waits (polling every 2s
     * on {@code activeConnections}) until the number of active connections drops below
     * {@code maxConnections}, then starts a {@link BlobServerConnection} thread for it. Once
     * {@code conn.start()} succeeded, {@code conn} is nulled so the {@code finally} block does
     * not close a connection whose ownership was handed off; on any earlier failure the
     * connection is closed and deregistered there instead.
     *
     * <p>Any throwable outside of an orderly shutdown is logged and triggers {@link #close()}.
     */
    @Override
    public void run() {
        try {
            while (!this.shutdownRequested.get()) {
                BlobServerConnection conn =
                        new BlobServerConnection(serverSocket.accept(), this);
                try {
                    synchronized (activeConnections) {
                        while (activeConnections.size() >= maxConnections) {
                            activeConnections.wait(2000);
                        }
                        activeConnections.add(conn);
                    }

                    conn.start();
                    // ownership handed to the connection thread; prevent the finally
                    // block below from closing it
                    conn = null;
                } finally {
                    if (conn != null) {
                        conn.close();
                        synchronized (activeConnections) {
                            activeConnections.remove(conn);
                        }
                    }
                }
            }
        } catch (Throwable t) {
            if (!this.shutdownRequested.get()) {
                LOG.error("BLOB server stopped working. Shutting down", t);

                try {
                    close();
                } catch (Throwable closeThrowable) {
                    LOG.error("Could not properly close the BlobServer.", closeThrowable);
                }
            }
        }
    }

    /**
     * Shuts down the BLOB server: cancels the cleanup timer, closes the server socket, joins the
     * acceptor thread, closes all active connections, deletes the local storage directory, removes
     * the shutdown hook, and rethrows the first {@link IOException} encountered (later ones are
     * attached as suppressed exceptions).
     */
    @Override
    public void close() throws IOException {
        cleanupTimer.cancel();

        if (shutdownRequested.compareAndSet(false, true)) {
            Exception exception = null;

            try {
                this.serverSocket.close();
            } catch (IOException ioe) {
                exception = ioe;
            }

            // wake the thread up, in case it is waiting on some operation
            interrupt();

            try {
                join();
            } catch (InterruptedException ie) {
                // re-interrupt so callers still observe the interruption
                Thread.currentThread().interrupt();

                LOG.debug("Error while waiting for this thread to die.", ie);
            }

            synchronized (activeConnections) {
                if (!activeConnections.isEmpty()) {
                    for (BlobServerConnection conn : activeConnections) {
                        LOG.debug("Shutting down connection {}.", conn.getName());
                        conn.close();
                    }
                    activeConnections.clear();
                }
            }

            // Clean up the storage directory
            try {
                FileUtils.deleteDirectory(storageDir);
            } catch (IOException e) {
                exception = ExceptionUtils.firstOrSuppressed(e, exception);
            }

            // Remove shutdown hook to prevent resource leaks
            ShutdownHookUtil.removeShutdownHook(shutdownHook, getClass().getSimpleName(), LOG);

            if (LOG.isInfoEnabled()) {
                LOG.info(
                        "Stopped BLOB server at {}:{}",
                        serverSocket.getInetAddress().getHostAddress(),
                        getPort());
            }

            ExceptionUtils.tryRethrowIOException(exception);
        }
    }

    /** Creates a BLOB client connected to this server's own socket address. */
    protected
    BlobClient createClient() throws IOException {
        return new BlobClient(
                new InetSocketAddress(serverSocket.getInetAddress(), getPort()),
                blobServiceConfiguration);
    }

    /**
     * Retrieves the local path of a (job-unrelated) file associated with a job and a blob key.
     *
     * <p>The blob server looks the blob key up in its local storage. If the file exists, it is
     * returned. If the file does not exist, it is retrieved from the HA blob store (if available)
     * or a {@link FileNotFoundException} is thrown.
     *
     * @param key blob key associated with the requested file
     * @return file referring to the local storage location of the BLOB
     * @throws IOException Thrown if the file retrieval failed.
     */
    @Override
    public File getFile(TransientBlobKey key) throws IOException {
        return getFileInternal(null, key);
    }

    /**
     * Retrieves the local path of a file associated with a job and a blob key.
     *
     * <p>The blob server looks the blob key up in its local storage. If the file exists, it is
     * returned. If the file does not exist, it is retrieved from the HA blob store (if available)
     * or a {@link FileNotFoundException} is thrown.
     *
     * @param jobId ID of the job this blob belongs to
     * @param key blob key associated with the requested file
     * @return file referring to the local storage location of the BLOB
     * @throws IOException Thrown if the file retrieval failed.
     */
    @Override
    public File getFile(JobID jobId, TransientBlobKey key) throws IOException {
        checkNotNull(jobId);
        return getFileInternal(jobId, key);
    }

    /**
     * Returns the path to a local copy of the file associated with the provided job ID and blob
     * key.
     *
     * <p>We will first attempt to serve the BLOB from the local storage. If the BLOB is not in
     * there, we will try to download it from the HA store.
     *
     * @param jobId ID of the job this blob belongs to
     * @param key blob key associated with the requested file
     * @return The path to the file.
     * @throws java.io.FileNotFoundException if the BLOB does not exist;
     * @throws IOException if any other error occurs when retrieving the file
     */
    @Override
    public File getFile(JobID jobId, PermanentBlobKey key) throws IOException {
        checkNotNull(jobId);
        return getFileInternal(jobId, key);
    }

    /**
     * Retrieves the local path of a file associated with a job and a blob key.
     *
     * <p>The blob server looks the blob key up in its local storage. If the file exists, it is
     * returned. If the file does not exist, it is retrieved from the HA blob store (if available)
     * or a {@link FileNotFoundException} is thrown.
     *
     * @param jobId ID of the job this blob belongs to (or <tt>null</tt> if job-unrelated)
     * @param blobKey blob key associated with the requested file
     * @return file referring to the local storage location of the BLOB
     * @throws IOException Thrown if the file retrieval failed.
     */
    private File getFileInternal(@Nullable JobID jobId, BlobKey blobKey) throws IOException {
        checkArgument(blobKey != null, "BLOB key cannot be null.");

        final File localFile = BlobUtils.getStorageLocation(storageDir, jobId, blobKey);

        // the 3-arg helper below assumes the read lock is held; acquire it here
        readWriteLock.readLock().lock();
        try {
            getFileInternal(jobId, blobKey, localFile);
            return localFile;
        } finally {
            readWriteLock.readLock().unlock();
        }
    }

    /**
     * Helper to retrieve the local path of a file associated with a job and a blob key.
     *
     * <p>The blob server looks the blob key up in its local storage. If the file exists, it is
     * returned. If the file does not exist, it is retrieved from the HA blob store (if available)
     * or a {@link FileNotFoundException} is thrown.
     *
     * <p><strong>Assumes the read lock has already been acquired.</strong>
     *
     * @param jobId ID of the job this blob belongs to (or <tt>null</tt> if job-unrelated)
     * @param blobKey blob key associated with the requested file
     * @param localFile (local) file where the blob is/should be stored
     * @throws IOException Thrown if the file retrieval failed.
     */
    void getFileInternal(@Nullable JobID jobId, BlobKey blobKey, File localFile)
            throws IOException {
        // assume readWriteLock.readLock() was already locked (cannot really check that)

        if (localFile.exists()) {
            // update TTL for transient BLOBs:
            if (blobKey instanceof TransientBlobKey) {
                // regarding concurrent operations, it is not really important which timestamp makes
                // it into the map as they are close to each other anyway, also we can simply
                // overwrite old values as long as we are in the read (or write) lock
                blobExpiryTimes.put(
                        Tuple2.of(jobId, (TransientBlobKey) blobKey),
                        System.currentTimeMillis() + cleanupInterval);
            }
            return;
        } else if (blobKey instanceof PermanentBlobKey) {
            // Try the HA blob store
            // first we have to release the read lock in order to acquire the write lock
            readWriteLock.readLock().unlock();

            // use a temporary file (thread-safe without locking)
            File incomingFile = null;
            try {
                incomingFile = createTemporaryFilename();
                blobStore.get(jobId, blobKey, incomingFile);

                // move into place under the write lock, then return with the file in place
                readWriteLock.writeLock().lock();
                try {
                    BlobUtils.moveTempFileToStore(
                            incomingFile, jobId, blobKey, localFile, LOG, null);
                } finally {
                    readWriteLock.writeLock().unlock();
                }

                return;
            } finally {
                // delete incomingFile from a failed download
                if (incomingFile != null && !incomingFile.delete() && incomingFile.exists()) {
                    LOG.warn(
                            "Could not delete the staging file {} for blob key {} and job {}.",
                            incomingFile,
                            blobKey,
                            jobId);
                }

                // re-acquire lock so that it can be unlocked again outside
                readWriteLock.readLock().lock();
            }
        }

        // only reached for a transient BLOB that is not in local storage (permanent BLOBs either
        // returned above or propagated the HA-store failure)
        throw new FileNotFoundException(
                "Local file "
                        + localFile
                        + " does not exist "
                        + "and failed to copy from blob store.");
    }

    // put() API: transient uploads from a byte buffer (job-unrelated and job-scoped variants),
    // both delegating to putBuffer()

    @Override
    public TransientBlobKey putTransient(byte[] value) throws IOException {
        return (TransientBlobKey) putBuffer(null, value, TRANSIENT_BLOB);
    }

    @Override
    public TransientBlobKey putTransient(JobID jobId, byte[] value) throws IOException {
        checkNotNull(jobId);
        return (TransientBlobKey) putBuffer(jobId, value, TRANSIENT_BLOB);
    }

    @Override
    public TransientBlobKey putTransient(InputStream inputStream) throws IOException {
        return (TransientBlobKey) putInputStream(null, inputStream, TRANSIENT_BLOB);
    }

    @Override
    public TransientBlobKey putTransient(JobID jobId, InputStream inputStream) throws IOException {
        checkNotNull(jobId);
        return (TransientBlobKey) putInputStream(jobId, inputStream, TRANSIENT_BLOB);
    }

    @Override
    public PermanentBlobKey putPermanent(JobID jobId, byte[] value) throws IOException {
        checkNotNull(jobId);
        return (PermanentBlobKey) putBuffer(jobId, value, PERMANENT_BLOB);
    }

    @Override
    public PermanentBlobKey putPermanent(JobID jobId, InputStream inputStream) throws IOException {
        checkNotNull(jobId);
        return (PermanentBlobKey) putInputStream(jobId, inputStream, PERMANENT_BLOB);
    }

    /**
     * Uploads the data of the given byte array for the given job to the BLOB server.
     *
     * <p>The content is first written to a unique staging file (digesting it on the way), then
     * atomically moved into its content-addressed storage location; the staging file is removed
     * in every code path.
     *
     * @param jobId the ID of the job the BLOB belongs to
     * @param value the buffer to upload
     * @param blobType whether to make the data permanent or transient
     * @return the computed BLOB key identifying the BLOB on the server
     * @throws IOException thrown if an I/O error occurs while writing it to a local file, or
     *     uploading it to the HA store
     */
    private BlobKey putBuffer(@Nullable JobID jobId, byte[] value, BlobKey.BlobType blobType)
            throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Received PUT call for BLOB of job {}.", jobId);
        }

        File incomingFile = createTemporaryFilename();
        MessageDigest md = BlobUtils.createMessageDigest();
        BlobKey blobKey = null;
        try (FileOutputStream fos = new FileOutputStream(incomingFile)) {
            md.update(value);
            fos.write(value);
        } catch (IOException ioe) {
            // delete incomingFile from a failed download
            if (!incomingFile.delete() && incomingFile.exists()) {
                LOG.warn("Could not delete the staging file {} for job {}.", incomingFile, jobId);
            }
            throw ioe;
        }

        try {
            // persist file
            blobKey = moveTempFileToStore(incomingFile, jobId, md.digest(), blobType);

            return blobKey;
        } finally {
            // delete incomingFile from a failed download
            if (!incomingFile.delete() && incomingFile.exists()) {
                LOG.warn(
                        "Could not delete the staging file {} for blob key {} and job {}.",
                        incomingFile,
                        blobKey,
                        jobId);
            }
        }
    }

    /**
     * Uploads the data from the given input stream for the given job to the BLOB server.
     *
     * <p>Same staging-file protocol as {@link #putBuffer}, but streams the content and computes
     * the digest incrementally.
     *
     * @param jobId the ID of the job the BLOB belongs to
     * @param inputStream the input stream to read the data from
     * @param blobType whether to make the data permanent or transient
     * @return the computed BLOB key identifying the BLOB on the server
     * @throws IOException thrown if an I/O error occurs while reading the data from the input
     *     stream, writing it to a local file, or uploading it to the HA store
     */
    private BlobKey putInputStream(
            @Nullable JobID jobId, InputStream inputStream, BlobKey.BlobType blobType)
            throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Received PUT call for BLOB of job {}.", jobId);
        }

        File incomingFile = createTemporaryFilename();
        BlobKey blobKey = null;
        try {
            MessageDigest md = writeStreamToFileAndCreateDigest(inputStream, incomingFile);

            // persist file
            blobKey = moveTempFileToStore(incomingFile, jobId, md.digest(), blobType);

            return blobKey;
        } finally {
            // delete incomingFile from a failed download
            if (!incomingFile.delete() && incomingFile.exists()) {
                LOG.warn(
                        "Could not delete the staging file {} for blob key {} and job {}.",
                        incomingFile,
                        blobKey,
                        jobId);
            }
        }
    }

    /**
     * Copies the given stream into {@code file} while accumulating a message digest over the
     * bytes written; returns the digest (not yet finalized via {@code digest()}).
     */
    private static MessageDigest writeStreamToFileAndCreateDigest(
            InputStream inputStream, File file) throws IOException {
        try (FileOutputStream fos = new FileOutputStream(file)) {
            MessageDigest md = BlobUtils.createMessageDigest();

            // read stream
            byte[] buf = new byte[BUFFER_SIZE];
            while (true) {
                final int bytesRead = inputStream.read(buf);
                if (bytesRead == -1) {
                    // done
                    break;
                }
                fos.write(buf, 0, bytesRead);
                md.update(buf, 0, bytesRead);
            }
            return md;
        }
    }

    /**
     * Moves the temporary <tt>incomingFile</tt> to its permanent location where it is available for
     * use.
     *
     * @param incomingFile temporary file created during transfer
     * @param jobId ID of the job this blob belongs to or <tt>null</tt> if job-unrelated
     * @param digest BLOB content digest, i.e. hash
     * @param blobType whether this file is a permanent or transient BLOB
     * @return unique BLOB key that identifies the BLOB on the server
     * @throws IOException thrown if an I/O error occurs while moving the file or uploading it to
     *     the HA store
     */
    BlobKey moveTempFileToStore(
            File incomingFile, @Nullable JobID jobId, byte[] digest, BlobKey.BlobType blobType)
            throws IOException {

        int retries = 10;

        int attempt = 0;
        while (true) {
            // add unique component independent of the BLOB content
            BlobKey blobKey = BlobKey.createKey(blobType, digest);
            File storageFile = BlobUtils.getStorageLocation(storageDir, jobId, blobKey);

            // try again until the key is unique (put the existence check into the lock!)
            readWriteLock.writeLock().lock();
            try {
                if (!storageFile.exists()) {
                    // permanent BLOBs are additionally uploaded to the HA blob store
                    BlobUtils.moveTempFileToStore(
                            incomingFile,
                            jobId,
                            blobKey,
                            storageFile,
                            LOG,
                            blobKey instanceof PermanentBlobKey ? blobStore : null);
                    // add TTL for transient BLOBs:
                    if (blobKey instanceof TransientBlobKey) {
                        // must be inside read or write lock to add a TTL
                        blobExpiryTimes.put(
                                Tuple2.of(jobId, (TransientBlobKey) blobKey),
                                System.currentTimeMillis() + cleanupInterval);
                    }
                    return blobKey;
                }
            } finally {
                readWriteLock.writeLock().unlock();
            }

            ++attempt;
            if (attempt >= retries) {
                String message =
                        "Failed to find a unique key for BLOB of job "
                                + jobId
                                + " (last tried "
                                + storageFile.getAbsolutePath()
                                + ".";
                LOG.error(message + " No retries left.");
                throw new IOException(message);
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(
                            "Trying to find a unique key for BLOB of job {} (retry {}, last tried {})",
                            jobId,
                            attempt,
                            storageFile.getAbsolutePath());
                }
            }
        }
    }

    /**
     * Deletes the (job-unrelated) file associated with the blob key in the local storage of the
     * blob server.
     *
     * @param key blob key associated with the file to be deleted
     * @return <tt>true</tt> if the given blob is successfully deleted or non-existing;
     *     <tt>false</tt> otherwise
     */
    @Override
    public boolean deleteFromCache(TransientBlobKey key) {
        return deleteInternal(null, key);
    }

    /**
     * Deletes the file associated with the blob key in the local storage of the blob server.
     *
     * @param jobId ID of the job this blob belongs to
     * @param key blob key associated with the file to be deleted
     * @return <tt>true</tt> if the given blob is successfully deleted or non-existing;
     *     <tt>false</tt> otherwise
     */
    @Override
    public boolean deleteFromCache(JobID jobId, TransientBlobKey key) {
        checkNotNull(jobId);
        return deleteInternal(jobId, key);
    }

    /**
     * Deletes the file associated with the blob key in the local storage of the blob server.
     *
     * <p>Both the file deletion and the removal of the expiry-time entry happen under the write
     * lock so they cannot interleave with a concurrent {@code getFile()}.
     *
     * @param jobId ID of the job this blob belongs to (or <tt>null</tt> if job-unrelated)
     * @param key blob key associated with the file to be deleted
     * @return <tt>true</tt> if the given blob is successfully deleted or non-existing;
     *     <tt>false</tt> otherwise
     */
    boolean deleteInternal(@Nullable JobID jobId, TransientBlobKey key) {
        final File localFile =
                new File(
                        BlobUtils.getStorageLocationPath(storageDir.getAbsolutePath(), jobId, key));

        readWriteLock.writeLock().lock();
        try {
            if (!localFile.delete() && localFile.exists()) {
                LOG.warn(
                        "Failed to locally delete BLOB "
                                + key
                                + " at "
                                + localFile.getAbsolutePath());
                return false;
            }

            // this needs to happen inside the write lock in case of concurrent getFile() calls
            blobExpiryTimes.remove(Tuple2.of(jobId, key));
            return true;
        } finally {
            readWriteLock.writeLock().unlock();
        }
    }

    /**
     * Removes all BLOBs from local and HA store belonging to the given job ID.
     *
     * @param jobId ID of the job this blob belongs to
     * @param cleanupBlobStoreFiles True if the corresponding blob store files shall be cleaned up
     *     as well. Otherwise false.
     * @return <tt>true</tt> if the job directory is successfully deleted or non-existing;
     *     <tt>false</tt> otherwise
     */
    public boolean cleanupJob(JobID jobId, boolean cleanupBlobStoreFiles) {
        checkNotNull(jobId);

        final File jobDir =
                new File(BlobUtils.getStorageLocationPath(storageDir.getAbsolutePath(), jobId));

        readWriteLock.writeLock().lock();
        try {
            // delete locally
            boolean deletedLocally = false;
            try {
                FileUtils.deleteDirectory(jobDir);

                // NOTE: Instead of going through blobExpiryTimes, keep lingering entries - they
                //       will be cleaned up by the timer task which tolerates non-existing files
                //       If inserted again with the same IDs (via put()), the TTL will be updated
                //       again.

                deletedLocally = true;
            } catch (IOException e) {
                LOG.warn(
                        "Failed to locally delete BLOB storage directory at "
                                + jobDir.getAbsolutePath(),
                        e);
            }

            // delete in HA blob store files
            final boolean deletedHA = !cleanupBlobStoreFiles || blobStore.deleteAll(jobId);

            return deletedLocally && deletedHA;
        } finally {
            readWriteLock.writeLock().unlock();
        }
    }

    @Override
    public PermanentBlobService getPermanentBlobService() {
        return this;
    }

    @Override
    public TransientBlobService getTransientBlobService() {
        return this;
    }

    /**
     * Returns the configured minimum size (per {@code BlobServerOptions.OFFLOAD_MINSIZE}) above
     * which BLOBs are offloaded to this server.
     *
     * @return minimum offloading size in bytes
     */
    @Override
    public final int getMinOffloadingSize() {
        return blobServiceConfiguration.getInteger(BlobServerOptions.OFFLOAD_MINSIZE);
    }

    /**
     * Returns the port on which the server is listening.
     *
     * @return port on which the server is listening
     */
    @Override
    public int getPort() {
        return this.serverSocket.getLocalPort();
    }

    /**
     * Returns the blob expiry times - for testing purposes only!
     *
     * @return blob expiry times (internal state!)
     */
    @VisibleForTesting
    ConcurrentMap<Tuple2<JobID, TransientBlobKey>, Long> getBlobExpiryTimes() {
        return blobExpiryTimes;
    }

    /**
     * Tests whether the BLOB server has been requested to shut down.
     *
     * @return True, if the server has been requested to shut down, false otherwise.
     */
    public boolean isShutdown() {
        return this.shutdownRequested.get();
    }

    /** Access to the server socket, for testing. */
    ServerSocket getServerSocket() {
        return this.serverSocket;
    }

    /**
     * Deregisters a finished connection and wakes up the accept loop, which may be waiting for a
     * free connection slot.
     */
    void unregisterConnection(BlobServerConnection conn) {
        synchronized (activeConnections) {
            activeConnections.remove(conn);
            activeConnections.notifyAll();
        }
    }

    /**
     * Returns all the current active connections in the BlobServer.
     *
     * @return a snapshot list of all active connections in the current BlobServer
     */
    List<BlobServerConnection> getCurrentActiveConnections() {
        synchronized (activeConnections) {
            return new ArrayList<>(activeConnections);
        }
    }
}
package adaptors.googlep.gpObjects;

import helper.utilities.ParseUtilities;

import java.util.ArrayList;
import java.util.List;

import org.json.JSONArray;
import org.json.JSONObject;

/**
 * Data holder for a single Google+ activity, populated from the JSON returned by the Google+ API.
 *
 * <p>All optional JSON fields are read with {@code optString(..., null)} and the like, so absent
 * fields are represented as {@code null} (or an empty nested object reference).
 *
 * <p>NOTE: the nested classes are {@code static} member classes — they never access enclosing
 * instance state, so keeping them as inner classes would only make every instance carry a hidden
 * reference to its {@code gpActivity} (Effective Java, Item 24).
 */
public class gpActivity {

    private GpaActor actor;
    private String address;
    private String annotation;
    private String geocode;
    private String id;
    private GpaObject object;
    private String placeName;
    private String published;
    private String radius;
    private String title;
    private String updated;
    private String url;
    private String verb;

    /** Creates an empty activity; all fields remain {@code null}. */
    public gpActivity() {
    }

    /**
     * Parses an activity from its JSON representation; missing fields are left {@code null}.
     *
     * @param json activity resource as returned by the Google+ API
     */
    public gpActivity(JSONObject json) {
        this.setTitle(json.optString("title", null));
        this.setPublished(json.optString("published", null));
        this.setUpdated(json.optString("updated", null));
        this.setId(json.optString("id", null));
        this.setUrl(json.optString("url", null));
        JSONObject jsactor = json.optJSONObject("actor");
        if (jsactor != null) {
            this.setActor(new GpaActor(jsactor));
        }
        this.setVerb(json.optString("verb", null));
        JSONObject jsobject = json.optJSONObject("object");
        if (jsobject != null) {
            this.setObject(new GpaObject(jsobject));
        }
        this.setAnnotation(json.optString("annotation", null));
        this.setGeocode(json.optString("geocode", null));
        this.setAddress(json.optString("address", null));
        this.setRadius(json.optString("radius", null));
        this.setPlaceName(json.optString("placeName", null));
    }

    public String getVerb() {
        return verb;
    }

    public void setVerb(String verb) {
        this.verb = verb;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getUpdated() {
        return updated;
    }

    public void setUpdated(String updated) {
        this.updated = updated;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getRadius() {
        return radius;
    }

    public void setRadius(String radius) {
        this.radius = radius;
    }

    public String getPublished() {
        return published;
    }

    public void setPublished(String published) {
        this.published = published;
    }

    public String getPlaceName() {
        return placeName;
    }

    public void setPlaceName(String placeName) {
        this.placeName = placeName;
    }

    public GpaObject getObject() {
        return object;
    }

    public void setObject(GpaObject object) {
        this.object = object;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getGeocode() {
        return geocode;
    }

    public void setGeocode(String geocode) {
        this.geocode = geocode;
    }

    public String getAnnotation() {
        return annotation;
    }

    public void setAnnotation(String annotation) {
        this.annotation = annotation;
    }

    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    public GpaActor getActor() {
        return actor;
    }

    public void setActor(GpaActor actor) {
        this.actor = actor;
    }

    /** A media or link attachment of an activity's object. */
    public static class GpaAttachment {

        private String content;
        private String displayName;
        private String embed;
        private String fullImage;
        private String id;
        private String image;
        private String objectType;
        private List<GpaThumbnail> thumbnails;
        private String url;

        public GpaAttachment() {
        }

        /** Parses an attachment; nested image/embed URLs are flattened to plain strings. */
        public GpaAttachment(JSONObject json) {
            this.setObjectType(json.optString("objectType", null));
            this.setDisplayName(json.optString("displayName", null));
            this.setId(json.optString("id", null));
            this.setContent(json.optString("content", null));
            this.setUrl(json.optString("url", null));
            this.setImage(ParseUtilities.doubleJsParse(json, "image", "url"));
            this.setFullImage(ParseUtilities.doubleJsParse(json, "fullImage", "url"));
            this.setEmbed(ParseUtilities.doubleJsParse(json, "embed", "url"));
            JSONArray thumbnailsArray = json.optJSONArray("thumbnails");
            if (thumbnailsArray != null) {
                this.thumbnails = new ArrayList<>();
                for (int index = 0; index < thumbnailsArray.length(); index++) {
                    JSONObject jsthumbnail = thumbnailsArray.optJSONObject(index);
                    if (jsthumbnail != null) {
                        this.thumbnails.add(new GpaThumbnail(jsthumbnail));
                    }
                }
            }
        }

        public String getUrl() {
            return url;
        }

        public void setUrl(String url) {
            this.url = url;
        }

        public String getObjectType() {
            return objectType;
        }

        public void setObjectType(String objectType) {
            this.objectType = objectType;
        }

        public String getImage() {
            return image;
        }

        public void setImage(String image) {
            this.image = image;
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getFullImage() {
            return fullImage;
        }

        public void setFullImage(String fullImage) {
            this.fullImage = fullImage;
        }

        public String getEmbed() {
            return embed;
        }

        public void setEmbed(String embed) {
            this.embed = embed;
        }

        public String getDisplayName() {
            return displayName;
        }

        public void setDisplayName(String displayName) {
            this.displayName = displayName;
        }

        public String getContent() {
            return content;
        }

        public void setContent(String content) {
            this.content = content;
        }

        public List<GpaThumbnail> getThumbnails() {
            return thumbnails;
        }

        public void setThumbnails(List<GpaThumbnail> thumbnails) {
            this.thumbnails = thumbnails;
        }
    }

    /** The author (or referenced person) of an activity or object. */
    public static class GpaActor {

        private String displayName;
        private String familyName;
        private String givenName;
        private String id;
        private String image;
        private String url;

        public GpaActor() {
        }

        /** Parses an actor; the structured "name" object is flattened into family/given name. */
        public GpaActor(JSONObject json) {
            this.setId(json.optString("id", null));
            this.setDisplayName(json.optString("displayName", null));
            JSONObject jsname = json.optJSONObject("name");
            if (jsname != null) {
                this.setFamilyName(jsname.optString("familyName", null));
                this.setGivenName(jsname.optString("givenName", null));
            }
            this.setUrl(json.optString("url", null));
            this.setImage(ParseUtilities.doubleJsParse(json, "image", "url"));
        }

        public String getUrl() {
            return url;
        }

        public void setUrl(String url) {
            this.url = url;
        }

        public String getImage() {
            return image;
        }

        public void setImage(String image) {
            this.image = image;
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getGivenName() {
            return givenName;
        }

        public void setGivenName(String givenName) {
            this.givenName = givenName;
        }

        public String getFamilyName() {
            return familyName;
        }

        public void setFamilyName(String familyName) {
            this.familyName = familyName;
        }

        public String getDisplayName() {
            return displayName;
        }

        public void setDisplayName(String displayName) {
            this.displayName = displayName;
        }
    }

    /** A thumbnail entry of an attachment. */
    public static class GpaThumbnail {

        private String description;
        private String image;
        private String url;

        public GpaThumbnail() {
        }

        public GpaThumbnail(JSONObject json) {
            this.setUrl(json.optString("url", null));
            this.setDescription(json.optString("description", null));
            this.setImage(ParseUtilities.doubleJsParse(json, "image", "url"));
        }

        public String getUrl() {
            return url;
        }

        public void setUrl(String url) {
            this.url = url;
        }

        public String getImage() {
            return image;
        }

        public void setImage(String image) {
            this.image = image;
        }

        public String getDescription() {
            return description;
        }

        public void setDescription(String description) {
            this.description = description;
        }
    }

    /** The shared/annotated object of an activity, including engagement counters. */
    public static class GpaObject {

        private GpaActor actor;
        private List<GpaAttachment> attachments;
        private String content;
        private String id;
        private String objectType;
        private String originalContent;
        private int plusoners;
        private int replies;
        private int resharers;
        private String url;

        public GpaObject() {
        }

        /**
         * Parses an activity object; counters come from the nested "totalItems" fields of the
         * replies/plusoners/resharers objects.
         */
        public GpaObject(JSONObject json) {
            this.setObjectType(json.optString("objectType", null));
            this.setId(json.optString("id", null));
            JSONObject jsactor = json.optJSONObject("actor");
            if (jsactor != null) {
                this.setActor(new GpaActor(jsactor));
            }
            this.setContent(json.optString("content", null));
            this.setOriginalContent(json.optString("originalContent", null));
            this.setUrl(json.optString("url", null));
            this.setReplies(ParseUtilities.doubleJsParseInt(json, "replies", "totalItems"));
            this.setPlusoners(ParseUtilities.doubleJsParseInt(json, "plusoners", "totalItems"));
            this.setResharers(ParseUtilities.doubleJsParseInt(json, "resharers", "totalItems"));
            JSONArray attachmentsArray = json.optJSONArray("attachments");
            if (attachmentsArray != null) {
                this.attachments = new ArrayList<>();
                for (int index = 0; index < attachmentsArray.length(); index++) {
                    JSONObject jsattachment = attachmentsArray.optJSONObject(index);
                    if (jsattachment != null) {
                        this.attachments.add(new GpaAttachment(jsattachment));
                    }
                }
            }
        }

        public List<GpaAttachment> getAttachments() {
            return attachments;
        }

        public void setAttachments(List<GpaAttachment> attachments) {
            this.attachments = attachments;
        }

        public String getUrl() {
            return url;
        }

        public void setUrl(String url) {
            this.url = url;
        }

        public int getResharers() {
            return resharers;
        }

        public void setResharers(int resharers) {
            this.resharers = resharers;
        }

        public int getReplies() {
            return replies;
        }

        public void setReplies(int replies) {
            this.replies = replies;
        }

        public int getPlusoners() {
            return plusoners;
        }

        public void setPlusoners(int plusoners) {
            this.plusoners = plusoners;
        }

        public String getOriginalContent() {
            return originalContent;
        }

        public void setOriginalContent(String originalContent) {
            this.originalContent = originalContent;
        }

        public String getObjectType() {
            return objectType;
        }

        public void setObjectType(String objectType) {
            this.objectType = objectType;
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getContent() {
            return content;
        }

        public void setContent(String content) {
            this.content = content;
        }

        public GpaActor getActor() {
            return actor;
        }

        public void setActor(GpaActor actor) {
            this.actor = actor;
        }
    }
}
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2004, 2005, 2006, 2007, 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.assessment.ui.listener.author; import java.util.ArrayList; import java.util.Date; import java.util.Iterator; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext; import javax.faces.event.AbortProcessingException; import javax.faces.event.ActionEvent; import javax.faces.event.ActionListener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.tool.assessment.api.SamigoApiFactory; import org.sakaiproject.tool.assessment.data.dao.assessment.AssessmentAccessControl; import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentAccessControlIfc; import org.sakaiproject.tool.assessment.data.ifc.assessment.EvaluationModelIfc; import org.sakaiproject.tool.assessment.facade.AssessmentFacade; import org.sakaiproject.tool.assessment.services.assessment.AssessmentService; import org.sakaiproject.tool.assessment.shared.api.assessment.SecureDeliveryServiceAPI; import org.sakaiproject.tool.assessment.ui.bean.author.AssessmentSettingsBean; import 
org.sakaiproject.tool.assessment.ui.bean.author.AuthorBean;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;
import org.sakaiproject.tool.assessment.util.TextFormat;
import org.sakaiproject.util.FormattedText;

/**
 * <p>Title: Samigo</p>
 * <p>Description: Sakai Assessment Manager</p>
 *
 * JSF action listener invoked when the author saves assessment settings.
 * It runs a sequence of validations; each failure adds a localized
 * FacesMessage and raises an error flag. Only when every check passes are
 * the settings persisted and the author's assessment list refreshed.
 *
 * @author Ed Smiley
 * @version $Id$
 */
public class SaveAssessmentSettingsListener
    implements ActionListener
{
  private static Log log = LogFactory.getLog(SaveAssessmentSettingsListener.class);
  //private static final GradebookServiceHelper gbsHelper = IntegrationContextFactory.getInstance().getGradebookServiceHelper();
  //private static final boolean integrated = IntegrationContextFactory.getInstance().isIntegrated();

  public SaveAssessmentSettingsListener()
  {
  }

  /**
   * Validates the "assessmentSettings" managed bean and, if everything is
   * valid, saves the settings and navigates back to the originating page.
   *
   * Validation order matters: every check runs even after a failure so the
   * user sees all problems at once; the error flag is only consulted at the
   * end.
   *
   * @param ae the JSF action event (not used directly)
   * @throws AbortProcessingException per the ActionListener contract
   */
  public void processAction(ActionEvent ae) throws AbortProcessingException
  {
    FacesContext context = FacesContext.getCurrentInstance();
    AssessmentSettingsBean assessmentSettings = (AssessmentSettingsBean) ContextUtil.
      lookupBean("assessmentSettings");
    boolean error = false;
    String assessmentId = String.valueOf(assessmentSettings.getAssessmentId());
    AssessmentService assessmentService = new AssessmentService();
    SaveAssessmentSettings s = new SaveAssessmentSettings();
    String assessmentName = TextFormat.convertPlaintextToFormattedTextNoHighUnicode(log, assessmentSettings.getTitle());

    // check if name is empty
    // NOTE(review): a null title is NOT flagged here — presumably guarded
    // upstream; confirm before tightening.
    if (assessmentName != null && (assessmentName.trim()).equals("")) {
      String nameEmpty_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","assessmentName_empty");
      context.addMessage(null, new FacesMessage(nameEmpty_err));
      error = true;
    }

    // check if name is unique among this author's assessments
    if (!assessmentService.assessmentTitleIsUnique(assessmentId, assessmentName, false)) {
      String nameUnique_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","assessmentName_error");
      context.addMessage(null, new FacesMessage(nameUnique_err));
      error = true;
    }

    // check if start date is valid
    if (!assessmentSettings.getIsValidStartDate()) {
      String startDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_start_date");
      context.addMessage(null, new FacesMessage(startDateErr));
      error = true;
    }
    // check if due date is valid
    if (!assessmentSettings.getIsValidDueDate()) {
      String dueDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_due_date");
      context.addMessage(null, new FacesMessage(dueDateErr));
      error = true;
    }
    // check if late submission (retract) date is valid
    if (!assessmentSettings.getIsValidRetractDate()) {
      String retractDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_retrack_date");
      context.addMessage(null, new FacesMessage(retractDateErr));
      error = true;
    }

    // releasing to selected groups requires at least one group to be chosen
    if (assessmentSettings.getReleaseTo().equals(AssessmentAccessControl.RELEASE_TO_SELECTED_GROUPS)) {
      String[] groupsAuthorized = assessmentSettings.getGroupsAuthorizedToSave(); //getGroupsAuthorized();
      if (groupsAuthorized == null || groupsAuthorized.length == 0) {
        String releaseGroupError = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","choose_one_group");
        context.addMessage(null, new FacesMessage(releaseGroupError));
        error = true;
        assessmentSettings.setNoGroupSelectedError(true);
      }
      else {
        assessmentSettings.setNoGroupSelectedError(false);
      }
    }

    // if timed assessment, does it have a time limit value?
    Object time = assessmentSettings.getValueMap().get("hasTimeAssessment");
    boolean isTime = false;
    try {
      if (time != null) {
        isTime = ((Boolean) time).booleanValue();
      }
    }
    catch (Exception ex) {
      // keep default; the map value was not the expected Boolean
      log.warn("Expecting Boolean hasTimeAssessment, got: " + time);
    }
    if ((isTime) && ((assessmentSettings.getTimeLimit().intValue()) == 0)) {
      String time_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","timeSelect_error");
      context.addMessage(null, new FacesMessage(time_err));
      error = true;
    }

    // validate the allowed-IP list: one address per line; non-final lines
    // keep a trailing separator character that is stripped before checking
    String ipString = assessmentSettings.getIpAddresses().trim();
    String[] arraysIp = (ipString.split("\n"));
    boolean ipErr = false;
    for (int a = 0; a < arraysIp.length; a++) {
      String currentString = arraysIp[a];
      if (!currentString.trim().equals("")) {
        if (a < (arraysIp.length - 1))
          currentString = currentString.substring(0, currentString.length() - 1);
        if (!s.isIpValid(currentString)) {
          ipErr = true;
          break;
        }
      }
    }
    if (ipErr) {
      error = true;
      String ip_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","ip_error");
      context.addMessage(null, new FacesMessage(ip_err));
    }

    // limited submissions must allow at least one attempt; a non-numeric
    // value is caught as a RuntimeException and reported the same way
    String unlimitedSubmissions = assessmentSettings.getUnlimitedSubmissions();
    if (unlimitedSubmissions != null && unlimitedSubmissions.equals(AssessmentAccessControlIfc.LIMITED_SUBMISSIONS.toString())) {
      try {
        String submissionsAllowed = assessmentSettings.getSubmissionsAllowed().trim();
        int submissionAllowed = Integer.parseInt(submissionsAllowed);
        if (submissionAllowed < 1) {
          throw new RuntimeException();
        }
      }
      catch (RuntimeException e) {
        error = true;
        String submission_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","submissions_allowed_error");
        context.addMessage(null, new FacesMessage(submission_err));
      }
    }

    //String unlimitedSubmissions = assessmentSettings.getUnlimitedSubmissions();
    // average scoring needs at least two submissions to average over
    String scoringType = assessmentSettings.getScoringType();
    if ((scoringType).equals(EvaluationModelIfc.AVERAGE_SCORE.toString()) && "0".equals(assessmentSettings.getUnlimitedSubmissions())) {
      try {
        String submissionsAllowed = assessmentSettings.getSubmissionsAllowed().trim();
        int submissionAllowed = Integer.parseInt(submissionsAllowed);
        if (submissionAllowed < 2) {
          throw new RuntimeException();
        }
      }
      catch (RuntimeException e) {
        error = true;
        String submission_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","averag_grading_single_submission");
        context.addMessage(null, new FacesMessage(submission_err));
      }
    }

    // check feedback - if delivered at a specific time ("2"), that time must
    // be present and valid
    if ((assessmentSettings.getFeedbackDelivery()).equals("2")) {
      if (assessmentSettings.getFeedbackDateString() == null || assessmentSettings.getFeedbackDateString().equals("")) {
        error = true;
        String date_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","date_error");
        context.addMessage(null, new FacesMessage(date_err));
      }
      else if (!assessmentSettings.getIsValidFeedbackDate()) {
        String feedbackDateErr = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","invalid_feedback_date");
        context.addMessage(null, new FacesMessage(feedbackDateErr));
        error = true;
      }
    }

    // check secure delivery exit password: alphanumeric ASCII only
    SecureDeliveryServiceAPI secureDeliveryService = SamigoApiFactory.getInstance().getSecureDeliveryServiceAPI();
    if ( secureDeliveryService.isSecureDeliveryAvaliable() ) {
      String moduleId = assessmentSettings.getSecureDeliveryModule();
      if ( ! SecureDeliveryServiceAPI.NONE_ID.equals( moduleId ) ) {
        String exitPassword = assessmentSettings.getSecureDeliveryModuleExitPassword();
        if ( exitPassword != null && exitPassword.length() > 0 ) {
          for ( int i = 0; i < exitPassword.length(); i++ ) {
            char c = exitPassword.charAt(i);
            if ( ! (( c >= 'a' && c <= 'z' ) || ( c >= 'A' && c <= 'Z' ) || ( c >= '0' && c <= '9' )) ) {
              error = true;
              String submission_err = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages","exit_password_error");
              context.addMessage(null, new FacesMessage(submission_err));
              break;
            }
          }
        }
      }
    }

    // any validation failure: stay on the settings page and bail out
    if (error) {
      String blockDivs = ContextUtil.lookupParam("assessmentSettingsAction:blockDivs");
      assessmentSettings.setBlockDivs(blockDivs);
      assessmentSettings.setOutcomeSave("editAssessmentSettings");
      return;
    }

    // Set the outcome once Save button is clicked
    AuthorBean author = (AuthorBean) ContextUtil.lookupBean("author");
    assessmentSettings.setOutcomeSave(author.getFromPage());

    s.save(assessmentSettings, false);

    // reset the core listing in case assessment title changes
    ArrayList assessmentList = assessmentService.getBasicInfoOfAllActiveAssessments(
        author.getCoreAssessmentOrderBy(), author.isCoreAscending());
    // titles are stored formatted; the listing displays plain text
    Iterator iter = assessmentList.iterator();
    while (iter.hasNext()) {
      AssessmentFacade assessmentFacade = (AssessmentFacade) iter.next();
      assessmentFacade.setTitle(FormattedText.convertFormattedTextToPlaintext(assessmentFacade.getTitle()));
    }
    // get the managed bean, author and set the list
    author.setAssessments(assessmentList);

    // goto Question Authoring page
    EditAssessmentListener editA = new EditAssessmentListener();
    editA.setPropertiesForAssessment(author);
  }
}
package xal.smf.impl; import xal.ca.ChannelFactory; import xal.ca.ConnectionException; import xal.ca.GetException; import xal.smf.AcceleratorNode; import xal.smf.attr.AttributeBucket; import xal.smf.attr.RfCavityBucket; import xal.smf.attr.RfGapBucket; import xal.smf.impl.qualify.ElementTypeManager; import xal.tools.math.fnc.poly.RealUnivariatePolynomial; /** * The implementation of the RF gap element. * * The RfGap class is meant to be used in connection with a set of * related RF gaps, such as the gaps in a DTL Tank, which are all * part of a single resonant cavity * controlled by a single klystron. Each gap may have a fixed scale * factor for both the field and phase, relative to a nominal * field and phase. * @author J. Galambos */ public class RfGap extends AcceleratorNode { /* * Constants */ public static final String s_strType = "RG"; static { registerType(); } /* * Register type for qualification */ private static void registerType() { ElementTypeManager.defaultManager().registerTypes( RfGap.class, s_strType, "rfgap" ); } /* * Local Attributes */ /** The rf gap bucket containing the length, ampFactor, phaseFactor and TTF*/ protected RfGapBucket m_bucRfGap; // RfGap parameters /** a flag indicating whether this gap is the first gap in a cavity string */ private boolean firstGap = false; /** Override to provide type signature */ public String getType() { return s_strType; }; /** Primary Constructor */ public RfGap( final String strId, final ChannelFactory channelFactory ) { super( strId, channelFactory ); setRfGap(new RfGapBucket()); } /** Constructor */ public RfGap( final String strId ) { this( strId, null ); } /* * Attributes */ public RfGapBucket getRfGap() { return m_bucRfGap; } public void setRfGap(RfGapBucket buc) { m_bucRfGap = buc; super.addBucket(buc); } /** Override AcceleratorNode implementation to check for a RfGapBucket */ public void addBucket(AttributeBucket buc) { if (buc.getClass().equals( RfGapBucket.class )) setRfGap((RfGapBucket)buc); 
super.addBucket(buc); } /** return the RF amplitude in the gap (kV/m). Note, this method should probably be modified */ public double getGapAmpAvg() throws ConnectionException, GetException { final RfCavity rfCav = (RfCavity) this.getParent(); return toGapAmpFromCavityAmp( rfCav.getCavAmpAvg() ); } /** return the RF amplitude in the gap (kV/m) */ public double getGapDfltAmp() { final RfCavity rfCav = (RfCavity) this.getParent(); final RfCavityBucket rfCavBuc = rfCav.getRfField(); return toGapAmpFromCavityAmp( rfCavBuc.getAmplitude() ); } /** * This includes the calibration offset factor if it has been set * @return the RF phase in the gap (deg). */ public double getGapPhaseAvg() throws ConnectionException, GetException { final RfCavity rfCav = (RfCavity) this.getParent(); return toGapPhaseFromCavityPhase( rfCav.getCavPhaseAvg() ); } /** * This is the product of the field * gap length * TTF * @return the E0TL product (kV) */ public double getGapE0TL() throws ConnectionException, GetException { return toE0TLFromGapField( getGapAmpAvg() ); } /** * This is the product of the field * gap length * TTF * @return the E0TL product (kV) */ public double getGapDfltE0TL() { return toE0TLFromGapField( getGapDfltAmp() ); } /** return the RF phase in the gap (deg) */ public double getGapDfltPhase() { final RfCavity rfCav = (RfCavity) this.getParent(); final RfCavityBucket rfCavBuc = rfCav.getRfField(); return toGapPhaseFromCavityPhase( rfCavBuc.getPhase() ); } /** return the RF fundamental frequency */ public double getGapDfltFrequency() { final RfCavity rfCav = (RfCavity) this.getParent(); final RfCavityBucket rfCavBuc = rfCav.getRfField(); return rfCavBuc.getFrequency(); } /** * Convert RF cavity amplitude to get the RF gap's amplitude. 
* @param cavityAmp the RF cavity's amplitude * @return this RF gap's amplitude */ public double toGapAmpFromCavityAmp( final double cavityAmp ) { return cavityAmp * m_bucRfGap.getAmpFactor(); } /** * Convert RF cavity phase to get the RF gap's phase. * @param cavityPhase the RF cavity's phase * @return this RF gap's phase */ public double toGapPhaseFromCavityPhase( final double cavityPhase ) { return cavityPhase + m_bucRfGap.getPhaseFactor(); } /** * Convert RF gap field, E0, to E0TL. This is the product of the field * gap length * TTF. * @param field the RF field in KV/m * @return the E0TL product (kV) */ public double toE0TLFromGapField( final double field ) { return field * m_bucRfGap.getLength() * m_bucRfGap.getTTF(); } /** * return Rf Gap Length * * <p> * <h4>CKA NOTES:</h4> * &middot; I believe this is the length of the overall gap cell * structure, not just the gap itself. * <br/> * &middot; Specifically, it is the distance from one gap center * to the next in an accelerating structure. * </p> * */ public double getGapLength() { return m_bucRfGap.getLength() ; } /** return TTF */ public double getGapTTF() { return m_bucRfGap.getTTF(); } //JAMES CODE: sets the gap TTF value for the given gap public void setGapTTF(double gapTTFval) { m_bucRfGap.setTTF(gapTTFval); } /** * Set the RF amplitude in the (kV/m) * should be done by the parent cavity (e.g. DTL tank) * <br/> * <br/> * <em>Currently this method does nothing!</em> * * * @param cavAmp The amplitude of the first gap (kV/m) */ public void setGapAmp(double cavAmp){ // ampAvg = cavAmp * m_bucRfGap.getAmpFactor(); } /** Set the RF phase in the gap (deg) * should be done by the parent cavity (e.g. 
DTL tank) * @param cavPhase The phase of the first gap (deg) */ public void setGapPhase(double cavPhase){ // phaseAvg = cavPhase + m_bucRfGap.getPhaseFactor(); } // the RfGapDataSource interface methods: /** * Return a polynomial fit of the transit time factor <i>T</i>(&beta;) * as a function of normalized velocity &beta;. * * <p> * <h4>CKA NOTES:</h4> * &middot; It appears to me that the returned value of <i>T</i>'(&beta;) is * in the units of <b>centimeters</b>. * <br/> * &middot; The units for the transit time factor <i>T</i>(&beta;) are in * <b>meters</b>. * <br/> * &middot; This is a confusing inconsistency and hopefully we can resolve this * in the future. * <br/> * &middot; The modeling element <code>IdealRfGap</code> uses the magic number * of 0.01 as a factor in front of <code>{@link #getTTFPrimeFit()}</code>. * </p> * * @return &nbsp; &nbsp; <i>T</i>(&beta;) &approx; <i>a</i><sub>0</sub> * + <i>a</i><sub>1</sub>&beta; * + <i>a</i><sub>2</sub>&beta;<sup>2</sup> + ... * * @version June 1, 2015 */ public RealUnivariatePolynomial getTTFFit() { double[] arrCoeffs = this.m_bucRfGap.getTCoefficients(); // Defaults to the RF cavity transit time factor if none is // defined for this gap. if (arrCoeffs == null || arrCoeffs.length == 0) { RfCavity rfCav = (RfCavity) this.getParent(); if(isEndCell()) return rfCav.getTTFFitEnd(); else return rfCav.getTTFFit(); } // A set of coefficients is defined for this fit. // Create the fitting function and return it. RealUnivariatePolynomial polyFit = new RealUnivariatePolynomial(arrCoeffs); return polyFit; } /** * <p> * Return a polynomial fit of the transit time factor derivative <i>T'</i>(&beta;) * as a function of normalized velocity &beta;. Note that the derivative * is with respect to the wave number <i>k</i>; that is, * <i>T</i>'(&beta) = <i>dT</i>(&beta;)/<i>dk</i>. * </p> * <p> * <h4>CKA NOTES:</h4> * &middot; It appears to me that the returned value of <i>T</i>'(&beta;) is * in the units of <b>centimeters</b>. 
* <br/> * &middot; The units for the transit time factor <i>T</i>(&beta;) are in * <b>meters</b>. * <br/> * &middot; This is a confusing inconsistency and hopefully we can resolve this * in the future. * <br/> * &middot; The modeling element <code>IdealRfGap</code> uses the magic number * of 0.01 as a factor in front of <code>{@link #getTTFPrimeFit()}</code>. * <br/> * &middot; Equally distressing is that the code within <code>IdealRfGap</code>, * the modeling element for an RF gap, treats this value as if it where the * derivative with respect to wave number <i>k</i>. That is, the returned * value here is &part;<i>T</i>(&beta;)/&part;<i>k</i>. * </p> * * @return &nbsp; &nbsp; <i>T</i>(&beta;) &approx; <i>a</i><sub>0</sub> * + <i>a</i><sub>1</sub>&beta; * + <i>a</i><sub>2</sub>&beta;<sup>2</sup> + ... * * @version June 1, 2015 */ public RealUnivariatePolynomial getTTFPrimeFit() { double[] arrCoeffs = this.m_bucRfGap.getTpCoefficients(); // Defaults to the RF cavity transit time factor if none is // defined for this gap. if (arrCoeffs == null || arrCoeffs.length == 0) { RfCavity rfCav = (RfCavity) this.getParent(); if (isEndCell()) return rfCav.getTTFPrimeFitEnd(); else return rfCav.getTTFPrimeFit(); } // A set of coefficients is defined for this fit. // Create the fitting function and return it. RealUnivariatePolynomial polyFit = new RealUnivariatePolynomial(arrCoeffs); return polyFit; } /** * Return a polynomial fit of the sine transit time factor <i>S</i>(&beta;) * as a function of normalized velocity &beta;. * * @return &nbsp; &nbsp; <i>S</i>(&beta;) &approx; <i>b</i><sub>0</sub> * + <i>b</i><sub>1</sub>&beta; * + <i>b</i><sub>2</sub>&beta;<sup>2</sup> + ... * * @version June 1, 2015 */ public RealUnivariatePolynomial getSFit() { double[] arrCoeffs = this.m_bucRfGap.getSCoefficients(); // Defaults to the RF cavity transit time factor if none is // defined for this gap. 
if (arrCoeffs == null || arrCoeffs.length == 0) { RfCavity rfCav = (RfCavity) this.getParent(); if (isEndCell()) return rfCav.getSTFFitEnd(); else return rfCav.getSTFFit(); } // A set of coefficients is defined for this fit. // Create the fitting function and return it. RealUnivariatePolynomial polyFit = new RealUnivariatePolynomial(arrCoeffs); return polyFit; } /** * <p> * Return a polynomial fit of the sine transit time factor derivative <i>S'</i>(&beta;) * as a function of normalized velocity &beta;. Note that the derivative * is with respect to the wave number <i>k</i>; that is, * <i>S</i>'(&beta) = <i>dS</i>(&beta;)/<i>dk</i>. * </p> * <p> * <h4>CKA NOTES:</h4> * &middot; It appears to me that the returned value of <i>S</i>'(&beta;) is * in the units of <b>centimeters</b>. * <br/> * &middot; The units for the transit time factor <i>S</i>(&beta;) are in * <b>meters</b>. * <br/> * &middot; This is a confusing inconsistency and hopefully we can resolve this * in the future. * <br/> * &middot; The modeling element <code>IdealRfGap</code> uses the magic number * of 0.01 as a factor in front of <code>{@link #getSTFPrimeFit()}</code>. * </p> * * @return &nbsp; &nbsp; <i>S</i>(&beta;) &approx; <i>b</i><sub>0</sub> * + <i>b</i><sub>1</sub>&beta; * + <i>b</i><sub>2</sub>&beta;<sup>2</sup> + ... * * @version June 1, 2015 */ public RealUnivariatePolynomial getSPrimeFit() { double[] arrCoeffs = this.m_bucRfGap.getSpCoefficients(); // Defaults to the RF cavity transit time factor derivative if none is // defined for this gap. if (arrCoeffs == null || arrCoeffs.length == 0) { RfCavity rfCav = (RfCavity) this.getParent(); if (isEndCell()) return rfCav.getSTFPrimeFitEnd(); //return rfCav.getSTFPrimeFit(); else return rfCav.getSTFPrimeFit(); } // A set of coefficients is defined for this fit. // Create the fitting function and return it. 
RealUnivariatePolynomial polyFit = new RealUnivariatePolynomial(arrCoeffs); return polyFit; } /** * @return <b>0</b> if the gap is part of a 0 mode cavity structure (e.g. DTL) <br/> * <b>1</b> if the gap is part of a &pi; mode cavity (e.g. CCL, Superconducting) */ public double getStructureMode() { RfCavity rfCav = (RfCavity) this.getParent(); return rfCav.getStructureMode(); } /** * these may be different, for example, for a DTL cavity * @return the offset of the gap center from the cell center (m) */ public double getGapOffset() { return m_bucRfGap.getGapOffset(); } /** sets the flag indicating whether this is the first gap in a cavity */ public void setFirstGap(boolean tf) { firstGap = tf;} /** returns whether this is the first gap of a cavity string */ public boolean isFirstGap() {return firstGap;} /** returns whether this is the <b>last</b> gap of a cavity string */ public boolean isEndCell() { if (m_bucRfGap.getEndCell() == 1) return true; else return false; } /** * Computes and returns the design value of the energy gain for this gap. * The energy gain is given by the Panofsky equation * <br/> * <br/> * &nbsp; &nbsp; &Delta;<i>W</i> = * <i>q</i> <i>E</i><sub>0</sub><i>L</i> <i>T</i>(&beta;) cos(&phi;<sub>0</sub>). * * @return design energy gain &Delta;<i>W</i> (eV) * * Added 10/17/02 CKA */ public double getDesignEnergyGain() { double ETL = this.getGapDfltE0TL(); double phi = this.getGapDfltPhase(); return ETL*Math.cos(phi); }; }
package org.sagebionetworks.auth;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.PrintWriter;
import java.util.Base64;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.sagebionetworks.repo.model.AuthorizationConstants;
import org.springframework.http.HttpStatus;

import com.google.common.collect.ImmutableList;

/**
 * Unit tests for HttpAuthUtil covering Basic/Bearer Authorization header
 * parsing, Synapse auth-header filtering, and JSON error-response writing.
 * Request/response objects are Mockito mocks; no servlet container is used.
 */
@ExtendWith(MockitoExtension.class)
class HttpAuthUtilTest {

	private static final String BEARER_TOKEN = "1a2b3c";

	@Mock
	private HttpServletRequest httpRequest;

	@Mock
	private HttpServletResponse httpResponse;

	@Mock
	private PrintWriter mockWriter;

	private static final String username = "username";
	private static final String password = "password";

	// helper: standard Base64 encoding used by Basic auth headers
	private static String base64Encode(String in) {
		return Base64.getEncoder().encodeToString(in.getBytes());
	}

	@Test
	void testUsesBasicAuthenticationCredentials() {
		when(httpRequest.getHeader("Authorization")).thenReturn("Basic placeholder");
		// method under test
		assertTrue(HttpAuthUtil.usesBasicAuthentication(httpRequest));
	}

	@Test
	void testUsesBasicAuthenticationCredentials_NoHeader() {
		when(httpRequest.getHeader("Authorization")).thenReturn(null);
		// method under test
		assertFalse(HttpAuthUtil.usesBasicAuthentication(httpRequest));
	}

	@Test
	void testUsesBasicAuthenticationCredentials_EmptyHeader() {
		when(httpRequest.getHeader("Authorization")).thenReturn("");
		// method under test
		assertFalse(HttpAuthUtil.usesBasicAuthentication(httpRequest));
	}

	@Test
	void testUsesBasicAuthenticationCredentials_NotBasic() {
		when(httpRequest.getHeader("Authorization")).thenReturn("Bearer placeholder");
		// method under test
		assertFalse(HttpAuthUtil.usesBasicAuthentication(httpRequest));
	}

	@Test
	void testGetBasicAuthenticationCredentialsWithNoHeader() {
		// test no authorization header
		when(httpRequest.getHeader("Authorization")).thenReturn(null);
		// method under test
		assertEquals(Optional.empty(), HttpAuthUtil.getBasicAuthenticationCredentials(httpRequest));
	}

	@Test
	void testGetBasicAuthenticationCredentialsWithEmptyHeader() {
		// Empty header
		when(httpRequest.getHeader("Authorization")).thenReturn(" ");
		// method under test
		assertEquals(Optional.empty(), HttpAuthUtil.getBasicAuthenticationCredentials(httpRequest));
	}

	@Test
	void testGetBasicAuthenticationCredentialsWithWrongHeader() {
		// not Basic Authentication: a Bearer header is rejected outright
		when(httpRequest.getHeader("Authorization")).thenReturn("Bearer 1a2b3c");
		// method under test
		IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, () -> {
			HttpAuthUtil.getBasicAuthenticationCredentials(httpRequest);
		});
		assertEquals("Invalid Authorization header for basic authentication (Missing \"Basic \" prefix)", ex.getMessage());
	}

	@Test
	void testGetBasicAuthenticationCredentialsWithoutColon() {
		// not properly formatted Basic auth: decoded payload lacks the colon
		when(httpRequest.getHeader("Authorization")).thenReturn("Basic "+base64Encode("some random text"));
		IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, () -> {
			HttpAuthUtil.getBasicAuthenticationCredentials(httpRequest);
		});
		assertEquals("Invalid Authorization header for basic authentication (Decoded credentials should be colon separated)", ex.getMessage());
	}

	@Test
	void testGetBasicAuthenticationCredentialsWithCredentials() {
		// happy path: "Basic base64(username:password)"
		when(httpRequest.getHeader("Authorization")).thenReturn("Basic "+base64Encode(username+":"+password));
		Optional<UserNameAndPassword> expected = Optional.of(new UserNameAndPassword(username, password));
		// method under test
		assertEquals(expected, HttpAuthUtil.getBasicAuthenticationCredentials(httpRequest));
	}

	@Test
	void testGetBasicAuthenticationCredentialsWithExtraSpace() {
		// extra white space around the encoded credentials is tolerated
		when(httpRequest.getHeader("Authorization")).thenReturn("Basic \t"+base64Encode(username+":"+password)+"\n\n \t");
		Optional<UserNameAndPassword> expected = Optional.of(new UserNameAndPassword(username, password));
		// method under test
		assertEquals(expected, HttpAuthUtil.getBasicAuthenticationCredentials(httpRequest));
	}

	@Test
	void testGetBasicAuthenticationCredentialsWithInvalidEncoding() {
		// invalid encoding: trailing "__" corrupts the Base64 payload
		when(httpRequest.getHeader("Authorization")).thenReturn("Basic "+base64Encode(username+":"+password)+"__");
		// method under test
		IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, () -> {
			HttpAuthUtil.getBasicAuthenticationCredentials(httpRequest);
		});
		assertEquals("Invalid Authorization header for basic authentication (Malformed Base64 encoding: Input byte array has incorrect ending byte at 24)", ex.getMessage());
	}

	@Test
	void testGetBearerToken() {
		// test no authorization header
		when(httpRequest.getHeader("Authorization")).thenReturn(null);
		// method under test
		assertNull(HttpAuthUtil.getBearerTokenFromStandardAuthorizationHeader(httpRequest));

		// proper bearer token
		when(httpRequest.getHeader("Authorization")).thenReturn("Bearer "+BEARER_TOKEN);
		// method under test
		assertEquals(BEARER_TOKEN, HttpAuthUtil.getBearerTokenFromStandardAuthorizationHeader(httpRequest));
	}

	@Test
	void testGetBearerTokenFromAuthorizationHeader() {
		// null, blank, and non-Bearer headers all yield null
		// method under test
		assertNull(HttpAuthUtil.getBearerTokenFromAuthorizationHeader(null));
		// method under test
		assertNull(HttpAuthUtil.getBearerTokenFromAuthorizationHeader(" "));
		// method under test
		assertNull(HttpAuthUtil.getBearerTokenFromAuthorizationHeader("Basic xxx"));
		// method under test
		assertEquals(BEARER_TOKEN, HttpAuthUtil.getBearerTokenFromAuthorizationHeader("Bearer "+BEARER_TOKEN));
		// what if there's extra white space?
		// method under test
		assertEquals(BEARER_TOKEN, HttpAuthUtil.getBearerTokenFromAuthorizationHeader("Bearer \t\t "+BEARER_TOKEN+"   "));
	}

	@Test
	void testSetBearerTokenHeader() {
		Map<String, String[]> headers = new HashMap<String, String[]>();
		HttpAuthUtil.setBearerTokenHeader(headers, BEARER_TOKEN);
		assertEquals("Bearer "+BEARER_TOKEN, headers.get(AuthorizationConstants.SYNAPSE_AUTHORIZATION_HEADER_NAME)[0]);
	}

	@Test
	void testSetServiceNameHeader() {
		Map<String, String[]> headers = new HashMap<String, String[]>();
		String serviceName = "someService";
		// Call under test
		HttpAuthUtil.setServiceNameHeader(headers, serviceName);
		assertEquals(serviceName, headers.get(AuthorizationConstants.SYNAPSE_HEADER_SERVICE_NAME)[0]);
	}

	@Test
	void testFilterAuthorizationHeaders() {
		// all Synapse auth-related headers are stripped; others pass through
		String nonAuthHeader = "Accept";
		String nonAuthHeaderValue = "application/json";
		when(httpRequest.getHeaderNames()).thenReturn(Collections.enumeration(ImmutableList.of(
				"Synapse-Authorization", "sessionToken", "userId", "signatureTimestamp", "signature", "verifiedOAuthClientId", nonAuthHeader)));
		when(httpRequest.getHeaders(nonAuthHeader)).thenReturn(Collections.enumeration(Collections.singleton(nonAuthHeaderValue)));
		// method under test
		Map<String,String[]> actual = HttpAuthUtil.filterAuthorizationHeaders(httpRequest);
		assertEquals(1, actual.size());
		assertEquals(Collections.singleton(nonAuthHeader), actual.keySet());
		String[] values = actual.get(nonAuthHeader);
		assertEquals(1, values.length);
		assertEquals("application/json", values[0]);
	}

	@Test
	void testRejectWithStatus() throws Exception {
		HttpStatus status = HttpStatus.UNAUTHORIZED;
		when(httpResponse.getWriter()).thenReturn(mockWriter);
		// method under test
		HttpAuthUtil.rejectWithErrorResponse(httpResponse, "bad request", status);
		verify(httpResponse).setStatus(status.value());
		verify(httpResponse).setContentType("application/json");
		// a 401 must carry a WWW-Authenticate challenge
		verify(httpResponse).setHeader("WWW-Authenticate", "\"Digest\" your email");
		verify(mockWriter).println("{\"reason\":\"bad request\"}");
	}

	@Test
	void testNoWwwAuthenticateOnReject_nonUnauthorized() throws Exception {
		HttpStatus status = HttpStatus.FORBIDDEN;
		when(httpResponse.getWriter()).thenReturn(mockWriter);
		// method under test
		HttpAuthUtil.rejectWithErrorResponse(httpResponse, "bad request", status);
		verify(httpResponse).setStatus(status.value());
		verify(httpResponse).setContentType("application/json");
		// non-401 statuses must NOT add the challenge header
		verify(httpResponse, never()).setHeader("WWW-Authenticate", "\"Digest\" your email");
		verify(mockWriter).println("{\"reason\":\"bad request\"}");
	}

	@Test
	void testRejectUnauthorized() throws Exception {
		when(httpResponse.getWriter()).thenReturn(mockWriter);
		// method under test
		HttpAuthUtil.reject(httpResponse, "missing token");
		// reject() defaults to 401 Unauthorized
		verify(httpResponse).setStatus(401);
		verify(httpResponse).setContentType("application/json");
		verify(httpResponse).setHeader("WWW-Authenticate", "\"Digest\" your email");
		verify(mockWriter).println("{\"reason\":\"missing token\"}");
	}
}
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.test.autoconfigure.properties;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.env.EnumerablePropertySource;
import org.springframework.util.ObjectUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;

/**
 * {@link EnumerablePropertySource} to adapt annotations marked with
 * {@link PropertyMapping @PropertyMapping}.
 *
 * @author Phillip Webb
 * @author Andy Wilkinson
 * @since 1.4.0
 */
public class AnnotationsPropertySource extends EnumerablePropertySource<Class<?>> {

	// Matches a lower/upper boundary for camelCase -> kebab-case conversion;
	// "[^A-Z-]" avoids inserting a dash after an existing dash or a capital run
	private static final Pattern CAMEL_CASE_PATTERN = Pattern.compile("([^A-Z-])([A-Z])");

	// Immutable snapshot of all mapped properties, computed once at construction
	private final Map<String, Object> properties;

	public AnnotationsPropertySource(Class<?> source) {
		this("Annotations", source);
	}

	public AnnotationsPropertySource(String name, Class<?> source) {
		super(name, source);
		this.properties = getProperties(source);
	}

	// Builds the full property map for the source class and wraps it unmodifiable.
	private Map<String, Object> getProperties(Class<?> source) {
		Map<String, Object> properties = new LinkedHashMap<>();
		collectProperties(source, source, properties, new HashSet<>());
		return Collections.unmodifiableMap(properties);
	}

	// Recursively walks the annotations on 'source' (then its superclass),
	// harvesting attribute values of @PropertyMapping-annotated annotations.
	// 'seen' guards against cycles in meta-annotations (annotations may
	// annotate themselves transitively).
	private void collectProperties(Class<?> root, Class<?> source,
			Map<String, Object> properties, Set<Class<?>> seen) {
		if (source != null && seen.add(source)) {
			for (Annotation annotation : getMergedAnnotations(root, source)) {
				if (!AnnotationUtils.isInJavaLangAnnotationPackage(annotation)) {
					// type-level @PropertyMapping supplies the prefix/skip default
					PropertyMapping typeMapping = annotation.annotationType()
							.getAnnotation(PropertyMapping.class);
					for (Method attribute : annotation.annotationType()
							.getDeclaredMethods()) {
						collectProperties(annotation, attribute, typeMapping, properties);
					}
					// recurse into meta-annotations of this annotation type
					collectProperties(root, annotation.annotationType(), properties,
							seen);
				}
			}
			// also harvest annotations declared on superclasses
			collectProperties(root, source.getSuperclass(), properties, seen);
		}
	}

	// Returns the annotations of 'source', each resolved as a merged annotation
	// against 'root' so that attribute overrides from the root class apply.
	private List<Annotation> getMergedAnnotations(Class<?> root, Class<?> source) {
		List<Annotation> mergedAnnotations = new ArrayList<>();
		Annotation[] annotations = AnnotationUtils.getAnnotations(source);
		if (annotations != null) {
			for (Annotation annotation : annotations) {
				if (!AnnotationUtils.isInJavaLangAnnotationPackage(annotation)) {
					mergedAnnotations
							.add(findMergedAnnotation(root, annotation.annotationType()));
				}
			}
		}
		return mergedAnnotations;
	}

	// Finds the merged annotation on 'source', climbing the superclass chain
	// until a match is found or the hierarchy is exhausted (returns null).
	private Annotation findMergedAnnotation(Class<?> source,
			Class<? extends Annotation> annotationType) {
		if (source == null) {
			return null;
		}
		Annotation mergedAnnotation = AnnotatedElementUtils.getMergedAnnotation(source,
				annotationType);
		return (mergedAnnotation != null) ? mergedAnnotation
				: findMergedAnnotation(source.getSuperclass(), annotationType);
	}

	// Maps one annotation attribute to a property entry, honoring skip rules
	// (attribute-level @PropertyMapping overrides the type-level one).
	private void collectProperties(Annotation annotation, Method attribute,
			PropertyMapping typeMapping, Map<String, Object> properties) {
		PropertyMapping attributeMapping = AnnotationUtils.getAnnotation(attribute,
				PropertyMapping.class);
		SkipPropertyMapping skip = getMappingType(typeMapping, attributeMapping);
		if (skip == SkipPropertyMapping.YES) {
			return;
		}
		String name = getName(typeMapping, attributeMapping, attribute);
		ReflectionUtils.makeAccessible(attribute);
		Object value = ReflectionUtils.invokeMethod(attribute, annotation);
		if (skip == SkipPropertyMapping.ON_DEFAULT_VALUE) {
			// only emit the property when the attribute differs from its default
			Object defaultValue = AnnotationUtils.getDefaultValue(annotation,
					attribute.getName());
			if (ObjectUtils.nullSafeEquals(value, defaultValue)) {
				return;
			}
		}
		putProperties(name, value, properties);
	}

	// Skip resolution precedence: attribute mapping, then type mapping,
	// then YES (attributes without any @PropertyMapping are not mapped).
	private SkipPropertyMapping getMappingType(PropertyMapping typeMapping,
			PropertyMapping attributeMapping) {
		if (attributeMapping != null) {
			return attributeMapping.skip();
		}
		if (typeMapping != null) {
			return typeMapping.skip();
		}
		return SkipPropertyMapping.YES;
	}

	// Builds the property name: explicit attribute value wins, otherwise the
	// kebab-cased attribute method name, joined to the type-level prefix.
	private String getName(PropertyMapping typeMapping,
			PropertyMapping attributeMapping, Method attribute) {
		String prefix = (typeMapping != null) ? typeMapping.value() : "";
		String name = (attributeMapping != null) ? attributeMapping.value() : "";
		if (!StringUtils.hasText(name)) {
			name = toKebabCase(attribute.getName());
		}
		return dotAppend(prefix, name);
	}

	// camelCase -> kebab-case, e.g. "showSql" -> "show-sql".
	// StringBuffer is required by the pre-Java-9 Matcher.appendReplacement API.
	private String toKebabCase(String name) {
		Matcher matcher = CAMEL_CASE_PATTERN.matcher(name);
		StringBuffer result = new StringBuffer();
		while (matcher.find()) {
			matcher.appendReplacement(result,
					matcher.group(1) + '-' + StringUtils.uncapitalize(matcher.group(2)));
		}
		matcher.appendTail(result);
		return result.toString().toLowerCase(Locale.ENGLISH);
	}

	// Joins prefix and postfix with a single dot, tolerating a trailing dot
	// already present on the prefix.
	private String dotAppend(String prefix, String postfix) {
		if (StringUtils.hasText(prefix)) {
			return (prefix.endsWith(".") ? prefix + postfix : prefix + "." + postfix);
		}
		return postfix;
	}

	// Arrays are expanded to indexed properties: name[0], name[1], ...
	private void putProperties(String name, Object value,
			Map<String, Object> properties) {
		if (ObjectUtils.isArray(value)) {
			Object[] array = ObjectUtils.toObjectArray(value);
			for (int i = 0; i < array.length; i++) {
				properties.put(name + "[" + i + "]", array[i]);
			}
		}
		else {
			properties.put(name, value);
		}
	}

	@Override
	public boolean containsProperty(String name) {
		return this.properties.containsKey(name);
	}

	@Override
	public Object getProperty(String name) {
		return this.properties.get(name);
	}

	@Override
	public String[] getPropertyNames() {
		return StringUtils.toStringArray(this.properties.keySet());
	}

	public boolean isEmpty() {
		return this.properties.isEmpty();
	}

	// Equality is defined entirely by the computed property map.
	@Override
	public boolean equals(Object obj) {
		if (obj == this) {
			return true;
		}
		if (obj == null || getClass() != obj.getClass()) {
			return false;
		}
		return this.properties.equals(((AnnotationsPropertySource) obj).properties);
	}

	@Override
	public int hashCode() {
		return this.properties.hashCode();
	}

}
package com.rezgame.backend.board; import com.rezgame.backend.Color; import com.rezgame.backend.Placement; import com.rezgame.backend.Move; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.logging.Logger; /* * Copyright (c) <2013>, Amanj Sherwany and Nosheen Zaza * All rights reserved. * */ /** * 1- Represent the board as a 3 * 8 matrix. * 2- Each row of the matrix represents an orbit. * 3- Movies are legal if: * 1- Only x, or why changes by exactly one. AND * 2- If x was odd, then y cannot change. AND * 3- The target cell is not occupied * * * * A representation of Board, the board can do/knows the following: * 1- How many empty, black, white cells is there. * 2- What is the valid moves of a cell * 3- Can tell if a move is valid * 4- place/remove/move an item * * Created by Amanj and Nosheen on 26/9/13. */ public class Board implements BoardInterface { private enum CellState {BLACK, WHITE, EMPTY}; private static final int ORBITS = 3; private static final int ORBIT_ITEMS = 8; private static final Logger LOGGER = Logger.getLogger("BoardLog"); private CellState[][] board = new CellState[ORBITS][ORBIT_ITEMS]; public Board() { for(int i = 0; i < ORBITS; i++) { for(int j = 0; j < ORBIT_ITEMS; j++) { board[i][j] = CellState.EMPTY; } } } private CellState colorToState(Color color) { if(color.equals(Color.Black)) return CellState.BLACK; else return CellState.WHITE; } private Color stateToColor(CellState s) { if(s.equals(CellState.BLACK)) return Color.Black; else if(s.equals(CellState.WHITE)) return Color.White; else throw new RuntimeException("Cannot convert empty state to color"); } public void setCell(Placement plc) { CellState cell = colorToState(plc.getColor()); checkBounds(plc); // FIXME checkEmpty(plc.getOrbit(), plc.getLoc()); setCellState(plc.getOrbit(), plc.getLoc(), colorToState(plc.getColor())); } public void move(Move mv) { if(! 
isPossibleMove(mv)) { String msg = "Not a valid move, " + mv; LOGGER.severe(msg); throw new RuntimeException(msg); } setCellState(mv.getNewOrbit(), mv.getNewLocation(), colorToState(mv.getColor())); setCellState(mv.getOldOrbit(), mv.getOldLocation(), CellState.EMPTY); } public List<Placement> getAllEmptyCells() { return getAllSpecifiedCells(CellState.EMPTY); } public List<Placement> getAllBlackCells() { return getAllSpecifiedCells(CellState.BLACK); } public List<Placement> getAllWhiteCells() { return getAllSpecifiedCells(CellState.WHITE); } public List<Move> getPossibleMoves(int orbit, int location) { List<Move> neighbours = new LinkedList<Move>(); // Any item can move by one step at a time neighbours.add(new Move(orbit, location, orbit-1, location, stateToColor(board[orbit][location]))); neighbours.add(new Move(orbit, location, orbit+1, location, stateToColor(board[orbit][location]))); neighbours.add(new Move(orbit, location, orbit, location-1, stateToColor(board[orbit][location]))); neighbours.add(new Move(orbit, location, orbit, location+1, stateToColor(board[orbit][location]))); List<Move> buffer = new LinkedList<Move>(); for(Move mv : neighbours) { if(isPossibleMove(mv)) { buffer.add(mv); } } return buffer; } public Map<Placement, List<Move>> getAllPossibleMoves(CellState state) { return getAllPossibleMovesFor(state); } public void removeItem(int orbit, int loc) { //TODO this is ewww, but just for the moment. 
checkBounds(new Placement (orbit, loc, Color.Black)); if(isEmpty(orbit, loc)) { String msg = "Location is empty: " + orbit + ", " + loc; LOGGER.severe(msg); throw new RuntimeException(msg); } setCellState(orbit, loc, CellState.EMPTY); } @Override public int getNumberOfBlackCells() { return getAllBlackCells().size(); } @Override public int getNumberOfWhiteCells() { return getAllWhiteCells().size(); } public boolean isPossibleMove(Move mv) { boolean result = isWithinBounds(mv.getNewOrbit(), mv.getNewLocation()) && isWithinBounds(mv.getOldOrbit(), mv.getOldLocation()) && isEmpty(mv.getNewOrbit(), mv.getNewLocation()) && !isEmpty(mv.getOldOrbit(), mv.getOldLocation()); int dOrbit = Math.abs(mv.getOldOrbit() - mv.getNewOrbit()); int dLoc = Math.abs(mv.getOldLocation() - mv.getNewLocation()) % 6; result = result && (dOrbit == 1 ^ dLoc == 1); if(mv.getOldLocation() % 2 == 0) result = result && dOrbit == 0; return result; } public boolean isEmpty(int orbit, int loc) { return board[orbit][loc].equals(CellState.EMPTY); } public boolean isAllSet() { boolean flag = true; for(int i = 0; i < ORBITS; i++) { for(int j = 0; j < ORBIT_ITEMS; j++) { flag = flag && !board[i][j].equals(CellState.EMPTY); } } return flag; } //TODO these checks are probably not needed if I use a placement. 
public boolean isBlack(int orbit, int loc) { return board[orbit][loc].equals(CellState.BLACK); } public boolean isWhite(int orbit, int loc) { return board[orbit][loc].equals(CellState.WHITE); } public boolean isWithinBounds(int orbit, int loc) { return (orbit < ORBITS && orbit >= 0 && loc < ORBIT_ITEMS && loc >= 0); } private CellState getCellState(int orbit, int location) { return board[orbit][location]; } private void setCellState(int orbit, int location, CellState c) { board[orbit][location] = c; } public boolean isCorner(Placement loc) { return (loc.getLoc() % 2 == 0); } public Placement getClockWiseAdjacent(Placement loc) { int x = loc.getOrbit(); int y = loc.getLoc(); Placement l = new Placement(x, (y + 1) % 8, loc.getColor()); return l; } public Placement getCounterClockWiseAdjacent(Placement loc) { int x = loc.getOrbit(); int y = loc.getLoc(); Placement l = new Placement(x, (y == 0? 8 : y) - 1, loc.getColor()); return l; } private void checkBounds(Placement loc) { if(loc.getOrbit() >= ORBITS || loc.getOrbit() < 0) { String msg = "Illegal orbit value: " + loc.getOrbit(); LOGGER.severe(msg); throw new RuntimeException(msg); } if(loc.getLoc() >= ORBIT_ITEMS || loc.getLoc() < 0) { String msg = "Illegal location value: " + loc; LOGGER.severe(msg); throw new RuntimeException(msg); } } private List<Placement> getAllSpecifiedCells(CellState c) { List<Placement> buffer = new LinkedList<Placement>(); for(int i = 0; i < ORBITS; i++) { for(int j = 0; j < ORBIT_ITEMS; j++) { if(c.equals(board[i][j])) { buffer.add(new Placement(i, j, stateToColor(c))); } } } return buffer; } private Map<Placement, List<Move>> getAllPossibleMovesFor(CellState c) { if(c.equals(CellState.EMPTY)) { String msg = "There is no move for Empty cells"; LOGGER.severe(msg); throw new RuntimeException(msg); } List<Placement> items = getAllSpecifiedCells(c); Map<Placement, List<Move>> buffer = new HashMap<Placement, List<Move>>(); for(Placement item : items) { List<Move> mvs = 
getPossibleMoves(item.getOrbit(), item.getLoc()); if(!mvs.isEmpty()) { buffer.put(item, mvs); } } return buffer; } public List<Placement> filterValid(List<Placement> locs) { List<Placement> valid = new LinkedList<Placement>(); for(Placement loc : locs) { if(isWithinBounds(loc.getOrbit(), loc.getLoc())) { valid.add(loc); } } return valid; } /* 00 row1 - 01 row1 - 02 ======== | 10 row2 - 11 row2 - 12 | ======== | 20 row3 - 21 row3 - 22 | ======== 0----------0----------0 | 0--------0--------0 | | | 0------0------0 | | | | | | | | 0 0 0 o 0 0 0 | | | | | | | | 0------0------0 | | | 0--------0--------0 | x----------x----------x */ public String prettyPrint() { // Each row has a different length of separators between each two locations. // A separator in the outer orbit is longer than the inner one by two `-` String[] rows = {repeatChar('-', 10), repeatChar('-', 8), repeatChar('-', 6)}; // There are two kinds of spaces String longSpaces = repeatChar(' ', (rows[1].length() * 2) - 3); String shortSpaces = repeatChar(' ', rows[2].length() - 1); String empty = "| | |" + longSpaces + "| | |\n"; StringBuilder str = new StringBuilder(); // Generate the string for the first three items of each orbit for(int i = 0; i < ORBITS; i++) { StringBuilder sb = new StringBuilder(); for(int j = 0; j < 3; j++) { sb.append(cellToChar(board[i][j])); if(j != 2) sb.append(rows[i]); } if(i == 1) { sb.insert(0, "| "); sb.append(" |\n"); } else if (i == 2) { sb.insert(0, "| | "); sb.append(" | |\n"); } else { sb.append("\n"); } str.append(sb); } str.append(empty); // Generate items number 8 of each orbit for(int i = 0; i < ORBITS; i++) { str.append(cellToChar(board[i][7])); str.append(' '); } str.append(shortSpaces + 'o' + shortSpaces); // Generate items number 4 of each orbit for(int i = ORBITS - 1; i >= 0; i--) { str.append(' '); str.append(cellToChar(board[i][3])); } str.append('\n'); str.append(empty); // Generate the string for the items number 7, 6, 5 of each orbit for(int i = ORBITS - 1; i 
>= 0 ; i--) { StringBuilder sb = new StringBuilder(); for(int j = 6; j >= 4; j--) { sb.append(cellToChar(board[i][j])); if(j != 4) sb.append(rows[i]); } if(i == 1) { sb.insert(0, "| "); sb.append(" |\n"); } else if (i == 2) { sb.insert(0, "| | "); sb.append(" | |\n"); } else { sb.append("\n"); } str.append(sb); } return str.toString(); } private char cellToChar(CellState c) { if(c.equals(CellState.EMPTY)) return '0'; else if(c.equals(CellState.EMPTY)) return 'Y'; else return 'X'; } private String repeatChar(char ch, int times) { StringBuilder sb = new StringBuilder(); for(int i = 0; i < times; i++){ sb.append(ch); } return sb.toString(); } }
package de.fau.cs.mad.rpgpack.matrix; import java.util.ArrayList; import java.util.List; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.FrameLayout; import android.widget.GridView; import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; import de.fau.cs.mad.rpgpack.R; import de.fau.cs.mad.rpgpack.SlideoutNavigationActivity; import de.fau.cs.mad.rpgpack.character.CharacterEditActivity; import de.fau.cs.mad.rpgpack.game.CharacterPlayActivity; import de.fau.cs.mad.rpgpack.jackson.MatrixTable; import de.fau.cs.mad.rpgpack.template_generator.GeneralFragment; import de.fau.cs.mad.rpgpack.template_generator.TemplateGeneratorActivity; public class MatrixFragment extends GeneralFragment { // this flags is used to store visibility of UI elements public static final int FLAG_FROM = 1; // Binary 00001 public static final int FLAG_TO = 2; // Binary 00010 public static final int FLAG_VALUE = 4; // Binary 00100 public static final int FLAG_MOD = 8; // Binary 01000 private GridView gridView; public List<MatrixItem> itemsList = null; private List<MatrixItem> playMatrixItems = null; private MatrixItem addNewMatrixItem; private MatrixViewArrayAdapter adapterCreateTemplate; private NewCharacterMatrixViewArrayAdapter adapterCreateCharacter; private PlayCharacterMatrixAdapter adapterPlay; private PlayCharacterEditModeMatrixAdapter adapterPlayEditMode; private View rootView; /* * JACKSON START */ public MatrixTable jacksonTable; public boolean jacksonInflateWithData; /* * JACKSON END */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setRetainInstance(true); 
} protected void createViewForTemplateCreationMode(LayoutInflater inflater) { Log.d("MartixFragment", "inflated for TemplateGenerator"); rootView = inflater.inflate(R.layout.fragment_matrix_view, new LinearLayout(getActivity()), false); gridView = (GridView) rootView.findViewById(R.id.gridViewMatrixItem); // check needed for jackson data loading if (itemsList == null) { itemsList = new ArrayList<MatrixItem>(); jacksonTable.entries = itemsList; // set create new item to the end, it will not appear in // jacksonTable.entries // FIXME comment is not correct, last fake item appears also in // jacksonTable.entries and make problems!!!! // now it is fixed but dirty! MatrixItem addNewMatrixItem = new MatrixItem(getResources() .getString(R.string.new_matrix_item), "+", ""); itemsList.add(addNewMatrixItem); } if (adapterCreateTemplate == null) { adapterCreateTemplate = new MatrixViewArrayAdapter(getActivity(), itemsList); // adapter.jacksonTable = jacksonTable; } gridView.setAdapter(adapterCreateTemplate); addListenersForTemplateCreateMode(); } protected void createViewForCharacterGeneratorMode(LayoutInflater inflater) { // edit mode, popup allows to correct or change matrix items fields // just like in create template mode if (SlideoutNavigationActivity.theActiveActivity.inEditMode()) { Log.d("Martix Fragment", "in edit mode"); rootView = inflater.inflate(R.layout.fragment_matrix_view, new LinearLayout(getActivity()), false); TextView textView = (TextView) rootView .findViewById(R.id.textView1); textView.setText(getResources().getString(R.string.hint_edit_items)); FrameLayout frameLayout = (FrameLayout) rootView .findViewById(R.id.container); frameLayout.setBackgroundColor(getResources().getColor( R.color.background)); gridView = (GridView) rootView .findViewById(R.id.gridViewMatrixItem); // check it the last item is not a fake one, add it if (!(itemsList.get(itemsList.size() - 1).getValue().equals("+"))) { addNewMatrixItem = new MatrixItem(getResources().getString( 
R.string.new_matrix_item), "+", ""); itemsList.add(addNewMatrixItem); } // check needed for jackson data loading if (itemsList == null) { itemsList = new ArrayList<MatrixItem>(); jacksonTable.entries = itemsList; // set create new item to the end, it will not appear in // jacksonTable.entries // FIXME comment is not correct, last fake item appears also // in // jacksonTable.entries and make problems!!!! addNewMatrixItem = new MatrixItem(getResources().getString( R.string.new_matrix_item), "+", ""); itemsList.add(addNewMatrixItem); } if (adapterCreateTemplate == null) { adapterCreateTemplate = new MatrixViewArrayAdapter( getActivity(), itemsList); // adapter.jacksonTable = jacksonTable; } gridView.setAdapter(adapterCreateTemplate); addListenersForCharacterCreateEditMode(); } // not in edit mode, popup allows to set just values else { Log.d("Martix Fragment", "not editable"); rootView = (FrameLayout) inflater.inflate( R.layout.character_edit_matrix_view, new LinearLayout( getActivity()), false); gridView = (GridView) rootView.findViewById(R.id.gridView); // check needed for jackson data loading if (itemsList == null) { itemsList = new ArrayList<MatrixItem>(); jacksonTable.entries = itemsList; } // check it the last item is a fake one, remove it if (itemsList.get(itemsList.size() - 1).getValue().equals("+")) { itemsList.remove(itemsList.get(itemsList.size() - 1)); } if (adapterCreateCharacter == null) { adapterCreateCharacter = new NewCharacterMatrixViewArrayAdapter( getActivity(), itemsList); // adapter.jacksonTable = jacksonTable; } final ArrayList<MatrixItem> selectedItems = ((NewCharacterMatrixViewArrayAdapter) adapterCreateCharacter).selectedMatrixItems; for (final MatrixItem item : itemsList) { if (item.isSelected()) { selectedItems.add(item); } } gridView.setAdapter(adapterCreateCharacter); addListenersForCharacterCreateNormalMode(selectedItems); } } protected void createViewForPlayMode(LayoutInflater inflater) { // edit mode if 
(SlideoutNavigationActivity.theActiveActivity.inEditMode()) { Log.d("Martix Fragment", "in edit mode"); // inflate to edit! rootView = (FrameLayout) inflater.inflate( R.layout.character_edit_matrix_view, new LinearLayout( getActivity()), false); gridView = (GridView) rootView.findViewById(R.id.gridView); // check needed for jackson data loading if (itemsList == null) { itemsList = new ArrayList<MatrixItem>(); jacksonTable.entries = itemsList; } if (adapterPlayEditMode == null) { adapterPlayEditMode = new PlayCharacterEditModeMatrixAdapter( getActivity(), itemsList); // adapter.jacksonTable = jacksonTable; } final ArrayList<MatrixItem> selectedItems = ((PlayCharacterEditModeMatrixAdapter) adapterPlayEditMode).selectedMatrixItems; for (final MatrixItem item : itemsList) { if (item.isSelected()) { selectedItems.add(item); } } gridView.setAdapter(adapterPlayEditMode); addListenersForCharacterPlayEditMode(selectedItems); } else {// ==!editable Log.d("Martix Fragment", "not editable"); // inflate to play rootView = (FrameLayout) inflater.inflate( R.layout.character_play_matrix_view, new LinearLayout( getActivity()), false); gridView = (GridView) rootView.findViewById(R.id.gridViewM); // check needed for jackson data loading if (itemsList == null) { itemsList = new ArrayList<MatrixItem>(); jacksonTable.entries = itemsList; } playMatrixItems = new ArrayList<MatrixItem>(); for (MatrixItem ma : itemsList) { if (ma.isSelected()) playMatrixItems.add(ma); } if (adapterPlay == null) { adapterPlay = new PlayCharacterMatrixAdapter(getActivity(), playMatrixItems); } gridView.setAdapter(adapterPlay); addListenersForCharacterPlayNormalMode(playMatrixItems); } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { super.onCreateView(inflater, container, savedInstanceState); // template creation mode if (SlideoutNavigationActivity.theActiveActivity instanceof TemplateGeneratorActivity) { 
this.createViewForTemplateCreationMode(inflater); } // character generator mode else if (SlideoutNavigationActivity.theActiveActivity instanceof CharacterEditActivity) { this.createViewForCharacterGeneratorMode(inflater); } // Character Playing mode else if (SlideoutNavigationActivity.theActiveActivity instanceof CharacterPlayActivity) { this.createViewForPlayMode(inflater); } return rootView; } protected void addListenersForTemplateCreateMode() { gridView.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int position, long id) { // is it last item? if (position == adapterCreateTemplate.getCount() - 1) { // create new item showPopup(adapterCreateTemplate); } else { // edit item showPopupForEditing( adapterCreateTemplate.getItem(position), adapterCreateTemplate); } } }); gridView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() { @Override public boolean onItemLongClick(AdapterView<?> adapterView, View view, final int position, long id) { if (position == adapterCreateTemplate.getCount() - 1) { return true; } AlertDialog.Builder builder = new AlertDialog.Builder( getActivity()); builder.setTitle(getResources().getString( R.string.msg_delete_item)); builder.setMessage(getResources().getString( R.string.msg_yes_to_item_delete)); builder.setNegativeButton( getResources().getString(R.string.no), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); builder.setPositiveButton(getResources() .getString(R.string.yes), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { removeMatrixItem(position, adapterCreateTemplate); } }); builder.create().show(); return true; } }); } protected void addListenersForCharacterCreateEditMode() { gridView.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int position, 
long id) { // is it last item? if (position == adapterCreateTemplate.getCount() - 1) { // create new item showPopup(adapterCreateTemplate); } else { // edit item showPopupForEditing( adapterCreateTemplate.getItem(position), adapterCreateTemplate); } } }); gridView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() { @Override public boolean onItemLongClick(AdapterView<?> adapterView, View view, final int position, long id) { if (position == adapterCreateTemplate.getCount() - 1) { return true; } AlertDialog.Builder builder = new AlertDialog.Builder( getActivity()); builder.setTitle(getResources().getString( R.string.msg_delete_item)); builder.setMessage(getResources().getString( R.string.msg_yes_to_item_delete)); builder.setNegativeButton( getResources().getString(R.string.no), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); builder.setPositiveButton(getResources() .getString(R.string.yes), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { removeMatrixItem(position, adapterCreateTemplate); } }); builder.create().show(); return true; } }); } protected void addListenersForCharacterCreateNormalMode( final List<MatrixItem> selectedItems) { gridView.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int position, long id) { MatrixItem curMatrixItem = itemsList.get(position); // if it is yet selected then remove if (selectedItems.contains(curMatrixItem)) { curMatrixItem.setSelected(false); selectedItems.clear(); for (MatrixItem ma : itemsList) { if (ma.isSelected()) selectedItems.add(ma); } adapterCreateCharacter.notifyDataSetChanged(); } // if is not yet selected then set as selected to show later else { curMatrixItem.setSelected(true); selectedItems.clear(); for (MatrixItem ma : itemsList) { if (ma.isSelected()) selectedItems.add(ma); } 
adapterCreateCharacter.notifyDataSetChanged(); // show popup to set current value showSetValuePopup(curMatrixItem, adapterCreateCharacter, null, selectedItems); } } }); gridView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() { @Override public boolean onItemLongClick(AdapterView<?> adapterView, View view, final int position, long id) { if (position == adapterCreateTemplate.getCount() - 1) { return true; } AlertDialog.Builder builder = new AlertDialog.Builder( getActivity()); builder.setTitle(getResources().getString( R.string.msg_delete_item)); builder.setMessage(getResources().getString( R.string.msg_yes_to_item_delete)); builder.setNegativeButton( getResources().getString(R.string.no), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); builder.setPositiveButton(getResources() .getString(R.string.yes), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { removeMatrixItem(position, adapterCreateCharacter); } }); builder.create().show(); return true; } }); } protected void addListenersForCharacterPlayEditMode( final List<MatrixItem> selectedItems) { gridView.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int position, long id) { if (position == itemsList.size() - 1) { Toast.makeText(getActivity(), "Neues Element wird in Deinem Character erstellt!", Toast.LENGTH_SHORT).show(); // add new matrix item showPopup(adapterPlayEditMode); } else { MatrixItem curMatrixItem = itemsList.get(position); if (selectedItems.contains(curMatrixItem)) { // because we do not want to show this item curMatrixItem.setSelected(false); selectedItems.clear(); for (MatrixItem ma : itemsList) { if (ma.isSelected()) selectedItems.add(ma); } adapterPlayEditMode.notifyDataSetChanged(); adapterPlay.clear(); adapterPlay.addAll(selectedItems); adapterPlay.notifyDataSetChanged(); } else { 
// show popup to set current value // showSetValuePopup(curMatrixItem, adapterPlayEditMode, // adapterPlay, selectedItems); // show popup to edit item showPopupForEditing(curMatrixItem, adapterPlayEditMode); // because it should show up in play mode curMatrixItem.setSelected(true); selectedItems.clear(); for (MatrixItem ma : itemsList) { if (ma.isSelected()) selectedItems.add(ma); } adapterPlayEditMode.notifyDataSetChanged(); adapterPlay.clear(); adapterPlay.addAll(selectedItems); adapterPlay.notifyDataSetChanged(); } } } }); } protected void addListenersForCharacterPlayNormalMode( final List<MatrixItem> selItems) { // to set new value for a matrix item in play mode directly gridView.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int position, long id) { showSetValuePopup(adapterPlay.getItem(position), adapterPlay, adapterPlayEditMode, selItems); } }); } @Override protected void addItemList() { // TODO Auto-generated method stub } @Override public void showDialog() { // TODO Auto-generated method stub } /** * This Method is used every time we need to add new element into adapters * element list. * */ public void addMatrixItem(MatrixItem newItem, ArrayAdapter adapter) { adapter.insert(newItem, adapter.getCount() - 1); adapter.notifyDataSetChanged(); } /** * This Method is used every time we want remove some element from adapters * element list. 
* */ public void removeMatrixItem(int position, ArrayAdapter adapter) { if (position < 0 || position == adapter.getCount() - 1) { return; } adapter.remove(adapter.getItem(position)); adapter.notifyDataSetChanged(); } private void showPopup(ArrayAdapter adapter) { AddNewItemDialogFragment popupAddNewItemFragment = AddNewItemDialogFragment .newInstance(this, adapter); popupAddNewItemFragment.show(getFragmentManager(), "popupAddNewItemFragment"); } private void showPopupForEditing(MatrixItem item, ArrayAdapter adapter) { AddNewItemDialogFragment popupAddNewItemFragment = AddNewItemDialogFragment .newInstance(this, adapter); popupAddNewItemFragment.editItem = item; popupAddNewItemFragment.show(getFragmentManager(), "popupAddNewItemFragment"); } private void showSetValuePopup(MatrixItem item, ArrayAdapter<MatrixItem> adapterEdit, ArrayAdapter<MatrixItem> adapterNormal, List<MatrixItem> selectedItems) { SettingValueDialogFragment settingValueDialogFragment = SettingValueDialogFragment .newInstance(this); settingValueDialogFragment.show(getFragmentManager(), "dialog"); settingValueDialogFragment.matrixItem = item; settingValueDialogFragment.passAdapterEdit(adapterEdit); settingValueDialogFragment.passAdapterNormal(adapterNormal); settingValueDialogFragment.passSelItems(selectedItems); } /* * JACKSON START */ public void jacksonInflate(MatrixTable myTable, Context appContext) { // set table setJacksonTable(myTable); // set flag, so that we are inflating the views with data from jackson // model // jacksonInflateWithData = true; itemsList = jacksonTable.entries; // check it the last item is a fake one, remove it // also before check if any item exists so we dont get ArrayOutOfBoundsException if(itemsList.size() > 0){ if (itemsList.get(itemsList.size() - 1).getValue().equals("+")) { itemsList.remove(itemsList.get(itemsList.size() - 1)); } } // add the "new item" entry final MatrixItem newElement = new MatrixItem(appContext.getResources() 
.getString(R.string.text_new_element), "+", ""); newElement.setSelected(false); itemsList.add(newElement); // // for (MatrixItem ma : jacksonTable.entries) { // Log.d("jacksonTable.entries - jacksonInflate", ma.getItemName()); // } } public void setJacksonTable(MatrixTable myTable) { jacksonTable = myTable; } /* * JACKSON END */ }
/* * JBoss, Home of Professional Open Source * Copyright 2009, JBoss Inc., and individual contributors as indicated * by the @authors tag. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.vfs.spi; import java.io.BufferedOutputStream; import java.io.Closeable; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.security.CodeSigner; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.jar.JarEntry; import java.util.jar.JarFile; import org.jboss.vfs.TempDir; import org.jboss.vfs.VFSLogger; import org.jboss.vfs.VFSMessages; import org.jboss.vfs.VFSUtils; import org.jboss.vfs.VirtualFile; import org.jboss.vfs.util.PathTokenizer; /** * {@inheritDoc} * <p/> * This implementation is backed by a zip file. The provided file must be owned by this instance; otherwise, if the * file disappears unexpectedly, the filesystem will malfunction. * * @author <a href="mailto:david.lloyd@redhat.com">David M. 
Lloyd</a>
 * @author <a href="mailto:ales.justin@jboss.org">Ales Justin</a>
 */
public final class JavaZipFileSystem implements FileSystem {

    private final JarFile zipFile;
    private final File archiveFile;
    // Archive's lastModified timestamp, used as the mtime for synthetic directory entries.
    private final long zipTime;
    private final ZipNode rootNode;
    private final TempDir tempDir;
    // Directory under tempDir where extracted entry contents are cached on demand.
    private final File contentsDir;

    /**
     * Create a new instance.
     *
     * @param name the name of the source archive
     * @param inputStream an input stream from the source archive
     * @param tempDir the temp dir into which zip information is stored
     * @throws java.io.IOException if an I/O error occurs
     */
    public JavaZipFileSystem(String name, InputStream inputStream, TempDir tempDir) throws IOException {
        this(tempDir.createFile(name, inputStream), tempDir);
    }

    /**
     * Create a new instance. Builds an in-memory tree of {@link ZipNode}s mirroring the archive's
     * entry hierarchy; entries containing "." or ".." path tokens are rejected as invalid.
     *
     * @param archiveFile the original archive file
     * @param tempDir the temp dir into which zip information is stored
     * @throws java.io.IOException if an I/O error occurs
     */
    public JavaZipFileSystem(File archiveFile, TempDir tempDir) throws IOException {
        zipTime = archiveFile.lastModified();
        final JarFile zipFile;
        this.zipFile = zipFile = new JarFile(archiveFile);
        this.archiveFile = archiveFile;
        this.tempDir = tempDir;
        final Enumeration<? extends JarEntry> entries = zipFile.entries();
        final ZipNode rootNode = new ZipNode(new HashMap<String, ZipNode>(), "", null);
        FILES:
        for (JarEntry entry : iter(entries)) {
            final String name = entry.getName();
            final boolean isDirectory = entry.isDirectory();
            final List<String> tokens = PathTokenizer.getTokens(name);
            ZipNode node = rootNode;
            final Iterator<String> it = tokens.iterator();
            while (it.hasNext()) {
                String token = it.next();
                if (PathTokenizer.isCurrentToken(token) || PathTokenizer.isReverseToken(token)) {
                    // invalid file name
                    continue FILES;
                }
                final Map<String, ZipNode> children = node.children;
                if (children == null) {
                    // todo - log bad zip entry (a file entry was also used as a directory)
                    continue FILES;
                }
                ZipNode child = children.get(token);
                if (child == null) {
                    // Intermediate tokens and explicit directory entries become directory nodes
                    // (non-null child map, null entry); leaf file entries carry the JarEntry.
                    child = it.hasNext() || isDirectory ? new ZipNode(new HashMap<String, ZipNode>(), token, null) : new ZipNode(null, token, entry);
                    children.put(token, child);
                }
                node = child;
            }
        }
        this.rootNode = rootNode;
        contentsDir = tempDir.getFile("contents");
        contentsDir.mkdir();
        VFSLogger.ROOT_LOGGER.tracef("Created zip filesystem for file %s in temp dir %s", archiveFile, tempDir);
    }

    /** Adapt an {@link Enumeration} to {@link Iterable} for use in a for-each loop. */
    private static <T> Iterable<T> iter(final Enumeration<T> entries) {
        return new EnumerationIterable<T>(entries);
    }

    /**
     * {@inheritDoc}
     * <p>
     * Lazily extracts the entry into the contents cache directory on first access; directories
     * become empty cache directories. Uses double-checked locking on the node's volatile cache field.
     */
    public File getFile(VirtualFile mountPoint, VirtualFile target) throws IOException {
        final ZipNode zipNode = getExistingZipNode(mountPoint, target);
        // check if we have cached one already
        File cachedFile = zipNode.cachedFile;
        if (cachedFile != null) {
            return cachedFile;
        }
        synchronized (zipNode) {
            // double-check
            cachedFile = zipNode.cachedFile;
            if (cachedFile != null) {
                return cachedFile;
            }
            // nope, create a cached temp
            final JarEntry zipEntry = zipNode.entry;
            String name = target.getPathNameRelativeTo(mountPoint);
            cachedFile = buildFile(contentsDir, name);
            if (zipEntry == null) {
                cachedFile.mkdir();
            } else {
                VFSUtils.copyStreamAndClose(zipFile.getInputStream(zipEntry), new BufferedOutputStream(new FileOutputStream(cachedFile)));
            }
            zipNode.cachedFile = cachedFile;
            return cachedFile;
        }
    }

    /**
     * {@inheritDoc}
     * <p>
     * Prefers the extracted cache file if present; the root node streams the archive itself.
     */
    public InputStream openInputStream(VirtualFile mountPoint, VirtualFile target) throws IOException {
        final ZipNode zipNode = getExistingZipNode(mountPoint, target);
        final File cachedFile = zipNode.cachedFile;
        if (cachedFile != null) {
            return new FileInputStream(cachedFile);
        }
        if (rootNode == zipNode) {
            return new FileInputStream(archiveFile);
        }
        final JarEntry entry = zipNode.entry;
        if (entry == null) {
            throw VFSMessages.MESSAGES.notAFile(target.getPathName());
        }
        return zipFile.getInputStream(entry);
    }

    /**
     * {@inheritDoc}
     * <p>
     * The archive itself is read-only; only the cached extracted copy (if any) is deleted.
     */
    public boolean delete(VirtualFile mountPoint, VirtualFile target) {
        final ZipNode zipNode = getZipNode(mountPoint, target);
        if (zipNode == null) {
            return false;
        }
        final File cachedFile = zipNode.cachedFile;
        return cachedFile != null && cachedFile.delete();
    }

    /**
     * {@inheritDoc}
     * <p>
     * Returns the cached file's length if extracted, the archive length for the root,
     * the entry's stored size otherwise, or 0 for directories and missing targets.
     */
    public long getSize(VirtualFile mountPoint, VirtualFile target) {
        final ZipNode zipNode = getZipNode(mountPoint, target);
        if (zipNode == null) {
            return 0L;
        }
        final File cachedFile = zipNode.cachedFile;
        final JarEntry entry = zipNode.entry;
        if (zipNode == rootNode) {
            return archiveFile.length();
        }
        return cachedFile != null ? cachedFile.length() : entry == null ? 0L : entry.getSize();
    }

    /**
     * {@inheritDoc}
     * <p>
     * Directories (null entry) report the archive's own modification time.
     */
    public long getLastModified(VirtualFile mountPoint, VirtualFile target) {
        final ZipNode zipNode = getZipNode(mountPoint, target);
        if (zipNode == null) {
            return 0L;
        }
        final File cachedFile = zipNode.cachedFile;
        final JarEntry entry = zipNode.entry;
        return cachedFile != null ? cachedFile.lastModified() : entry == null ? zipTime : entry.getTime();
    }

    /**
     * {@inheritDoc}
     */
    public boolean exists(VirtualFile mountPoint, VirtualFile target) {
        final ZipNode zipNode = rootNode.find(mountPoint, target);
        if (zipNode == null) {
            return false;
        } else {
            // if a cached copy was made and then deleted, treat the node as gone
            final File cachedFile = zipNode.cachedFile;
            return cachedFile == null || cachedFile.exists();
        }
    }

    /**
     * {@inheritDoc}
     */
    public boolean isFile(final VirtualFile mountPoint, final VirtualFile target) {
        final ZipNode zipNode = rootNode.find(mountPoint, target);
        return zipNode != null && zipNode.entry != null;
    }

    /**
     * {@inheritDoc}
     */
    public boolean isDirectory(VirtualFile mountPoint, VirtualFile target) {
        final ZipNode zipNode = rootNode.find(mountPoint, target);
        return zipNode != null && zipNode.entry == null;
    }

    /**
     * {@inheritDoc}
     */
    public List<String> getDirectoryEntries(VirtualFile mountPoint, VirtualFile target) {
        final ZipNode zipNode = getZipNode(mountPoint, target);
        if (zipNode == null) {
            return Collections.emptyList();
        }
        final Map<String, ZipNode> children = zipNode.children;
        if (children == null) {
            return Collections.emptyList();
        }
        final Collection<ZipNode> values = children.values();
        final List<String> names = new ArrayList<String>(values.size());
        for (ZipNode node : values) {
            names.add(node.name);
        }
        return names;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Returns {@code null} when the target does not exist or has no backing archive entry
     * (directories and the root node); previously a directory target caused an NPE because
     * {@code zipNode.entry} was dereferenced without the null guard every other method applies.
     */
    public CodeSigner[] getCodeSigners(VirtualFile mountPoint, VirtualFile target) {
        final ZipNode zipNode = getZipNode(mountPoint, target);
        if (zipNode == null) {
            return null;
        }
        final JarEntry jarEntry = zipNode.entry;
        // Directory nodes carry a null entry (see constructor) and therefore have no signers.
        return jarEntry == null ? null : jarEntry.getCodeSigners();
    }

    private ZipNode getZipNode(VirtualFile mountPoint, VirtualFile target) {
        return rootNode.find(mountPoint, target);
    }

    private ZipNode getExistingZipNode(VirtualFile mountPoint, VirtualFile target) throws FileNotFoundException {
        final ZipNode zipNode = rootNode.find(mountPoint, target);
        if (zipNode == null) {
            throw new FileNotFoundException(target.getPathName());
        }
        return zipNode;
    }

    /**
     * {@inheritDoc}
     */
    public boolean isReadOnly() {
        return true;
    }

    /**
     * {@inheritDoc}
     */
    public File getMountSource() {
        return archiveFile;
    }

    public URI getRootURI() throws URISyntaxException {
        return new URI("jar", archiveFile.toURI().toString() + "!/", null);
    }

    /**
     * {@inheritDoc}
     */
    public void close() throws IOException {
        VFSLogger.ROOT_LOGGER.tracef("Closing zip filesystem %s", this);
        VFSUtils.safeClose(new Closeable() {
            public void close() throws IOException {
                zipFile.close();
            }
        });
        tempDir.close();
    }

    // Map a relative path name onto a file under contentsDir, creating parent directories.
    private File buildFile(File contentsDir, String name) {
        List<String> tokens = PathTokenizer.getTokens(name);
        File currentFile = contentsDir;
        for (String token : tokens) {
            currentFile = new File(currentFile, token);
        }
        currentFile.getParentFile().mkdirs();
        return currentFile;
    }

    /**
     * One node of the in-memory archive tree. Directory nodes have a non-null (immutable after
     * construction) child map and a null entry; file nodes have a null child map and carry
     * their {@link JarEntry}.
     */
    private static final class ZipNode {

        // immutable child map
        private final Map<String, ZipNode> children;
        private final String name;
        private final JarEntry entry;
        private volatile File cachedFile;

        private ZipNode(Map<String, ZipNode> children, String name, JarEntry entry) {
            this.children = children;
            this.name = name;
            this.entry = entry;
        }

        // Walk from mountPoint down to target by recursing on parents, then descending one name.
        private ZipNode find(VirtualFile mountPoint, VirtualFile target) {
            if (mountPoint.equals(target)) {
                return this;
            } else {
                final ZipNode parent = find(mountPoint, target.getParent());
                if (parent == null) {
                    return null;
                }
                final Map<String, ZipNode> children = parent.children;
                if (children == null) {
                    return null;
                }
                return children.get(target.getName());
            }
        }
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.forecast.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Provides information about the Explainability resource.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ExplainabilityInfo" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ExplainabilityInfo implements Serializable, Cloneable, StructuredPojo {

    /** The Amazon Resource Name (ARN) of the Explainability. */
    private String explainabilityArn;

    /**
     * The status of the Explainability. States include: <code>ACTIVE</code>; <code>CREATE_PENDING</code>,
     * <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code>; <code>CREATE_STOPPING</code>,
     * <code>CREATE_STOPPED</code>; <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>,
     * <code>DELETE_FAILED</code>.
     */
    private String status;

    /**
     * Sets the Amazon Resource Name (ARN) of the Explainability.
     *
     * @param explainabilityArn
     *        The Amazon Resource Name (ARN) of the Explainability.
     */
    public void setExplainabilityArn(String explainabilityArn) {
        this.explainabilityArn = explainabilityArn;
    }

    /**
     * Returns the Amazon Resource Name (ARN) of the Explainability.
     *
     * @return The Amazon Resource Name (ARN) of the Explainability.
     */
    public String getExplainabilityArn() {
        return this.explainabilityArn;
    }

    /**
     * Fluent variant of {@link #setExplainabilityArn(String)}.
     *
     * @param explainabilityArn
     *        The Amazon Resource Name (ARN) of the Explainability.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ExplainabilityInfo withExplainabilityArn(String explainabilityArn) {
        setExplainabilityArn(explainabilityArn);
        return this;
    }

    /**
     * Sets the status of the Explainability. States include: <code>ACTIVE</code>; <code>CREATE_PENDING</code>,
     * <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code>; <code>CREATE_STOPPING</code>,
     * <code>CREATE_STOPPED</code>; <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>,
     * <code>DELETE_FAILED</code>.
     *
     * @param status
     *        The status of the Explainability.
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * Returns the status of the Explainability. States include: <code>ACTIVE</code>; <code>CREATE_PENDING</code>,
     * <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code>; <code>CREATE_STOPPING</code>,
     * <code>CREATE_STOPPED</code>; <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>,
     * <code>DELETE_FAILED</code>.
     *
     * @return The status of the Explainability.
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * Fluent variant of {@link #setStatus(String)}.
     *
     * @param status
     *        The status of the Explainability.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ExplainabilityInfo withStatus(String status) {
        setStatus(status);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format is fixed: "{Field: value," per set field, closing "}".
        final StringBuilder sb = new StringBuilder("{");
        if (getExplainabilityArn() != null) {
            sb.append("ExplainabilityArn: ").append(getExplainabilityArn()).append(",");
        }
        if (getStatus() != null) {
            sb.append("Status: ").append(getStatus());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ExplainabilityInfo)) {
            return false;
        }
        final ExplainabilityInfo that = (ExplainabilityInfo) obj;
        return java.util.Objects.equals(getExplainabilityArn(), that.getExplainabilityArn())
                && java.util.Objects.equals(getStatus(), that.getStatus());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation (null -> 0) as the
        // hand-rolled prime loop, so hash values are unchanged.
        return java.util.Objects.hash(getExplainabilityArn(), getStatus());
    }

    @Override
    public ExplainabilityInfo clone() {
        try {
            return (ExplainabilityInfo) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.forecast.model.transform.ExplainabilityInfoMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.datatorrent.stram; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; import java.net.InetSocketAddress; import java.net.URI; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; import javax.xml.bind.annotation.XmlElement; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.tuple.MutablePair; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.service.CompositeService; import org.apache.hadoop.yarn.api.ApplicationConstants; import 
org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.client.api.AMRMClient; import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.client.api.async.NMClientAsync; import org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.util.Clock; import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.SystemClock; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.WebApps; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.datatorrent.api.Attribute; import com.datatorrent.api.AutoMetric; import com.datatorrent.api.Context.DAGContext; import com.datatorrent.api.DAG; import com.datatorrent.api.StringCodec; import com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest; import com.datatorrent.stram.StreamingContainerManager.ContainerResource; import com.datatorrent.stram.api.AppDataSource; import com.datatorrent.stram.api.BaseContext; import com.datatorrent.stram.api.StramEvent; import 
com.datatorrent.stram.appdata.AppDataPushAgent; import com.datatorrent.stram.client.StramClientUtils; import com.datatorrent.stram.engine.StreamingContainer; import com.datatorrent.stram.plan.logical.LogicalPlan; import com.datatorrent.stram.plan.physical.OperatorStatus.PortStatus; import com.datatorrent.stram.plan.physical.PTContainer; import com.datatorrent.stram.plan.physical.PTOperator; import com.datatorrent.stram.security.StramDelegationTokenIdentifier; import com.datatorrent.stram.security.StramDelegationTokenManager; import com.datatorrent.stram.security.StramUserLogin; import com.datatorrent.stram.security.StramWSFilterInitializer; import com.datatorrent.stram.util.ConfigUtils; import com.datatorrent.stram.util.SecurityUtils; import com.datatorrent.stram.webapp.AppInfo; import com.datatorrent.stram.webapp.StramWebApp; import static java.lang.Thread.sleep; /** * Streaming Application Master * * @since 0.3.2 */ public class StreamingAppMasterService extends CompositeService { private static final Logger LOG = LoggerFactory.getLogger(StreamingAppMasterService.class); private static final long DELEGATION_KEY_UPDATE_INTERVAL = 24 * 60 * 60 * 1000; private static final long DELEGATION_TOKEN_MAX_LIFETIME = Long.MAX_VALUE / 2; private static final long DELEGATION_TOKEN_RENEW_INTERVAL = Long.MAX_VALUE / 2; private static final long DELEGATION_TOKEN_REMOVER_SCAN_INTERVAL = 24 * 60 * 60 * 1000; private static final int UPDATE_NODE_REPORTS_INTERVAL = 10 * 60 * 1000; private AMRMClient<ContainerRequest> amRmClient; private NMClientAsync nmClient; private LogicalPlan dag; // Application Attempt Id ( combination of attemptId and fail count ) private final ApplicationAttemptId appAttemptID; // Hostname of the container private final String appMasterHostname = ""; // Tracking url to which app master publishes info for clients to monitor private String appMasterTrackingUrl = ""; // Simple flag to denote whether all works is done private boolean appDone = false; // Counter 
for completed containers ( complete denotes successful or failed ) private final AtomicInteger numCompletedContainers = new AtomicInteger(); // Containers that the RM has allocated to us private final ConcurrentMap<String, AllocatedContainer> allocatedContainers = Maps.newConcurrentMap(); // Set of nodes marked blacklisted due to consecutive container failures on the nodes private final Set<String> failedBlackListedNodes = Sets.newHashSet(); // Maintains max consecutive failures stats for nodes for blacklisting failing nodes private final Map<String, NodeFailureStats> failedContainerNodesMap = Maps.newHashMap(); // Count of failed containers private final AtomicInteger numFailedContainers = new AtomicInteger(); private final ConcurrentLinkedQueue<Runnable> pendingTasks = new ConcurrentLinkedQueue<>(); // child container callback private StreamingContainerParent heartbeatListener; private StreamingContainerManager dnmgr; private StramAppContext appContext; private final Clock clock = new SystemClock(); private final long startTime = clock.getTime(); private final ClusterAppStats stats = new ClusterAppStats(); private StramDelegationTokenManager delegationTokenManager = null; private AppDataPushAgent appDataPushAgent; public StreamingAppMasterService(ApplicationAttemptId appAttemptID) { super(StreamingAppMasterService.class.getName()); this.appAttemptID = appAttemptID; } private class NodeFailureStats { long lastFailureTimeStamp; int failureCount; long blackListAdditionTime; public NodeFailureStats(long lastFailureTimeStamp, int failureCount) { this.lastFailureTimeStamp = lastFailureTimeStamp; this.failureCount = failureCount; } } /** * Overrides getters to pull live info. 
*/ protected class ClusterAppStats extends AppInfo.AppStats { @AutoMetric @Override public int getAllocatedContainers() { return allocatedContainers.size(); } @AutoMetric @Override public int getPlannedContainers() { return dnmgr.getPhysicalPlan().getContainers().size(); } @AutoMetric @Override @XmlElement public int getFailedContainers() { return numFailedContainers.get(); } @AutoMetric @Override public int getNumOperators() { return dnmgr.getPhysicalPlan().getAllOperators().size(); } @Override public long getCurrentWindowId() { long min = Long.MAX_VALUE; for (Map.Entry<Integer, PTOperator> entry : dnmgr.getPhysicalPlan().getAllOperators().entrySet()) { long windowId = entry.getValue().stats.currentWindowId.get(); if (min > windowId) { min = windowId; } } return StreamingContainerManager.toWsWindowId(min == Long.MAX_VALUE ? 0 : min); } @Override public long getRecoveryWindowId() { return StreamingContainerManager.toWsWindowId(dnmgr.getCommittedWindowId()); } @AutoMetric @Override public long getTuplesProcessedPSMA() { long result = 0; for (Map.Entry<Integer, PTOperator> entry : dnmgr.getPhysicalPlan().getAllOperators().entrySet()) { result += entry.getValue().stats.tuplesProcessedPSMA.get(); } return result; } @AutoMetric @Override public long getTotalTuplesProcessed() { long result = 0; for (Map.Entry<Integer, PTOperator> entry : dnmgr.getPhysicalPlan().getAllOperators().entrySet()) { result += entry.getValue().stats.totalTuplesProcessed.get(); } return result; } @AutoMetric @Override public long getTuplesEmittedPSMA() { long result = 0; for (Map.Entry<Integer, PTOperator> entry : dnmgr.getPhysicalPlan().getAllOperators().entrySet()) { result += entry.getValue().stats.tuplesEmittedPSMA.get(); } return result; } @AutoMetric @Override public long getTotalTuplesEmitted() { long result = 0; for (Map.Entry<Integer, PTOperator> entry : dnmgr.getPhysicalPlan().getAllOperators().entrySet()) { result += entry.getValue().stats.totalTuplesEmitted.get(); } return result; } 
@AutoMetric @Override public long getTotalMemoryAllocated() { long result = 0; for (PTContainer c : dnmgr.getPhysicalPlan().getContainers()) { result += c.getAllocatedMemoryMB(); } return result; } @AutoMetric @Override public long getMemoryRequired() { long result = 0; for (PTContainer c : dnmgr.getPhysicalPlan().getContainers()) { if (c.getExternalId() == null || c.getState() == PTContainer.State.KILLED) { result += c.getRequiredMemoryMB(); } } return result; } @AutoMetric @Override public int getTotalVCoresAllocated() { int result = 0; for (PTContainer c : dnmgr.getPhysicalPlan().getContainers()) { result += c.getAllocatedVCores(); } return result; } @AutoMetric @Override public int getVCoresRequired() { int result = 0; for (PTContainer c : dnmgr.getPhysicalPlan().getContainers()) { if (c.getExternalId() == null || c.getState() == PTContainer.State.KILLED) { if (c.getRequiredVCores() == 0) { result++; } else { result += c.getRequiredVCores(); } } } return result; } @AutoMetric @Override public long getTotalBufferServerReadBytesPSMA() { long result = 0; for (Map.Entry<Integer, PTOperator> entry : dnmgr.getPhysicalPlan().getAllOperators().entrySet()) { for (Map.Entry<String, PortStatus> portEntry : entry.getValue().stats.inputPortStatusList.entrySet()) { result += portEntry.getValue().bufferServerBytesPMSMA.getAvg() * 1000; } } return result; } @AutoMetric @Override public long getTotalBufferServerWriteBytesPSMA() { long result = 0; for (Map.Entry<Integer, PTOperator> entry : dnmgr.getPhysicalPlan().getAllOperators().entrySet()) { for (Map.Entry<String, PortStatus> portEntry : entry.getValue().stats.outputPortStatusList.entrySet()) { result += portEntry.getValue().bufferServerBytesPMSMA.getAvg() * 1000; } } return result; } @Override public List<Integer> getCriticalPath() { StreamingContainerManager.CriticalPathInfo criticalPathInfo = dnmgr.getCriticalPathInfo(); return (criticalPathInfo == null) ? 
null : criticalPathInfo.path; } @AutoMetric @Override public long getLatency() { StreamingContainerManager.CriticalPathInfo criticalPathInfo = dnmgr.getCriticalPathInfo(); return (criticalPathInfo == null) ? 0 : criticalPathInfo.latency; } @Override public long getWindowStartMillis() { return dnmgr.getWindowStartMillis(); } } private class ClusterAppContextImpl extends BaseContext implements StramAppContext { private ClusterAppContextImpl() { super(null, null); } ClusterAppContextImpl(Attribute.AttributeMap attributes) { super(attributes, null); } @Override public ApplicationId getApplicationID() { return appAttemptID.getApplicationId(); } @Override public ApplicationAttemptId getApplicationAttemptId() { return appAttemptID; } @Override public String getApplicationName() { return getValue(LogicalPlan.APPLICATION_NAME); } @Override public String getApplicationDocLink() { return getValue(LogicalPlan.APPLICATION_DOC_LINK); } @Override public long getStartTime() { return startTime; } @Override public String getApplicationPath() { return getValue(LogicalPlan.APPLICATION_PATH); } @Override public CharSequence getUser() { return System.getenv(ApplicationConstants.Environment.USER.toString()); } @Override public Clock getClock() { return clock; } @Override public String getAppMasterTrackingUrl() { return appMasterTrackingUrl; } @Override public ClusterAppStats getStats() { return stats; } @Override public String getGatewayAddress() { return getValue(LogicalPlan.GATEWAY_CONNECT_ADDRESS); } @Override public boolean isGatewayConnected() { if (StreamingAppMasterService.this.dnmgr != null) { return StreamingAppMasterService.this.dnmgr.isGatewayConnected(); } return false; } @Override public List<AppDataSource> getAppDataSources() { if (StreamingAppMasterService.this.dnmgr != null) { return StreamingAppMasterService.this.dnmgr.getAppDataSources(); } return null; } @Override public Map<String, Object> getMetrics() { if (StreamingAppMasterService.this.dnmgr != null) { return 
(Map)StreamingAppMasterService.this.dnmgr.getLatestLogicalMetrics(); } return null; } @SuppressWarnings("FieldNameHidesFieldInSuperclass") private static final long serialVersionUID = 201309112304L; } /** * Dump out contents of $CWD and the environment to stdout for debugging */ @SuppressWarnings("UseOfSystemOutOrSystemErr") public void dumpOutDebugInfo() { LOG.info("Dump debug output"); Map<String, String> envs = System.getenv(); LOG.info("\nDumping System Env: begin"); for (Map.Entry<String, String> env : envs.entrySet()) { LOG.info("System env: key=" + env.getKey() + ", val=" + env.getValue()); } LOG.info("Dumping System Env: end"); String cmd = "ls -al"; Runtime run = Runtime.getRuntime(); Process pr; try { pr = run.exec(cmd); pr.waitFor(); BufferedReader buf = new BufferedReader(new InputStreamReader(pr.getInputStream())); String line; LOG.info("\nDumping files in local dir: begin"); try { while ((line = buf.readLine()) != null) { LOG.info("System CWD content: " + line); } LOG.info("Dumping files in local dir: end"); } finally { buf.close(); } } catch (IOException e) { LOG.debug("Exception", e); } catch (InterruptedException e) { LOG.info("Interrupted", e); } LOG.info("Classpath: {}", System.getProperty("java.class.path")); LOG.info("Config resources: {}", getConfig().toString()); try { // find a better way of logging this using the logger. 
Configuration.dumpConfiguration(getConfig(), new PrintWriter(System.out)); } catch (Exception e) { LOG.error("Error dumping configuration.", e); } } @Override protected void serviceInit(Configuration conf) throws Exception { LOG.info("Application master" + ", appId=" + appAttemptID.getApplicationId().getId() + ", clustertimestamp=" + appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + appAttemptID.getAttemptId()); FileInputStream fis = new FileInputStream("./" + LogicalPlan.SER_FILE_NAME); try { this.dag = LogicalPlan.read(fis); } finally { fis.close(); } // "debug" simply dumps all data using LOG.info if (dag.isDebug()) { dumpOutDebugInfo(); } dag.setAttribute(LogicalPlan.APPLICATION_ATTEMPT_ID, appAttemptID.getAttemptId()); FSRecoveryHandler recoveryHandler = new FSRecoveryHandler(dag.assertAppPath(), conf); this.dnmgr = StreamingContainerManager.getInstance(recoveryHandler, dag, true); dag = this.dnmgr.getLogicalPlan(); this.appContext = new ClusterAppContextImpl(dag.getAttributes()); Map<Class<?>, Class<? 
extends StringCodec<?>>> codecs = dag.getAttributes().get(DAG.STRING_CODECS); StringCodecs.loadConverters(codecs); LOG.info("Starting application with {} operators in {} containers", dnmgr.getPhysicalPlan().getAllOperators().size(), dnmgr.getPhysicalPlan().getContainers().size()); // Setup security configuration such as that for web security SecurityUtils.init(conf, dag.getValue(LogicalPlan.STRAM_HTTP_AUTHENTICATION)); if (UserGroupInformation.isSecurityEnabled()) { // TODO :- Need to perform token renewal delegationTokenManager = new StramDelegationTokenManager(DELEGATION_KEY_UPDATE_INTERVAL, DELEGATION_TOKEN_MAX_LIFETIME, DELEGATION_TOKEN_RENEW_INTERVAL, DELEGATION_TOKEN_REMOVER_SCAN_INTERVAL); } this.nmClient = new NMClientAsyncImpl(new NMCallbackHandler()); addService(nmClient); this.amRmClient = AMRMClient.createAMRMClient(); addService(amRmClient); // start RPC server int rpcListenerCount = dag.getValue(DAGContext.HEARTBEAT_LISTENER_THREAD_COUNT); this.heartbeatListener = new StreamingContainerParent(this.getClass().getName(), dnmgr, delegationTokenManager, rpcListenerCount); addService(heartbeatListener); AutoMetric.Transport appDataPushTransport = dag.getValue(LogicalPlan.METRICS_TRANSPORT); if (appDataPushTransport != null) { this.appDataPushAgent = new AppDataPushAgent(dnmgr, appContext); addService(this.appDataPushAgent); } // initialize all services added above super.serviceInit(conf); } @Override protected void serviceStart() throws Exception { super.serviceStart(); if (UserGroupInformation.isSecurityEnabled()) { delegationTokenManager.startThreads(); } // write the connect address for containers to DFS InetSocketAddress connectAddress = NetUtils.getConnectAddress(this.heartbeatListener.getAddress()); URI connectUri = RecoverableRpcProxy.toConnectURI(connectAddress); FSRecoveryHandler recoveryHandler = new FSRecoveryHandler(dag.assertAppPath(), getConfig()); recoveryHandler.writeConnectUri(connectUri.toString()); // start web service try { 
org.mortbay.log.Log.setLog(null); } catch (Throwable throwable) { // SPOI-2687. As part of Pivotal Certification, we need to catch ClassNotFoundException as Pivotal was using // Jetty 7 where as other distros are using Jetty 6. // LOG.error("can't set the log to null: ", throwable); } try { Configuration config = getConfig(); if (SecurityUtils.isStramWebSecurityEnabled()) { config = new Configuration(config); config.set("hadoop.http.filter.initializers", StramWSFilterInitializer.class.getCanonicalName()); } WebApp webApp = WebApps.$for("stram", StramAppContext.class, appContext, "ws").with(config).start(new StramWebApp(this.dnmgr)); LOG.info("Started web service at port: " + webApp.port()); appMasterTrackingUrl = NetUtils.getConnectAddress(webApp.getListenerAddress()).getHostName() + ":" + webApp.port(); if (ConfigUtils.isSSLEnabled(config)) { appMasterTrackingUrl = "https://" + appMasterTrackingUrl; } LOG.info("Setting tracking URL to: " + appMasterTrackingUrl); } catch (Exception e) { LOG.error("Webapps failed to start. 
Ignoring for now:", e); } } @Override protected void serviceStop() throws Exception { super.serviceStop(); if (UserGroupInformation.isSecurityEnabled()) { delegationTokenManager.stopThreads(); } if (nmClient != null) { nmClient.stop(); } if (amRmClient != null) { amRmClient.stop(); } if (dnmgr != null) { dnmgr.teardown(); } } public boolean run() throws Exception { boolean status = true; try { StreamingContainer.eventloop.start(); execute(); } finally { StreamingContainer.eventloop.stop(); } return status; } /** * Main run function for the application master * * @throws YarnException */ @SuppressWarnings("SleepWhileInLoop") private void execute() throws YarnException, IOException { LOG.info("Starting ApplicationMaster"); final Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials(); LOG.info("number of tokens: {}", credentials.getAllTokens().size()); Iterator<Token<?>> iter = credentials.getAllTokens().iterator(); while (iter.hasNext()) { Token<?> token = iter.next(); LOG.debug("token: {}", token); } final Configuration conf = getConfig(); long tokenLifeTime = (long)(dag.getValue(LogicalPlan.TOKEN_REFRESH_ANTICIPATORY_FACTOR) * Math.min(dag.getValue(LogicalPlan.HDFS_TOKEN_LIFE_TIME), dag.getValue(LogicalPlan.RM_TOKEN_LIFE_TIME))); long expiryTime = System.currentTimeMillis() + tokenLifeTime; LOG.debug(" expiry token time {}", tokenLifeTime); String principal = dag.getValue(LogicalPlan.PRINCIPAL); String hdfsKeyTabFile = dag.getValue(LogicalPlan.KEY_TAB_FILE); // Register self with ResourceManager RegisterApplicationMasterResponse response = amRmClient.registerApplicationMaster(appMasterHostname, 0, appMasterTrackingUrl); // Dump out information about cluster capability as seen by the resource manager int maxMem = response.getMaximumResourceCapability().getMemory(); int maxVcores = response.getMaximumResourceCapability().getVirtualCores(); int minMem = conf.getInt("yarn.scheduler.minimum-allocation-mb", 0); int minVcores = 
conf.getInt("yarn.scheduler.minimum-allocation-vcores", 0); LOG.info("Max mem {}m, Min mem {}m, Max vcores {} and Min vcores {} capabililty of resources in this cluster ", maxMem, minMem, maxVcores, minVcores); long blacklistRemovalTime = dag.getValue(DAGContext.BLACKLISTED_NODE_REMOVAL_TIME_MILLIS); int maxConsecutiveContainerFailures = dag.getValue(DAGContext.MAX_CONSECUTIVE_CONTAINER_FAILURES_FOR_BLACKLIST); LOG.info("Blacklist removal time in millis = {}, max consecutive node failure count = {}", blacklistRemovalTime, maxConsecutiveContainerFailures); // for locality relaxation fall back Map<StreamingContainerAgent.ContainerStartRequest, MutablePair<Integer, ContainerRequest>> requestedResources = Maps.newHashMap(); // Setup heartbeat emitter // TODO poll RM every now and then with an empty request to let RM know that we are alive // The heartbeat interval after which an AM is timed out by the RM is defined by a config setting: // RM_AM_EXPIRY_INTERVAL_MS with default defined by DEFAULT_RM_AM_EXPIRY_INTERVAL_MS // The allocate calls to the RM count as heartbeat so, for now, this additional heartbeat emitter // is not required. int loopCounter = -1; long nodeReportUpdateTime = 0; List<ContainerId> releasedContainers = new ArrayList<>(); // keep track of already requested containers to not request them again while waiting for allocation int numRequestedContainers = 0; int numReleasedContainers = 0; int nextRequestPriority = 0; // Use override for resource requestor in case of cloudera distribution, to handle host specific requests ResourceRequestHandler resourceRequestor = System.getenv().containsKey("CDH_HADOOP_BIN") ? 
new BlacklistBasedResourceRequestHandler() : new ResourceRequestHandler(); List<ContainerStartRequest> pendingContainerStartRequests = new LinkedList<>(); YarnClient clientRMService = YarnClient.createYarnClient(); try { // YARN-435 // we need getClusterNodes to populate the initial node list, // subsequent updates come through the heartbeat response clientRMService.init(conf); clientRMService.start(); ApplicationReport ar = StramClientUtils.getStartedAppInstanceByName(clientRMService, dag.getAttributes().get(DAG.APPLICATION_NAME), UserGroupInformation.getLoginUser().getUserName(), dag.getAttributes().get(DAG.APPLICATION_ID)); if (ar != null) { appDone = true; dnmgr.shutdownDiagnosticsMessage = String.format("Application master failed due to application %s with duplicate application name \"%s\" by the same user \"%s\" is already started.", ar.getApplicationId().toString(), ar.getName(), ar.getUser()); LOG.info("Forced shutdown due to {}", dnmgr.shutdownDiagnosticsMessage); finishApplication(FinalApplicationStatus.FAILED); return; } resourceRequestor.updateNodeReports(clientRMService.getNodeReports()); nodeReportUpdateTime = System.currentTimeMillis() + UPDATE_NODE_REPORTS_INTERVAL; } catch (Exception e) { throw new RuntimeException("Failed to retrieve cluster nodes report.", e); } finally { clientRMService.stop(); } List<Container> containers = response.getContainersFromPreviousAttempts(); // Running containers might take a while to register with the new app master and send the heartbeat signal. int waitForRecovery = containers.size() > 0 ? 
dag.getValue(LogicalPlan.HEARTBEAT_TIMEOUT_MILLIS) / 1000 : 0; previouslyAllocatedContainers(containers); FinalApplicationStatus finalStatus = FinalApplicationStatus.SUCCEEDED; final InetSocketAddress rmAddress = conf.getSocketAddr(YarnConfiguration.RM_ADDRESS, YarnConfiguration.DEFAULT_RM_ADDRESS, YarnConfiguration.DEFAULT_RM_PORT); while (!appDone) { loopCounter++; final long currentTimeMillis = System.currentTimeMillis(); if (UserGroupInformation.isSecurityEnabled() && currentTimeMillis >= expiryTime && hdfsKeyTabFile != null) { String applicationId = appAttemptID.getApplicationId().toString(); expiryTime = StramUserLogin.refreshTokens(tokenLifeTime, FileUtils.getTempDirectoryPath(), applicationId, conf, principal, hdfsKeyTabFile, credentials, rmAddress, true); } if (currentTimeMillis > nodeReportUpdateTime) { resourceRequestor.updateNodeReports(clientRMService.getNodeReports()); nodeReportUpdateTime = currentTimeMillis + UPDATE_NODE_REPORTS_INTERVAL; } Runnable r; while ((r = this.pendingTasks.poll()) != null) { r.run(); } // log current state /* * LOG.info("Current application state: loop=" + loopCounter + ", appDone=" + appDone + ", total=" + * numTotalContainers + ", requested=" + numRequestedContainers + ", completed=" + numCompletedContainers + * ", failed=" + numFailedContainers + ", currentAllocated=" + this.allAllocatedContainers.size()); */ // Sleep before each loop when asking RM for containers // to avoid flooding RM with spurious requests when it // need not have any available containers try { sleep(1000); } catch (InterruptedException e) { LOG.info("Sleep interrupted " + e.getMessage()); } // Setup request to be sent to RM to allocate containers List<ContainerRequest> containerRequests = new ArrayList<>(); List<ContainerRequest> removedContainerRequests = new ArrayList<>(); // request containers for pending deploy requests if (!dnmgr.containerStartRequests.isEmpty()) { StreamingContainerAgent.ContainerStartRequest csr; while ((csr = 
dnmgr.containerStartRequests.poll()) != null) { if (csr.container.getRequiredMemoryMB() > maxMem) { LOG.warn("Container memory {}m above max threshold of cluster. Using max value {}m.", csr.container.getRequiredMemoryMB(), maxMem); csr.container.setRequiredMemoryMB(maxMem); } if (csr.container.getRequiredMemoryMB() < minMem) { csr.container.setRequiredMemoryMB(minMem); } if (csr.container.getRequiredVCores() > maxVcores) { LOG.warn("Container vcores {} above max threshold of cluster. Using max value {}.", csr.container.getRequiredVCores(), maxVcores); csr.container.setRequiredVCores(maxVcores); } if (csr.container.getRequiredVCores() < minVcores) { csr.container.setRequiredVCores(minVcores); } csr.container.setResourceRequestPriority(nextRequestPriority++); ContainerRequest cr = resourceRequestor.createContainerRequest(csr, true); if (cr == null) { pendingContainerStartRequests.add(csr); } else { resourceRequestor.addContainerRequest(requestedResources, loopCounter, containerRequests, csr, cr); } } } // If all other requests are allocated, retry pending requests which need host availability if (containerRequests.isEmpty() && !pendingContainerStartRequests.isEmpty()) { List<ContainerStartRequest> removalList = new LinkedList<>(); for (ContainerStartRequest csr : pendingContainerStartRequests) { ContainerRequest cr = resourceRequestor.createContainerRequest(csr, true); if (cr != null) { resourceRequestor.addContainerRequest(requestedResources, loopCounter, containerRequests, csr, cr); removalList.add(csr); } } pendingContainerStartRequests.removeAll(removalList); } resourceRequestor.reissueContainerRequests(amRmClient, requestedResources, loopCounter, resourceRequestor, containerRequests, removedContainerRequests); /* Remove nodes from blacklist after timeout */ List<String> blacklistRemovals = new ArrayList<>(); for (String hostname : failedBlackListedNodes) { Long timeDiff = currentTimeMillis - failedContainerNodesMap.get(hostname).blackListAdditionTime; if 
(timeDiff >= blacklistRemovalTime) { blacklistRemovals.add(hostname); failedContainerNodesMap.remove(hostname); } } if (!blacklistRemovals.isEmpty()) { amRmClient.updateBlacklist(null, blacklistRemovals); LOG.info("Removing nodes {} from blacklist: time elapsed since last blacklisting due to failure is greater than specified timeout", blacklistRemovals.toString()); failedBlackListedNodes.removeAll(blacklistRemovals); } numRequestedContainers += containerRequests.size() - removedContainerRequests.size(); AllocateResponse amResp = sendContainerAskToRM(containerRequests, removedContainerRequests, releasedContainers); if (amResp.getAMCommand() != null) { LOG.info(" statement executed:{}", amResp.getAMCommand()); switch (amResp.getAMCommand()) { case AM_RESYNC: case AM_SHUTDOWN: throw new YarnRuntimeException("Received the " + amResp.getAMCommand() + " command from RM"); default: throw new YarnRuntimeException("Received the " + amResp.getAMCommand() + " command from RM"); } } releasedContainers.clear(); // Retrieve list of allocated containers from the response List<Container> newAllocatedContainers = amResp.getAllocatedContainers(); // LOG.info("Got response from RM for container ask, allocatedCnt=" + newAllocatedContainers.size()); numRequestedContainers -= newAllocatedContainers.size(); long timestamp = System.currentTimeMillis(); for (Container allocatedContainer : newAllocatedContainers) { LOG.info("Got new container." 
+ ", containerId=" + allocatedContainer.getId() + ", containerNode=" + allocatedContainer.getNodeId() + ", containerNodeURI=" + allocatedContainer.getNodeHttpAddress() + ", containerResourceMemory" + allocatedContainer.getResource().getMemory() + ", priority" + allocatedContainer.getPriority()); // + ", containerToken" + allocatedContainer.getContainerToken().getIdentifier().toString()); boolean alreadyAllocated = true; StreamingContainerAgent.ContainerStartRequest csr = null; for (Map.Entry<StreamingContainerAgent.ContainerStartRequest, MutablePair<Integer, ContainerRequest>> entry : requestedResources.entrySet()) { if (entry.getKey().container.getResourceRequestPriority() == allocatedContainer.getPriority().getPriority()) { alreadyAllocated = false; csr = entry.getKey(); break; } } if (alreadyAllocated) { LOG.info("Releasing {} as resource with priority {} was already assigned", allocatedContainer.getId(), allocatedContainer.getPriority()); releasedContainers.add(allocatedContainer.getId()); numReleasedContainers++; numRequestedContainers--; continue; } if (csr != null) { requestedResources.remove(csr); } // allocate resource to container ContainerResource resource = new ContainerResource(allocatedContainer.getPriority().getPriority(), allocatedContainer.getId().toString(), allocatedContainer.getNodeId().toString(), allocatedContainer.getResource().getMemory(), allocatedContainer.getResource().getVirtualCores(), allocatedContainer.getNodeHttpAddress()); StreamingContainerAgent sca = dnmgr.assignContainer(resource, null); if (sca == null) { // allocated container no longer needed, add release request LOG.warn("Container {} allocated but nothing to deploy, going to release this container.", allocatedContainer.getId()); releasedContainers.add(allocatedContainer.getId()); } else { AllocatedContainer allocatedContainerHolder = new AllocatedContainer(allocatedContainer); this.allocatedContainers.put(allocatedContainer.getId().toString(), allocatedContainerHolder); 
ByteBuffer tokens = null; if (UserGroupInformation.isSecurityEnabled()) { UserGroupInformation ugi = UserGroupInformation.getLoginUser(); Token<StramDelegationTokenIdentifier> delegationToken = allocateDelegationToken(ugi.getUserName(), heartbeatListener.getAddress()); allocatedContainerHolder.delegationToken = delegationToken; //ByteBuffer tokens = LaunchContainerRunnable.getTokens(delegationTokenManager, heartbeatListener.getAddress()); tokens = LaunchContainerRunnable.getTokens(ugi, delegationToken); } LaunchContainerRunnable launchContainer = new LaunchContainerRunnable(allocatedContainer, nmClient, sca, tokens); // Thread launchThread = new Thread(runnableLaunchContainer); // launchThreads.add(launchThread); // launchThread.start(); launchContainer.run(); // communication with NMs is now async // record container start event StramEvent ev = new StramEvent.StartContainerEvent(allocatedContainer.getId().toString(), allocatedContainer.getNodeId().toString()); ev.setTimestamp(timestamp); dnmgr.recordEventAsync(ev); } } // track node updates for future locality constraint allocations // TODO: it seems 2.0.4-alpha doesn't give us any updates resourceRequestor.updateNodeReports(amResp.getUpdatedNodes()); // Check the completed containers List<ContainerStatus> completedContainers = amResp.getCompletedContainersStatuses(); // LOG.debug("Got response from RM for container ask, completedCnt=" + completedContainers.size()); List<String> blacklistAdditions = new ArrayList<>(); for (ContainerStatus containerStatus : completedContainers) { LOG.info("Completed containerId=" + containerStatus.getContainerId() + ", state=" + containerStatus.getState() + ", exitStatus=" + containerStatus.getExitStatus() + ", diagnostics=" + containerStatus.getDiagnostics()); // non complete containers should not be here assert (containerStatus.getState() == ContainerState.COMPLETE); AllocatedContainer allocatedContainer = allocatedContainers.remove(containerStatus.getContainerId().toString()); 
if (allocatedContainer != null && allocatedContainer.delegationToken != null) { UserGroupInformation ugi = UserGroupInformation.getLoginUser(); delegationTokenManager.cancelToken(allocatedContainer.delegationToken, ugi.getUserName()); } int exitStatus = containerStatus.getExitStatus(); if (0 != exitStatus) { if (allocatedContainer != null) { numFailedContainers.incrementAndGet(); if (exitStatus != 1 && maxConsecutiveContainerFailures != Integer.MAX_VALUE) { // If container failure due to framework String hostname = allocatedContainer.container.getNodeId().getHost(); if (!failedBlackListedNodes.contains(hostname)) { // Blacklist the node if not already blacklisted if (failedContainerNodesMap.containsKey(hostname)) { NodeFailureStats stats = failedContainerNodesMap.get(hostname); long timeStamp = System.currentTimeMillis(); if (timeStamp - stats.lastFailureTimeStamp >= blacklistRemovalTime) { // Reset failure count if last failure was before Blacklist removal time stats.failureCount = 1; stats.lastFailureTimeStamp = timeStamp; } else { stats.lastFailureTimeStamp = timeStamp; stats.failureCount++; if (stats.failureCount >= maxConsecutiveContainerFailures) { LOG.info("Node {} failed {} times consecutively within {} minutes, marking the node blacklisted", hostname, stats.failureCount, blacklistRemovalTime / (60 * 1000)); blacklistAdditions.add(hostname); failedBlackListedNodes.add(hostname); } } } else { failedContainerNodesMap.put(hostname, new NodeFailureStats(System.currentTimeMillis(), 1)); } } } } // if (exitStatus == 1) { // // non-recoverable StreamingContainer failure // appDone = true; // finalStatus = FinalApplicationStatus.FAILED; // dnmgr.shutdownDiagnosticsMessage = "Unrecoverable failure " + containerStatus.getContainerId(); // LOG.info("Exiting due to: {}", dnmgr.shutdownDiagnosticsMessage); // } // else { // Recoverable failure or process killed (externally or via stop request by AM) // also occurs when a container was released by the application but 
never assigned/launched LOG.debug("Container {} failed or killed.", containerStatus.getContainerId()); dnmgr.scheduleContainerRestart(containerStatus.getContainerId().toString()); // } } else { // container completed successfully numCompletedContainers.incrementAndGet(); LOG.info("Container completed successfully." + ", containerId=" + containerStatus.getContainerId()); // Reset counter for node failure, if exists String hostname = allocatedContainer.container.getNodeId().getHost(); NodeFailureStats stats = failedContainerNodesMap.get(hostname); if (stats != null) { stats.failureCount = 0; } } String containerIdStr = containerStatus.getContainerId().toString(); dnmgr.removeContainerAgent(containerIdStr); // record container stop event StramEvent ev = new StramEvent.StopContainerEvent(containerIdStr, containerStatus.getExitStatus()); ev.setReason(containerStatus.getDiagnostics()); dnmgr.recordEventAsync(ev); } if (!blacklistAdditions.isEmpty()) { amRmClient.updateBlacklist(blacklistAdditions, null); long timeStamp = System.currentTimeMillis(); for (String hostname : blacklistAdditions) { NodeFailureStats stats = failedContainerNodesMap.get(hostname); stats.blackListAdditionTime = timeStamp; } } if (dnmgr.forcedShutdown) { LOG.info("Forced shutdown due to {}", dnmgr.shutdownDiagnosticsMessage); finalStatus = FinalApplicationStatus.FAILED; appDone = true; } else if (allocatedContainers.isEmpty() && numRequestedContainers == 0 && dnmgr.containerStartRequests.isEmpty()) { LOG.debug("Exiting as no more containers are allocated or requested"); finalStatus = FinalApplicationStatus.SUCCEEDED; appDone = true; } LOG.debug("Current application state: loop={}, appDone={}, requested={}, released={}, completed={}, failed={}, currentAllocated={}, dnmgr.containerStartRequests={}", loopCounter, appDone, numRequestedContainers, numReleasedContainers, numCompletedContainers, numFailedContainers, allocatedContainers.size(), dnmgr.containerStartRequests); // monitor child containers 
dnmgr.monitorHeartbeat(waitForRecovery > 0); waitForRecovery = Math.max(waitForRecovery - 1, 0); } finishApplication(finalStatus); } private void finishApplication(FinalApplicationStatus finalStatus) throws YarnException, IOException { LOG.info("Application completed. Signalling finish to RM"); FinishApplicationMasterRequest finishReq = Records.newRecord(FinishApplicationMasterRequest.class); finishReq.setFinalApplicationStatus(finalStatus); if (finalStatus != FinalApplicationStatus.SUCCEEDED) { String diagnostics = "Diagnostics." + " completed=" + numCompletedContainers.get() + ", allocated=" + allocatedContainers.size() + ", failed=" + numFailedContainers.get(); if (!StringUtils.isEmpty(dnmgr.shutdownDiagnosticsMessage)) { diagnostics += "\n"; diagnostics += dnmgr.shutdownDiagnosticsMessage; } // YARN-208 - as of 2.0.1-alpha dropped by the RM finishReq.setDiagnostics(diagnostics); // expected termination of the master process // application status and diagnostics message are set above } LOG.info("diagnostics: " + finishReq.getDiagnostics()); amRmClient.unregisterApplicationMaster(finishReq.getFinalApplicationStatus(), finishReq.getDiagnostics(), null); } private Token<StramDelegationTokenIdentifier> allocateDelegationToken(String username, InetSocketAddress address) { StramDelegationTokenIdentifier identifier = new StramDelegationTokenIdentifier(new Text(username), new Text(""), new Text("")); String service = address.getAddress().getHostAddress() + ":" + address.getPort(); Token<StramDelegationTokenIdentifier> stramToken = new Token<>(identifier, delegationTokenManager); stramToken.setService(new Text(service)); return stramToken; } /** * Check for containers that were allocated in a previous attempt. * If the containers are still alive, wait for them to check in via heartbeat. 
*/
  private void previouslyAllocatedContainers(List<Container> containers)
  {
    // Re-register containers surviving from a prior AM attempt and ask the NM
    // (asynchronously) for their current status.
    for (Container container : containers) {
      this.allocatedContainers.put(container.getId().toString(), new AllocatedContainer(container));
      //check the status
      nmClient.getContainerStatusAsync(container.getId(), container.getNodeId());
    }
  }

  /**
   * Ask RM to allocate given no. of containers to this Application Master.
   *
   * Also applies pending request removals, forwards container releases, and issues
   * async stop requests for containers the streaming container manager wants stopped.
   * The final {@code allocate(0)} call doubles as the AM heartbeat to the RM.
   *
   * @param containerRequests Containers to ask for from RM
   * @param removedContainerRequests Container requests to be removed
   * @param releasedContainers container ids to hand back to the RM
   * @return Response from RM to AM with allocated containers
   * @throws YarnException
   */
  private AllocateResponse sendContainerAskToRM(List<ContainerRequest> containerRequests, List<ContainerRequest> removedContainerRequests, List<ContainerId> releasedContainers) throws YarnException, IOException
  {
    if (removedContainerRequests.size() > 0) {
      LOG.debug("Removing container request: {}", removedContainerRequests);
      for (ContainerRequest cr : removedContainerRequests) {
        amRmClient.removeContainerRequest(cr);
      }
    }
    if (containerRequests.size() > 0) {
      LOG.debug("Asking RM for containers: {}", containerRequests);
      for (ContainerRequest cr : containerRequests) {
        amRmClient.addContainerRequest(cr);
      }
    }
    for (ContainerId containerId : releasedContainers) {
      LOG.info("Released container, id={}", containerId.getId());
      amRmClient.releaseAssignedContainer(containerId);
    }
    // NOTE(review): this removes entries from dnmgr.containerStopRequests while
    // iterating its values() view — safe only if the backing map tolerates
    // concurrent modification (e.g. ConcurrentHashMap); confirm the map type in
    // the streaming container manager.
    for (String containerIdStr : dnmgr.containerStopRequests.values()) {
      AllocatedContainer allocatedContainer = this.allocatedContainers.get(containerIdStr);
      if (allocatedContainer != null && !allocatedContainer.stopRequested) {
        // stop is async; the flag prevents issuing duplicate stop requests
        nmClient.stopContainerAsync(allocatedContainer.container.getId(), allocatedContainer.container.getNodeId());
        LOG.info("Requested stop container {}", containerIdStr);
        allocatedContainer.stopRequested = true;
      }
      dnmgr.containerStopRequests.remove(containerIdStr);
    }
    return amRmClient.allocate(0);
  }

  /**
   * Async NM client callbacks. Containers that fail to start, stop, or report
   * status are scheduled for recovery on the main loop via {@code pendingTasks}.
   */
  private class NMCallbackHandler implements NMClientAsync.CallbackHandler
  {
    NMCallbackHandler()
    {
    }

    @Override
    public void onContainerStopped(ContainerId containerId)
    {
      LOG.debug("Succeeded to stop Container {}", containerId);
    }

    @Override
    public void onContainerStatusReceived(ContainerId containerId, ContainerStatus containerStatus)
    {
      LOG.debug("Container Status: id={}, status={}", containerId, containerStatus);
      // a previously-allocated container that is no longer RUNNING must be recovered
      if (containerStatus.getState() != ContainerState.RUNNING) {
        recoverContainer(containerId);
      }
    }

    @Override
    public void onContainerStarted(ContainerId containerId, Map<String, ByteBuffer> allServiceResponse)
    {
      LOG.debug("Succeeded to start Container {}", containerId);
    }

    @Override
    public void onStartContainerError(ContainerId containerId, Throwable t)
    {
      LOG.error("Start container failed for: containerId={}", containerId, t);
    }

    @Override
    public void onGetContainerStatusError(ContainerId containerId, Throwable t)
    {
      LOG.error("Failed to query the status of {}", containerId, t);
      // if the NM is not reachable, consider container lost and recover (occurs during AM recovery)
      recoverContainer(containerId);
    }

    @Override
    public void onStopContainerError(ContainerId containerId, Throwable t)
    {
      LOG.warn("Failed to stop container {}", containerId, t);
      // container could not be stopped, we won't receive a stop event from AM heartbeat
      // short circuit and schedule recovery directly
      recoverContainer(containerId);
    }

    // Defers restart scheduling to the AM main loop thread (which drains pendingTasks),
    // so this callback thread never mutates dnmgr/allocatedContainers directly.
    private void recoverContainer(final ContainerId containerId)
    {
      pendingTasks.add(new Runnable()
      {
        @Override
        public void run()
        {
          dnmgr.scheduleContainerRestart(containerId.toString());
          allocatedContainers.remove(containerId.toString());
        }
      });
    }

  }

  /**
   * Book-keeping wrapper for a YARN container this AM owns: tracks whether a stop
   * has already been requested and the per-container STRAM delegation token (if
   * security is enabled) so it can be cancelled on completion.
   */
  private class AllocatedContainer
  {
    private final Container container;
    private boolean stopRequested;
    private Token<StramDelegationTokenIdentifier> delegationToken;

    private AllocatedContainer(Container c)
    {
      container = c;
    }
  }

}
package com.github.sbugat.nqueens.solvers.backtracking.instrumentations;

import java.util.ArrayList;
import java.util.List;

import com.github.sbugat.nqueens.GenericInstrumentedNQueensSolver;

/**
 * Back-tracking algorithm for the N queens puzzle solver.
 *
 * Deliberately "slow"/naive instrumented variant: after each queen placement the
 * ENTIRE board is re-validated (lines, columns, both diagonal directions). The
 * inherited counters (implicitTestsCount, explicitTestsCount, methodCallsCount,
 * squareReadsCount, squareWritesCount, queenPlacementsCount) are incremented at
 * every corresponding operation, so the exact statement order IS the behavior —
 * do not restructure without re-deriving the counts.
 *
 * @author Sylvain Bugat
 */
public final class SlowBackTrackingNQueensSolver extends GenericInstrumentedNQueensSolver {

	/** Chessboard represented by a list of lists; 1 = queen, 0 = empty. */
	private List<List<Integer>> chessboard;

	/**
	 * Builds the solver and an all-empty chessboardSizeArg x chessboardSizeArg board.
	 *
	 * @param chessboardSizeArg board dimension (N)
	 * @param printSolutionArg whether found solutions are printed (handled by the parent)
	 */
	public SlowBackTrackingNQueensSolver(final int chessboardSizeArg, final boolean printSolutionArg) {

		super(chessboardSizeArg, printSolutionArg);

		chessboard = new ArrayList<>();
		for (int x = 0; x < chessboardSizeArg; x++) {
			final List<Integer> lineList = new ArrayList<>();
			for (int y = 0; y < chessboardSizeArg; y++) {
				lineList.add(Integer.valueOf(0));
			}
			chessboard.add(lineList);
		}
	}

	/**
	 * Runs the back-tracking search starting at line 0.
	 *
	 * @return the number of solutions found
	 */
	@Override
	public long solve() {

		// Start the algorithm at the first line
		methodCallsCount++;
		solve(0);

		// Return the number of solutions found
		return solutionCount;
	}

	/**
	 * Solving recursive method, do a back-tracking algorithm by testing all valid combinations.
	 *
	 * Tries a queen on every column of line y; every board access and test is
	 * mirrored by a counter increment.
	 *
	 * @param y line position on the chessboard
	 */
	private void solve(final int y) {

		implicitTestsCount++;
		for (int x = 0; x < chessboardSize; x++) {

			// Put a queen on the current position
			queenPlacementsCount++;
			methodCallsCount += 3;
			squareWritesCount++;
			chessboard.get(x).set(y, Integer.valueOf(1));

			methodCallsCount++;
			implicitTestsCount++;
			if (checkValidChessboard()) {

				// Last line: all queens are placed, a solution is found
				implicitTestsCount++;
				if (y + 1 >= chessboardSize) {
					solutionCount++;
					methodCallsCount++;
					print();
				} else {
					// Go to the next line
					methodCallsCount++;
					solve(y + 1);
				}
			}

			// Remove the current queen (back-track before trying the next column)
			methodCallsCount += 3;
			squareWritesCount++;
			chessboard.get(x).set(y, Integer.valueOf(0));

			implicitTestsCount++;
		}
	}

	/**
	 * Check if the current chessboard is valid (only one queen per line, column and diagonal).
	 *
	 * Full-board scan: lines, then columns, then descending diagonals, then
	 * ascending diagonals; returns false at the first conflict found.
	 *
	 * @return true if the chessboard is valid, false otherwise
	 */
	private boolean checkValidChessboard() {

		// Check if 2 queens are on the same line
		implicitTestsCount++;
		for (int y = 0; y < chessboardSize; y++) {

			boolean usedLine = false;
			implicitTestsCount++;
			for (int x = 0; x < chessboardSize; x++) {

				explicitTestsCount++;
				methodCallsCount += 3;
				squareReadsCount++;
				if (1 == chessboard.get(x).get(y).intValue()) {
					explicitTestsCount++;
					if (usedLine) {
						return false;
					}
					usedLine = true;
				}
				implicitTestsCount++;
			}
			// NOTE(review): two increments here vs. a single one at the same point in the
			// column check below — asymmetry looks unintentional; confirm against the
			// instrumentation model before relying on implicitTestsCount totals.
			implicitTestsCount++;
			implicitTestsCount++;
		}

		// Check if 2 queens are on the same column
		implicitTestsCount++;
		for (int x = 0; x < chessboardSize; x++) {

			boolean usedColumn = false;
			implicitTestsCount++;
			for (int y = 0; y < chessboardSize; y++) {

				explicitTestsCount++;
				methodCallsCount += 3;
				squareReadsCount++;
				if (1 == chessboard.get(x).get(y).intValue()) {
					explicitTestsCount++;
					if (usedColumn) {
						return false;
					}
					usedColumn = true;
				}
				implicitTestsCount++;
			}
			implicitTestsCount++;
		}

		// Check if 2 queens are on the same descending diagonal (x = diagonal - y)
		implicitTestsCount++;
		for (int diagonal = 0; diagonal < chessboardSize * 2 - 1; diagonal++) {

			boolean usedDiagonal = false;
			implicitTestsCount++;
			for (int y = 0; y < chessboardSize; y++) {

				final int x = diagonal - y;
				explicitTestsCount++;
				if (x >= 0) {
					explicitTestsCount++;
					if (x < chessboardSize) {

						explicitTestsCount++;
						methodCallsCount += 3;
						squareReadsCount++;
						if (1 == chessboard.get(x).get(y).intValue()) {
							explicitTestsCount++;
							if (usedDiagonal) {
								return false;
							}
							usedDiagonal = true;
						}
					}
				}
				implicitTestsCount++;
			}
			implicitTestsCount++;
		}

		// Check if 2 queens are on the same ascending diagonal (x = diagonal - N + 1 + y)
		implicitTestsCount++;
		for (int diagonal = 0; diagonal < chessboardSize * 2 - 1; diagonal++) {

			boolean usedDiagonal = false;
			implicitTestsCount++;
			for (int y = 0; y < chessboardSize; y++) {

				final int x = diagonal - chessboardSize + 1 + y;
				explicitTestsCount++;
				if (x >= 0) {
					explicitTestsCount++;
					if (x < chessboardSize) {

						explicitTestsCount++;
						methodCallsCount += 3;
						squareReadsCount++;
						if (1 == chessboard.get(x).get(y).intValue()) {
							explicitTestsCount++;
							if (usedDiagonal) {
								return false;
							}
							usedDiagonal = true;
						}
					}
				}
				implicitTestsCount++;
			}
			implicitTestsCount++;
		}

		return true;
	}

	/**
	 * Resets counters (via the parent) and rebuilds the board only when the
	 * configured size changed; otherwise the existing board is reused (solve()
	 * always removes every queen it places, so it ends empty).
	 */
	@Override
	public void reset() {

		super.reset();

		if (chessboard.size() != chessboardSize) {
			chessboard = new ArrayList<>();
			for (int x = 0; x < chessboardSize; x++) {
				final List<Integer> lineList = new ArrayList<>();
				for (int y = 0; y < chessboardSize; y++) {
					lineList.add(Integer.valueOf(0));
				}
				chessboard.add(lineList);
			}
		}
	}

	/**
	 * @return true if a queen occupies square (x, y)
	 */
	@Override
	public boolean getChessboardPosition(final int x, final int y) {
		return 1 == chessboard.get(x).get(y).intValue();
	}

	/** @return human-readable solver name used in reports */
	@Override
	public String getName() {
		return "List back-tracking";
	}
}
/* * $Id: PortletUrlTagTest.java 609901 2008-01-08 08:18:23Z nilsga $ * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.struts2.views.jsp; import com.mockobjects.servlet.MockJspWriter; import com.opensymphony.xwork2.ActionContext; import com.opensymphony.xwork2.ActionInvocation; import com.opensymphony.xwork2.ActionProxy; import com.opensymphony.xwork2.util.ValueStack; import com.opensymphony.xwork2.util.ValueStackFactory; import junit.textui.TestRunner; import org.apache.struts2.dispatcher.Dispatcher; import org.apache.struts2.dispatcher.mapper.ActionMapping; import org.apache.struts2.portlet.PortletConstants; import org.apache.struts2.portlet.PortletPhase; import org.apache.struts2.portlet.util.PortletUrlHelper; import org.jmock.Mock; import org.jmock.cglib.MockObjectTestCase; import org.jmock.core.Constraint; import javax.portlet.PortletContext; import javax.portlet.PortletMode; import javax.portlet.PortletURL; import javax.portlet.RenderRequest; import javax.portlet.RenderResponse; import javax.portlet.WindowState; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.jsp.PageContext; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; 
import java.util.Iterator;
import java.util.Map;

import static org.apache.struts2.StrutsStatics.STRUTS_PORTLET_CONTEXT;

/**
 * Unit tests for {@link URLTag} rendered inside a portlet (render phase).
 *
 * Uses jMock 1.x ({@link MockObjectTestCase}) to mock the servlet/portlet
 * request-response pair and the {@link PortletURL} that the tag is expected
 * to configure. Each test sets expectations on the mock portlet URL
 * (parameters, portlet mode, window state) and then drives the tag through
 * doStartTag()/doEndTag(); verification happens automatically in jMock's
 * tearDown.
 */
@SuppressWarnings("unchecked")
public class PortletUrlTagTest extends MockObjectTestCase {

    // Tag under test; re-created per test instance.
    URLTag tag = new URLTag();

    // Servlet-side mocks (the tag runs in a JSP page context).
    Mock mockHttpReq = null;
    Mock mockHttpRes = null;
    // Portlet-side mocks (render request/response and the URL being built).
    Mock mockPortletReq = null;
    Mock mockPortletRes = null;
    Mock mockPageCtx = null;
    Mock mockPortletUrl = null;
    MockJspWriter mockJspWriter = null;
    Mock mockCtx = null;
    ValueStack stack = null;
    Mock mockActionProxy = null;
    Mock mockActionInvocation = null;

    public static void main(String[] args) {
        TestRunner.run(PortletUrlTagTest.class);
    }

    /**
     * Wires a full Struts2-in-a-portlet environment out of mocks:
     * a Dispatcher with a fresh value stack, stubbed HTTP and portlet
     * request/response objects, the mode-to-namespace map, and the default
     * action map, then publishes everything through a new ActionContext.
     */
    public void setUp() throws Exception {
        super.setUp();
        Dispatcher du = new Dispatcher(null, new HashMap());
        du.init();
        Dispatcher.setInstance(du);
        stack = du.getContainer().getInstance(ValueStackFactory.class).createValueStack();
        stack.getContext().put(ActionContext.CONTAINER, du.getContainer());
        ActionContext context = new ActionContext(stack.getContext());
        ActionContext.setContext(context);

        mockActionInvocation = mock(ActionInvocation.class);
        mockActionProxy = mock(ActionProxy.class);
        mockHttpReq = mock(HttpServletRequest.class);
        mockHttpRes = mock(HttpServletResponse.class);
        mockPortletReq = mock(RenderRequest.class);
        mockPortletRes = mock(RenderResponse.class);
        mockPageCtx = mock(PageContext.class);
        mockPortletUrl = mock(PortletURL.class);
        mockJspWriter = new MockJspWriter();
        mockCtx = mock(PortletContext.class);

        // The currently executing action lives in namespace /view.
        mockActionProxy.stubs().method("getNamespace").will(returnValue("/view"));
        mockActionInvocation.stubs().method("getProxy").will(returnValue(
                mockActionProxy.proxy()));

        mockPageCtx.stubs().method("getRequest").will(
                returnValue(mockHttpReq.proxy()));
        mockPageCtx.stubs().method("getResponse").will(
                returnValue(mockHttpRes.proxy()));
        mockPageCtx.stubs().method("getOut").will(returnValue(mockJspWriter));

        mockHttpReq.stubs().method("getScheme").will(returnValue("http"));
        // The portlet request/response are exposed to the tag as servlet
        // request attributes, as the portlet container's dispatcher would do.
        mockHttpReq.stubs().method("getAttribute").with(
                eq("struts.valueStack")).will(returnValue(stack));
        mockHttpReq.stubs().method("getAttribute").with(
                eq("javax.portlet.response")).will(
                returnValue(mockPortletRes.proxy()));
        mockHttpReq.stubs().method("getAttribute").with(
                eq("javax.portlet.request")).will(
                returnValue(mockPortletReq.proxy()));
        mockHttpReq.stubs().method("getAttribute").with(
                eq("javax.servlet.include.servlet_path")).will(
                returnValue("/servletPath"));
        mockHttpReq.stubs().method("getParameterMap").will(
                returnValue(Collections.emptyMap()));

        // Current portlet state: VIEW mode, NORMAL window state.
        mockPortletReq.stubs().method("getPortletMode").will(returnValue(PortletMode.VIEW));
        mockPortletReq.stubs().method("getWindowState").will(returnValue(WindowState.NORMAL));
        mockPortletReq.stubs().method("getContextPath").will(returnValue("/contextPath"));

        tag.setPageContext((PageContext) mockPageCtx.proxy());

        // Portlet mode -> Struts namespace mapping used by PortletUrlHelper.
        Map modeMap = new HashMap();
        modeMap.put(PortletMode.VIEW, "/view");
        modeMap.put(PortletMode.HELP, "/help");
        modeMap.put(PortletMode.EDIT, "/edit");
        // Default action per portlet mode (used when no action is given).
        Map<PortletMode,ActionMapping> actionMap = new HashMap<PortletMode,ActionMapping>();
        actionMap.put(PortletMode.VIEW, new ActionMapping("defaultView", "/view", "execute", new HashMap<String,Object>()));
        actionMap.put(PortletMode.HELP, new ActionMapping("defaultHelp", "/help", "execute", new HashMap<String,Object>()));
        actionMap.put(PortletMode.EDIT, new ActionMapping("defaultEdit", "/edit", "execute", new HashMap<String,Object>()));

        Map sessionMap = new HashMap();
        Map contextMap = new HashMap();
        contextMap.put(ActionContext.SESSION, sessionMap);
        contextMap.put(PortletConstants.REQUEST, mockPortletReq.proxy());
        contextMap.put(PortletConstants.RESPONSE, mockPortletRes.proxy());
        contextMap.put(PortletConstants.PHASE, PortletPhase.RENDER_PHASE);
        contextMap.put(PortletConstants.MODE_NAMESPACE_MAP, modeMap);
        contextMap.put(PortletConstants.DEFAULT_ACTION_MAP, actionMap);
        contextMap.put(STRUTS_PORTLET_CONTEXT, mockCtx.proxy());

        ActionContext ctx = new ActionContext(contextMap);
        ctx.setValueStack(stack);
        ActionInvocation ai = (ActionInvocation)mockActionInvocation.proxy();
        stack.getContext().put(ActionContext.ACTION_INVOCATION, ai);
        ActionContext.setContext(ctx);
    }

    /**
     * PortletUrlHelper.ensureParamsAreStringArrays must wrap scalar values
     * in single-element String arrays while leaving arrays untouched.
     */
    public void testEnsureParamsAreStringArrays() {
        Map params = new HashMap();
        params.put("param1", "Test1");
        params.put("param2", new String[] { "Test2" });

        Map result = PortletUrlHelper.ensureParamsAreStringArrays(params);
        assertEquals(2, result.size());
        assertTrue(result.get("param1") instanceof String[]);
    }

    /**
     * setWindowState("maximized") must translate to
     * PortletURL.setWindowState(WindowState.MAXIMIZED).
     */
    public void testSetWindowState() throws Exception {

        PortletMode mode = PortletMode.VIEW;
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("createRenderURL").will(
                returnValue(mockPortletUrl.proxy()));
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));

        Map paramMap = new HashMap();
        paramMap.put(PortletConstants.ACTION_PARAM, new String[]{"/view/testAction"});
        paramMap.put(PortletConstants.MODE_PARAM, new String[]{mode.toString()});

        mockPortletUrl.expects(once()).method("setParameters").with(new ParamMapConstraint(paramMap));
        mockPortletUrl.expects(once()).method("setWindowState").with(eq(WindowState.MAXIMIZED));
        mockPortletUrl.expects(once()).method("setPortletMode").with(eq(PortletMode.VIEW));

        tag.setAction("testAction");
        tag.setWindowState("maximized");
        tag.doStartTag();
        tag.doEndTag();
    }

    /**
     * setPortletMode("help") must switch the URL to HELP mode and route the
     * action into the /help namespace.
     */
    public void testSetPortletMode() throws Exception {

        PortletMode mode = PortletMode.HELP;
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("createRenderURL").will(
                returnValue(mockPortletUrl.proxy()));
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));

        Map paramMap = new HashMap();
        paramMap.put(PortletConstants.ACTION_PARAM, new String[]{"/help/testAction"});
        paramMap.put(PortletConstants.MODE_PARAM, new String[]{mode.toString()});

        mockPortletUrl.expects(once()).method("setParameters").with(new ParamMapConstraint(paramMap));
        mockPortletUrl.expects(once()).method("setPortletMode").with(eq(PortletMode.HELP));
        mockPortletUrl.expects(once()).method("setWindowState").with(eq(WindowState.NORMAL));

        tag.setNamespace("/help");
        tag.setAction("testAction");
        tag.setPortletMode("help");
        tag.doStartTag();
        tag.doEndTag();
    }

    /**
     * When only the portlet mode changes and no action is set, the tag must
     * fall back to the default action mapped for that mode (defaultHelp).
     */
    public void testWhenPortletModeDiffersFromCurrentAndNoParametersAreSetRenderTheDefaults() throws Exception {
        PortletMode mode = PortletMode.HELP;
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("createRenderURL").will(
                returnValue(mockPortletUrl.proxy()));
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));

        Map paramMap = new HashMap();
        paramMap.put(PortletConstants.ACTION_PARAM, new String[]{"/help/defaultHelp"});
        paramMap.put(PortletConstants.MODE_PARAM, new String[]{mode.toString()});

        mockPortletUrl.expects(once()).method("setParameters").with(new ParamMapConstraint(paramMap));
        mockPortletUrl.expects(once()).method("setPortletMode").with(eq(PortletMode.HELP));
        mockPortletUrl.expects(once()).method("setWindowState").with(eq(WindowState.NORMAL));

        tag.setPortletMode("help");
        tag.doStartTag();
        tag.doEndTag();
    }

    /**
     * Query-string parameters embedded in the action attribute must be
     * carried over into the portlet URL's parameter map.
     */
    public void testUrlWithQueryParams() throws Exception {

        PortletMode mode = PortletMode.VIEW;
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("createRenderURL").will(
                returnValue(mockPortletUrl.proxy()));
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));

        Map paramMap = new HashMap();
        paramMap.put(PortletConstants.ACTION_PARAM, new String[]{"/view/testAction"});
        paramMap.put("testParam1", new String[]{"testValue1"});
        paramMap.put(PortletConstants.MODE_PARAM, new String[]{mode.toString()});

        mockPortletUrl.expects(once()).method("setParameters").with(new ParamMapConstraint(paramMap));
        mockPortletUrl.expects(once()).method("setPortletMode").with(eq(PortletMode.VIEW));
        mockPortletUrl.expects(once()).method("setWindowState").with(eq(WindowState.NORMAL));

        tag.setAction("testAction?testParam1=testValue1");
        tag.doStartTag();
        tag.doEndTag();
    }

    /**
     * portletUrlType="action" must produce an action URL
     * (createActionURL, not createRenderURL).
     */
    public void testActionUrl() throws Exception {

        PortletMode mode = PortletMode.VIEW;
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("createActionURL").will(
                returnValue(mockPortletUrl.proxy()));
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));

        Map paramMap = new HashMap();
        paramMap.put(PortletConstants.ACTION_PARAM, new String[]{"/view/testAction"});
        paramMap.put(PortletConstants.MODE_PARAM, new String[]{mode.toString()});

        mockPortletUrl.expects(once()).method("setParameters").with(new ParamMapConstraint(paramMap));
        mockPortletUrl.expects(once()).method("setPortletMode").with(eq(PortletMode.VIEW));
        mockPortletUrl.expects(once()).method("setWindowState").with(eq(WindowState.NORMAL));

        tag.setNamespace("/view");
        tag.setAction("testAction");
        tag.setPortletUrlType("action");
        tag.doStartTag();
        tag.doEndTag();
    }

    /**
     * A plain value (no action) is treated as a resource URL: prefixed with
     * the portlet context path and passed through RenderResponse.encodeURL.
     */
    public void testResourceUrl() throws Exception {
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("encodeURL").will(returnValue("/contextPath/image.gif"));
        mockJspWriter.setExpectedData("/contextPath/image.gif");
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));
        tag.setValue("image.gif");
        tag.doStartTag();
        tag.doEndTag();
        mockJspWriter.verify();
    }

    /**
     * A nested &lt;s:param&gt; must be appended to the resource URL as a
     * query-string parameter before encoding.
     */
    public void testResourceUrlWithNestedParam() throws Exception {
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("encodeURL").with(eq("/contextPath/image.gif?testParam1=testValue1")).will(returnValue("/contextPath/image.gif?testParam1=testValue1"));
        mockJspWriter.setExpectedData("/contextPath/image.gif?testParam1=testValue1");
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));
        ParamTag paramTag = new ParamTag();
        paramTag.setPageContext((PageContext)mockPageCtx.proxy());
        paramTag.setParent(tag);
        paramTag.setName("testParam1");
        paramTag.setValue("'testValue1'");
        tag.setValue("image.gif");
        tag.doStartTag();
        paramTag.doStartTag();
        paramTag.doEndTag();
        tag.doEndTag();
        mockJspWriter.verify();
    }

    /**
     * Two nested &lt;s:param&gt; tags must both be appended, joined with
     * '&amp;', in the order they are processed.
     */
    public void testResourceUrlWithTwoNestedParam() throws Exception {
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("encodeURL").with(eq("/contextPath/image.gif?testParam1=testValue1&testParam2=testValue2")).will(returnValue("/contextPath/image.gif?testParam1=testValue1&testParam2=testValue2"));
        mockJspWriter.setExpectedData("/contextPath/image.gif?testParam1=testValue1&testParam2=testValue2");
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));
        ParamTag paramTag = new ParamTag();
        paramTag.setPageContext((PageContext)mockPageCtx.proxy());
        paramTag.setParent(tag);
        paramTag.setName("testParam1");
        paramTag.setValue("'testValue1'");
        ParamTag paramTag2 = new ParamTag();
        paramTag2.setPageContext((PageContext)mockPageCtx.proxy());
        paramTag2.setParent(tag);
        paramTag2.setName("testParam2");
        paramTag2.setValue("'testValue2'");
        tag.setValue("image.gif");
        tag.doStartTag();
        paramTag.doStartTag();
        paramTag.doEndTag();
        paramTag2.doStartTag();
        paramTag2.doEndTag();
        tag.doEndTag();
        mockJspWriter.verify();
    }

    /**
     * setMethod("input") must encode the method into the action parameter
     * using the bang notation: action!method.
     */
    public void testUrlWithMethod() throws Exception {
        PortletMode mode = PortletMode.VIEW;
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("createRenderURL").will(
                returnValue(mockPortletUrl.proxy()));
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));
        tag.setAction("testAction");

        Map paramMap = new HashMap();
        paramMap.put(PortletConstants.ACTION_PARAM, new String[]{"/view/testAction!input"});
        paramMap.put(PortletConstants.MODE_PARAM, new String[]{mode.toString()});

        mockPortletUrl.expects(once()).method("setParameters").with(new ParamMapConstraint(paramMap));
        mockPortletUrl.expects(once()).method("setPortletMode").with(eq(PortletMode.VIEW));
        mockPortletUrl.expects(once()).method("setWindowState").with(eq(WindowState.NORMAL));

        tag.setMethod("input");
        tag.doStartTag();
        tag.doEndTag();
    }

    /**
     * With neither action nor method set, the URL must point at the action
     * currently executing (taken from the ActionProxy).
     */
    public void testUrlWithNoActionOrMethod() throws Exception {
        PortletMode mode = PortletMode.VIEW;
        mockHttpReq.stubs().method("getQueryString").will(returnValue(""));
        mockPortletRes.expects(once()).method("createRenderURL").will(
                returnValue(mockPortletUrl.proxy()));
        mockCtx.expects(atLeastOnce()).method("getMajorVersion").will(returnValue(1));

        Map paramMap = new HashMap();
        mockActionProxy.stubs().method("getActionName").will(returnValue("currentExecutingAction"));
        paramMap.put(PortletConstants.ACTION_PARAM, new String[]{"/view/currentExecutingAction"});
        paramMap.put(PortletConstants.MODE_PARAM, new String[]{mode.toString()});

        mockPortletUrl.expects(once()).method("setParameters").with(new ParamMapConstraint(paramMap));
        mockPortletUrl.expects(once()).method("setPortletMode").with(eq(PortletMode.VIEW));
        mockPortletUrl.expects(once()).method("setWindowState").with(eq(WindowState.NORMAL));

        tag.doStartTag();
        tag.doEndTag();
    }

    /**
     * jMock Constraint that matches a Map&lt;String, String[]&gt; by deep
     * array equality (Map.equals would compare arrays by identity).
     */
    private static class ParamMapConstraint implements Constraint {

        private Map myExpectedMap = null;
        // Captured on eval() so describeTo can report what was actually seen.
        private Map myActualMap = null;

        public ParamMapConstraint(Map expectedMap) {
            if(expectedMap == null) {
                throw new IllegalArgumentException("Use an isNull constraint instead!");
            }
            myExpectedMap = expectedMap;
        }

        /**
         * Matches when both maps have the same size, the same keys, and each
         * String[] value is element-wise equal.
         * @see org.jmock.core.Constraint#eval(java.lang.Object)
         */
        public boolean eval(Object val) {
            myActualMap = (Map)val;
            boolean result = false;
            if(val != null) {
                if(myExpectedMap.size() == myActualMap.size()) {
                    Iterator keys = myExpectedMap.keySet().iterator();
                    boolean allSame = true;
                    while(keys.hasNext()) {
                        Object key = keys.next();
                        if(!myActualMap.containsKey(key)) {
                            allSame = false;
                            break;
                        }
                        else {
                            String[] expected = (String[])myExpectedMap.get(key);
                            String[] actual = (String[])myActualMap.get(key);
                            if(!Arrays.equals(expected, actual)) {
                                allSame = false;
                                break;
                            }
                        }
                    }
                    result = allSame;
                }
            }
            return result;
        }

        /**
         * Reports expected vs. actual parameter maps for failure messages.
         * @see org.jmock.core.SelfDescribing#describeTo(java.lang.StringBuffer)
         */
        public StringBuffer describeTo(StringBuffer sb) {
            sb.append("\n Expected: ");
            describeTo(myExpectedMap, sb);
            sb.append("\n Actual: ");
            describeTo(myActualMap, sb);
            return sb;
        }

        // Renders the map as "key=firstValue, ..." (only the first array
        // element of each value is shown).
        private StringBuffer describeTo(Map map, StringBuffer sb) {
            Iterator<String> it = map.keySet().iterator();
            while(it.hasNext()) {
                String key = it.next();
                sb.append(key).append("=");
                String[] value = (String[])map.get(key);
                sb.append(value[0]);
                if(it.hasNext()) {
                    sb.append(", ");
                }
            }
            return sb;
        }
    }
}
package it.unibz.krdb.sql.api; /* * #%L * ontop-obdalib-core * %% * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import it.unibz.krdb.obda.parser.AggregationVisitor; import it.unibz.krdb.obda.parser.AliasMapVisitor; import it.unibz.krdb.obda.parser.ColumnsVisitor; import it.unibz.krdb.obda.parser.JoinConditionVisitor; import it.unibz.krdb.obda.parser.ProjectionVisitor; import it.unibz.krdb.obda.parser.WhereClauseVisitor; import it.unibz.krdb.obda.parser.SubSelectVisitor; import it.unibz.krdb.obda.parser.TableNameVisitor; import java.io.Serializable; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import net.sf.jsqlparser.JSQLParserException; import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.parser.CCJSqlParserUtil; import net.sf.jsqlparser.statement.Statement; import net.sf.jsqlparser.statement.select.Select; /** * A structure to store the parsed SQL query string. 
It returns the information * about the query using the visitor classes */ public class ParsedSQLQuery implements Serializable { private static final long serialVersionUID = -4590590361733833782L; private String query; private Statement stm; boolean deepParsing = false; // used to remove all quotes from the query private Select selectQuery; // the parsed query public static Pattern pQuotes = Pattern.compile("[\"`\\['][^\\.]*[\"`\\]']"); private List<RelationJSQL> tables; private List<SelectJSQL> subSelects; private Map<String, String> aliasMap; private List<Expression> joins; private SelectionJSQL whereClause; private ProjectionJSQL projection; private AggregationJSQL groupByClause; /** * Parse a query given as a String * * @param queryString * the SQL query to parse * @param deepParsing * if true removes quotes from columns and generate exceptions * for unsupported query in the mapping otherwise, keeps the * quotes from the columns, and support all the SQLs which can be * parsed by JSQLParser * @throws JSQLParserException */ public ParsedSQLQuery(String queryString, boolean deepParsing) throws JSQLParserException { /** * pattern used to remove quotes from the beginning and the end of * columns */ query = queryString; this.deepParsing = deepParsing; stm = CCJSqlParserUtil.parse(query); if (stm instanceof Select) { selectQuery = (Select) stm; // getting the values we also eliminate or handle the quotes if // deepParsing is set to true if (deepParsing) { tables = getTables(); whereClause = getWhereClause(); projection = getProjection(); joins = getJoinConditions(); aliasMap = getAliasMap(); groupByClause = getGroupByClause(); } } // catch exception about wrong inserted columns else throw new JSQLParserException( "The inserted query is not a SELECT statement"); } /** * The query is not parsed again * * @param statement * we pass already a parsed statement * @param deepParsing * if true removes quotes from columns and generate exceptions * for unsupported query in the 
mapping * @throws JSQLParserException */ public ParsedSQLQuery(Statement statement, boolean deepParsing) throws JSQLParserException { // pQuotes = Pattern.compile("[\"`\\[].*[\"`\\]]"); query = statement.toString(); stm = statement; this.deepParsing = deepParsing; if (stm instanceof Select) { selectQuery = (Select) stm; /** * Getting the values we also eliminate or handle the quotes if * deepParsing is set to true and we throw errors for unsupported values */ if (deepParsing) { tables = getTables(); whereClause = getWhereClause(); projection = getProjection(); joins = getJoinConditions(); aliasMap = getAliasMap(); groupByClause = getGroupByClause(); } } // catch exception about wrong inserted columns else throw new JSQLParserException( "The inserted query is not a SELECT statement"); } /** * Unquote the query and throw errors for unsupported values * * @throws JSQLParserException */ public void deepParsing() throws JSQLParserException { this.deepParsing = true; tables = getTables(); whereClause = getWhereClause(); projection = getProjection(); joins = getJoinConditions(); aliasMap = getAliasMap(); groupByClause = getGroupByClause(); } /** * Unquote the query * * @throws JSQLParserException */ public void unquote() throws JSQLParserException { tables = getTables(); whereClause = getWhereClause(); projection = getProjection(); joins = getJoinConditions(); aliasMap = getAliasMap(); groupByClause = getGroupByClause(); } @Override public String toString() { return selectQuery.toString(); } /** * Returns all the tables in this query. */ public List<RelationJSQL> getTables() throws JSQLParserException { if (tables == null) { TableNameVisitor visitor = new TableNameVisitor(); tables = visitor.getTables(selectQuery, deepParsing); } return tables; } /** * Returns all the subSelect in this query . 
*/ public List<SelectJSQL> getSubSelects() { if (subSelects == null) { SubSelectVisitor visitor = new SubSelectVisitor(); subSelects = visitor.getSubSelects(selectQuery, deepParsing); } return subSelects; } /** * Get the string construction of alias name. */ public Map<String, String> getAliasMap() { if (aliasMap == null) { AliasMapVisitor visitor = new AliasMapVisitor(); aliasMap = visitor.getAliasMap(selectQuery, deepParsing); } return aliasMap; } /** * Get the string construction of the join condition. The string has the * format of "VAR1=VAR2". */ public List<Expression> getJoinConditions() throws JSQLParserException { if (joins == null) { JoinConditionVisitor visitor = new JoinConditionVisitor(); joins = visitor.getJoinConditions(selectQuery, deepParsing); } return joins; } /** * Get the object construction for the WHERE clause. * * @throws JSQLParserException */ public SelectionJSQL getWhereClause() throws JSQLParserException { if (whereClause == null) { WhereClauseVisitor visitor = new WhereClauseVisitor(); whereClause = visitor.getWhereClause(selectQuery, deepParsing); } return whereClause; } /** * Get the object construction for the SELECT clause. 
* * @throws JSQLParserException */ public ProjectionJSQL getProjection() throws JSQLParserException { if (projection == null) { ProjectionVisitor visitor = new ProjectionVisitor(); projection = visitor.getProjection(selectQuery, deepParsing); } return projection; } /** * Get the list of columns do not remove quotes * * @return */ public List<String> getColumns() { ColumnsVisitor visitor = new ColumnsVisitor(); return visitor.getColumns(selectQuery); } /** * Set the object construction for the SELECT clause, modifying the current * statement * * @param projection */ public void setProjection(ProjectionJSQL projection) { ProjectionVisitor visitor = new ProjectionVisitor(); visitor.setProjection(selectQuery, projection); this.projection = projection; } /** * Set the object construction for the WHERE clause, modifying the current * statement * * @param whereClause */ public void setWhereClause(SelectionJSQL whereClause) { WhereClauseVisitor sel = new WhereClauseVisitor(); sel.setWhereClause(selectQuery, whereClause); this.whereClause = whereClause; } /** * Constructs the GROUP BY statement based on the Aggregation object. */ public AggregationJSQL getGroupByClause() { if (groupByClause == null) { AggregationVisitor agg = new AggregationVisitor(); groupByClause = agg.getAggregation(selectQuery, deepParsing); } return groupByClause; } public Statement getStatement() { return selectQuery; } }
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.airavata.common.utils; import org.apache.airavata.common.exception.ApplicationSettingsException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.HashMap; import java.util.Map; public class ServerSettings extends ApplicationSettings { private static final Logger log = LoggerFactory.getLogger(ServerSettings.class); private static final String DEFAULT_USER = "default.registry.user"; private static final String DEFAULT_USER_PASSWORD = "default.registry.password"; private static final String DEFAULT_USER_GATEWAY = "default.registry.gateway"; private static final String ENABLE_SHARING = "enable.sharing"; public static final String IP = "ip"; private static final String API_SERVER_TLS_ENABLED = "apiserver.tls.enabled"; private static final String API_SERVER_KEYSTORE = "apiserver.keystore"; private static final String API_SERVER_KEYSTORE_PASSWD = "apiserver.keystore.password"; // Orchestrator Constants public static final String ORCHESTRATOR_SERVER_HOST = "orchestrator.server.host"; public static final String ORCHESTRATOR_SERVER_PORT = "orchestrator.server.port"; public static final String 
ORCHESTRATOR_SERVER_NAME = "orchestrator.server.name";
    // Gfac constants
    public static final String GFAC_SERVER_HOST = "gfac.server.host";
    public static final String GFAC_SERVER_PORT = "gfac.server.port";
    public static final String GFAC_SERVER_NAME = "gfac.server.name";
    public static final String GFAC_THREAD_POOL_SIZE = "gfac.thread.pool.size";
    public static final int DEFAULT_GFAC_THREAD_POOL_SIZE = 50;
    public static final String GFAC_CONFIG_XML = "gfac-config.xml";
    // Credential Store constants
    public static final String CREDENTIAL_SERVER_HOST = "credential.store.server.host";
    public static final String CREDENTIAL_SERVER_PORT = "credential.store.server.port";
    // Zookeeper + curator constants
    public static final String EMBEDDED_ZK = "embedded.zk";
    public static final String ZOOKEEPER_SERVER_CONNECTION = "zookeeper.server.connection";
    public static final String ZOOKEEPER_TIMEOUT = "zookeeper.timeout";
    // Aurora Scheduler Constants
    public static final String AURORA_SCHEDULER_HOSTS = "aurora.scheduler.hosts";
    public static final String AURORA_EXECUTOR_NAME = "aurora.executor.name";
    public static final String MESOS_CLUSTER_NAME = "mesos.cluster.name";
    public static final String AURORA_SCHEDULER_CONNECT_TIMEOUT_MS = "aurora.scheduler.timeoutms";
    public static final String AURORA_EXECUTOR_CONFIG_TEMPLATE_FILE = "aurora.executor.config.template.filename";
    // Credential store JDBC settings (fall back to the registry JDBC
    // settings in the corresponding getters when absent).
    private static final String CREDENTIAL_STORE_DB_URL = "credential.store.jdbc.url";
    private static final String CREDENTIAL_STORE_DB_USER = "credential.store.jdbc.user";
    private static final String CREDENTIAL_STORE_DB_PASSWORD = "credential.store.jdbc.password";
    private static final String CREDENTIAL_STORE_DB_DRIVER = "credential.store.jdbc.driver";
    // Sharing registry service endpoint
    private static final java.lang.String SHARING_REGISTRY_PORT = "sharing.registry.server.port";
    private static final java.lang.String SHARING_REGISTRY_HOST = "sharing.registry.server.host";
    // User-profile MongoDB endpoint (non-final — presumably never reassigned; verify)
    private static String USER_PROFILE_MONGODB_HOST = "userprofile.mongodb.host";
    private static String USER_PROFILE_MONGODB_PORT = "userprofile.mongodb.port";
    // Registry JDBC settings
    private static final String REGISTRY_DB_URL = "registry.jdbc.url";
    private static final String REGISTRY_DB_USER = "registry.jdbc.user";
    private static final String REGISTRY_DB_PASSWORD = "registry.jdbc.password";
    private static final String REGISTRY_DB_DRIVER = "registry.jdbc.driver";
    private static final String HOST_SCHEDULER = "host.scheduler";
    // MyProxy credential retrieval settings
    private static final String MY_PROXY_SERVER = "myproxy.server";
    private static final String MY_PROXY_USER = "myproxy.user";
    private static final String MY_PROXY_PASSWORD = "myproxy.password";
    private static final String MY_PROXY_LIFETIME = "myproxy.life";
    // Job-completion email notification settings
    public static final String JOB_NOTIFICATION_ENABLE = "job.notification.enable";
    public static final String JOB_NOTIFICATION_EMAILIDS = "job.notification.emailids";
    public static final String JOB_NOTIFICATION_FLAGS = "job.notification.flags";
    // RabbitMQ messaging settings (broker, exchanges, launch queues)
    public static final String RABBITMQ_BROKER_URL = "rabbitmq.broker.url";
    public static final String RABBITMQ_STATUS_EXCHANGE_NAME = "rabbitmq.status.exchange.name";
    public static final String RABBITMQ_PROCESS_EXCHANGE_NAME = "rabbitmq.process.exchange.name";
    public static final String RABBITMQ_EXPERIMENT_EXCHANGE_NAME = "rabbitmq.experiment.exchange.name";
    public static final String RABBITMQ_PROCESS_LAUNCH_QUEUE_NAME = "process.launch.queue.name";
    public static final String RABBITMQ_EXPERIMENT_LAUNCH_QUEUE_NAME = "experiment.launch.queue.name";
    public static final String RABBITMQ_DURABLE_QUEUE="durable.queue";
    public static final String RABBITMQ_PREFETCH_COUNT="prefetch.count";
    // Workflow Enactment Service component configuration.
    private static final String ENACTMENT_THREAD_POOL_SIZE = "enactment.thread.pool.size";
    private static final int DEFAULT_ENACTMENT_THREAD_POOL_SIZE = 10;
    private static final String WORKFLOW_PARSER = "workflow.parser";
    // email based monitoring configurations
    private static final String EMAIL_BASED_MONITORING_PERIOD = "email.based.monitoring.period";
    private static final String EMAIL_BASED_MONITOR_HOST = "email.based.monitor.host";
    private static final String EMAIL_BASED_MONITOR_ADDRESS = "email.based.monitor.address";
    private static final String EMAIL_BASED_MONITOR_PASSWORD = "email.based.monitor.password";
    private static final String EMAIL_BASED_MONITOR_FOLDER_NAME = "email.based.monitor.folder.name";
    private static final String EMAIL_BASED_MONITOR_STORE_PROTOCOL = "email.based.monitor.store.protocol";
    private static final String ENABLE_EMAIL_BASED_MONITORING = "enable.email.based.monitoring";
    private static final String IS_RUNNING_ON_AWS = "isRunningOnAws";
    // Kafka log-shipping settings
    private static final String ENABLE_KAFKA_LOGGING = "enable.kafka.logging";
    private static final String KAFKA_BROKER_LIST = "kafka.broker.list";
    private static final String KAFKA_TOPIC_PREFIX = "kafka.topic.prefix";
    private static final String SERVER_ROLES = "server.roles";
    // User Profile constants
    public static final String USER_PROFILE_SERVER_HOST = "user.profile.server.host";
    public static final String USER_PROFILE_SERVER_PORT = "user.profile.server.port";
    /* Caching */
    private static final String SESSION_CACHE_ACCESS_TIME_OUT = "ssh.session.cache.access.timeout";
    // todo until AIRAVATA-2066 is finished, keep server side list configurations here.
private static Map<String, String[]> listConfigurations = new HashMap<>(); private static boolean stopAllThreads = false; private static boolean emailBaseNotificationEnable; private static String outputLocation; public static String getDefaultUser() throws ApplicationSettingsException { return getSetting(DEFAULT_USER); } public static String getRabbitmqProcessLaunchQueueName() { return getSetting(RABBITMQ_PROCESS_LAUNCH_QUEUE_NAME, "process.launch.queue"); } public static String getRabbitmqExperimentLaunchQueueName() { return getSetting(RABBITMQ_EXPERIMENT_EXCHANGE_NAME, "experiment.launch.queue"); } public static String getRabbitmqBrokerUrl() { return getSetting(RABBITMQ_BROKER_URL, "amqp://localhost:5672"); } public static String getRabbitmqStatusExchangeName(){ return getSetting(RABBITMQ_STATUS_EXCHANGE_NAME, "status_exchange"); } public static String getRabbitmqProcessExchangeName(){ return getSetting(RABBITMQ_PROCESS_EXCHANGE_NAME, "process_exchange"); } public static String getRabbitmqExperimentExchangeName() { return getSetting(RABBITMQ_EXPERIMENT_EXCHANGE_NAME, "experiment_exchange"); } public static boolean getRabbitmqDurableQueue(){ return Boolean.valueOf(getSetting(RABBITMQ_DURABLE_QUEUE, "false")); } public static int getRabbitmqPrefetchCount(){ return Integer.valueOf(getSetting(RABBITMQ_PREFETCH_COUNT, "200")); } public static String getDefaultUserPassword() throws ApplicationSettingsException { return getSetting(DEFAULT_USER_PASSWORD); } public static String getDefaultUserGateway() throws ApplicationSettingsException { return getSetting(DEFAULT_USER_GATEWAY); } public static String getCredentialStoreDBUser() throws ApplicationSettingsException { try { return getSetting(CREDENTIAL_STORE_DB_USER); } catch (ApplicationSettingsException e) { return getSetting(REGISTRY_DB_USER); } } public static String getCredentialStoreDBPassword() throws ApplicationSettingsException { try { return getSetting(CREDENTIAL_STORE_DB_PASSWORD); } catch 
(ApplicationSettingsException e) { return getSetting(REGISTRY_DB_PASSWORD); } } public static String getCredentialStoreDBDriver() throws ApplicationSettingsException { try { return getSetting(CREDENTIAL_STORE_DB_DRIVER); } catch (ApplicationSettingsException e) { return getSetting(REGISTRY_DB_DRIVER); } } public static String getCredentialStoreDBURL() throws ApplicationSettingsException { try { return getSetting(CREDENTIAL_STORE_DB_URL); } catch (ApplicationSettingsException e) { return getSetting(REGISTRY_DB_URL); } } public static boolean isAPIServerTLSEnabled() { try { return Boolean.parseBoolean(getSetting(API_SERVER_TLS_ENABLED)); } catch (ApplicationSettingsException e) { return false; } } public static String getApiServerKeystorePasswd() throws ApplicationSettingsException{ return getSetting(API_SERVER_KEYSTORE_PASSWD); } public static String getApiServerKeystore() throws ApplicationSettingsException{ return getSetting(API_SERVER_KEYSTORE); } public static String getHostScheduler() throws ApplicationSettingsException { return getSetting(HOST_SCHEDULER); } public static boolean isStopAllThreads() { return stopAllThreads; } public static void setStopAllThreads(boolean stopAllThreads) { ServerSettings.stopAllThreads = stopAllThreads; } public static String getMyProxyServer() throws ApplicationSettingsException { return getSetting(MY_PROXY_SERVER); } public static String getMyProxyUser() throws ApplicationSettingsException { return getSetting(MY_PROXY_USER); } public static String getMyProxyPassword() throws ApplicationSettingsException { return getSetting(MY_PROXY_PASSWORD); } public static int getMyProxyLifetime() throws ApplicationSettingsException { return Integer.parseInt(getSetting(MY_PROXY_LIFETIME)); } public static boolean isEmbeddedZK() { return Boolean.parseBoolean(getSetting(EMBEDDED_ZK, "true")); } public static String getIp() { try { return getSetting(IP); } catch (ApplicationSettingsException e) { try { return 
InetAddress.getLocalHost().getHostAddress(); } catch (UnknownHostException e1) { e1.printStackTrace(); } } return null; } public static int getEnactmentThreadPoolSize() { String threadPoolSize = null; try { threadPoolSize = getSetting(ENACTMENT_THREAD_POOL_SIZE); } catch (ApplicationSettingsException e) { return DEFAULT_ENACTMENT_THREAD_POOL_SIZE; } return Integer.valueOf(threadPoolSize); } public static String getWorkflowParser() throws ApplicationSettingsException { return getSetting(WORKFLOW_PARSER); } public static int getEmailMonitorPeriod() throws ApplicationSettingsException { return Integer.valueOf(getSetting(EMAIL_BASED_MONITORING_PERIOD, "100000")); } public static String getEmailBasedMonitorHost() throws ApplicationSettingsException { return getSetting(EMAIL_BASED_MONITOR_HOST); } public static String getEmailBasedMonitorAddress() throws ApplicationSettingsException { return getSetting(EMAIL_BASED_MONITOR_ADDRESS); } public static String getEmailBasedMonitorPassword() throws ApplicationSettingsException { return getSetting(EMAIL_BASED_MONITOR_PASSWORD); } public static String getEmailBasedMonitorFolderName() throws ApplicationSettingsException { return getSetting(EMAIL_BASED_MONITOR_FOLDER_NAME); } public static String getEmailBasedMonitorStoreProtocol() throws ApplicationSettingsException { return getSetting(EMAIL_BASED_MONITOR_STORE_PROTOCOL); } public static boolean isEmailBasedNotificationEnable() { return Boolean.valueOf(getSetting(ENABLE_EMAIL_BASED_MONITORING, "false")); } public static boolean isAPISecured() throws ApplicationSettingsException { return Boolean.valueOf(getSetting(Constants.IS_API_SECURED)); } public static String getRemoteAuthzServerUrl() throws ApplicationSettingsException { return getSetting(Constants.REMOTE_OAUTH_SERVER_URL); } public static String getAuthorizationPoliyName() throws ApplicationSettingsException { return getSetting(Constants.AUTHORIZATION_POLICY_NAME); } public static String getZookeeperConnection() throws 
ApplicationSettingsException { return getSetting(ZOOKEEPER_SERVER_CONNECTION, "localhost:2181"); } public static int getZookeeperTimeout() { return Integer.valueOf(getSetting(ZOOKEEPER_TIMEOUT, "3000")); } public static String getGFacServerName() throws ApplicationSettingsException { return getSetting(GFAC_SERVER_NAME); } public static String getGfacServerHost() throws ApplicationSettingsException { return getSetting(GFAC_SERVER_HOST); } public static String getGFacServerPort() throws ApplicationSettingsException { return getSetting(GFAC_SERVER_PORT); } public static int getGFacThreadPoolSize() { try { String threadPoolSize = getSetting(GFAC_THREAD_POOL_SIZE); if (threadPoolSize != null && !threadPoolSize.isEmpty()) { return Integer.valueOf(threadPoolSize); } else { log.warn("Thread pool size is not configured, use default gfac thread pool size " + DEFAULT_GFAC_THREAD_POOL_SIZE); } } catch (ApplicationSettingsException e) { log.warn("Couldn't read thread pool size from configuration on exception, use default gfac thread pool " + "size " + DEFAULT_GFAC_THREAD_POOL_SIZE); } return DEFAULT_GFAC_THREAD_POOL_SIZE; } public static String getOrchestratorServerName() throws ApplicationSettingsException { return getSetting(ORCHESTRATOR_SERVER_NAME); } public static String getOrchestratorServerHost() throws ApplicationSettingsException { return getSetting(ORCHESTRATOR_SERVER_HOST); } public static int getOrchestratorServerPort() throws ApplicationSettingsException { return Integer.valueOf(getSetting(ORCHESTRATOR_SERVER_PORT)); } public static boolean isTLSEnabled() throws ApplicationSettingsException { return Boolean.valueOf(getSetting(Constants.IS_TLS_ENABLED)); } public static int getTLSServerPort() throws ApplicationSettingsException { return Integer.valueOf(getSetting(Constants.TLS_SERVER_PORT)); } public static String getKeyStorePath() throws ApplicationSettingsException { return getSetting(Constants.KEYSTORE_PATH); } public static String getKeyStorePassword() throws 
ApplicationSettingsException { return getSetting(Constants.KEYSTORE_PASSWORD); } public static int getTLSClientTimeout() throws ApplicationSettingsException { return Integer.valueOf(getSetting(Constants.TLS_CLIENT_TIMEOUT)); } public static String getSecurityManagerClassName() throws ApplicationSettingsException { return getSetting(Constants.SECURITY_MANAGER_CLASS); } public static String getAuthzCacheManagerClassName() throws ApplicationSettingsException { return getSetting(Constants.AUTHZ_CACHE_MANAGER_CLASS); } public static boolean isAuthzCacheEnabled() throws ApplicationSettingsException { return Boolean.valueOf(getSetting(Constants.AUTHZ_CACHE_ENABLED)); } public static int getCacheSize() throws ApplicationSettingsException { return Integer.valueOf(getSetting(Constants.IN_MEMORY_CACHE_SIZE)); } public static String getUserProfileMongodbHost() throws ApplicationSettingsException{ return getSetting(USER_PROFILE_MONGODB_HOST); } public static int getUserProfileMongodbPort() throws ApplicationSettingsException{ return Integer.parseInt(getSetting(USER_PROFILE_MONGODB_PORT)); } public static String getLocalDataLocation() { return System.getProperty("java.io.tmpdir"); } public static Boolean isEnableSharing() throws ApplicationSettingsException { return Boolean.parseBoolean(getSetting(ENABLE_SHARING)); } public static boolean isRunningOnAws() { return Boolean.valueOf(getSetting(IS_RUNNING_ON_AWS, "false")); } public static String getKafkaBrokerList() { return getSetting(KAFKA_BROKER_LIST, null); } public static String getKafkaTopicPrefix() { return getSetting(KAFKA_TOPIC_PREFIX, "all"); } public static boolean isEnabledKafkaLogging() { return Boolean.valueOf(getSetting(ENABLE_KAFKA_LOGGING, "false")); } public static void setServerRoles(String[] roles) { listConfigurations.put(SERVER_ROLES, roles); } public static String[] getServerRoles() { return listConfigurations.get(SERVER_ROLES); } public static String getAuroraSchedulerHosts() throws 
ApplicationSettingsException { return getSetting(AURORA_SCHEDULER_HOSTS); } public static String getMesosClusterName() throws ApplicationSettingsException { return getSetting(MESOS_CLUSTER_NAME); } public static String getAuroraExecutorName() throws ApplicationSettingsException { return getSetting(AURORA_EXECUTOR_NAME); } public static String getAuroraExecutorConfigTemplateFileName() throws ApplicationSettingsException { return getSetting(AURORA_EXECUTOR_CONFIG_TEMPLATE_FILE); } public static int getAuroraSchedulerTimeout() throws ApplicationSettingsException { return Integer.valueOf(getSetting(AURORA_SCHEDULER_CONNECT_TIMEOUT_MS)); } public static int getSessionCacheAccessTimeout() { return Integer.valueOf(getSetting(SESSION_CACHE_ACCESS_TIME_OUT, "30")); } public static String getSharingRegistryPort() { return getSetting(SHARING_REGISTRY_PORT, "7878"); } public static String getSharingRegistryHost() { return getSetting(SHARING_REGISTRY_HOST, "localhost"); } }
/*
 * Copyright (C) 2013 salesforce.com, inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.auraframework.integration.test.root.intf;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.auraframework.def.AttributeDef;
import org.auraframework.def.DefDescriptor;
import org.auraframework.def.EventDef;
import org.auraframework.def.InterfaceDef;
import org.auraframework.def.ProviderDef;
import org.auraframework.def.RegisterEventDef;
import org.auraframework.impl.AuraImplTestCase;
import org.auraframework.impl.root.event.RegisterEventDefImpl;
import org.auraframework.impl.root.intf.InterfaceDefImpl;
import org.auraframework.system.AuraContext;
import org.auraframework.system.Location;
import org.auraframework.throwable.quickfix.DefinitionNotFoundException;
import org.auraframework.throwable.quickfix.InvalidDefinitionException;
import org.junit.Ignore;
import org.junit.Test;

/**
 * Unit tests for {@link InterfaceDefImpl}: hashCode/equals contracts, dependency
 * collection, definition validation, attribute/event inheritance through
 * {@code extends}, serialization, and error cases (self-extension, missing parent,
 * interface implementing an interface). Definitions are built via the inherited
 * {@code vendor} test fixture.
 */
public class InterfaceDefTest extends AuraImplTestCase {

    // Two defs built from the same descriptor/location must hash identically.
    @Test
    public void testHashCode() {
        InterfaceDefImpl intDef2 = vendor.makeInterfaceDefWithNulls(
                vendor.makeInterfaceDefDescriptor("aura:testinterfacechild"), null, null,
                vendor.makeLocation("filename1", 5, 5, 0), null, null);
        assertEquals(
                "Hashcode should be the same for both aura:testinterfacechild defs",
                vendor.makeInterfaceDefWithNulls(vendor.makeInterfaceDefDescriptor("aura:testinterfacechild"), null,
                        null, vendor.makeLocation("filename1", 5, 5, 0), null, null).hashCode(),
                intDef2.hashCode());
    }

    // The dependency set must contain the provider, the extended interface,
    // and the event referenced by a registered event def.
    @Test
    public void testAppendDependencies() throws Exception {
        Set<DefDescriptor<InterfaceDef>> extensions = new HashSet<>();
        extensions.add(vendor.makeInterfaceDefDescriptor("aura:testinterfaceparent"));
        DefDescriptor<InterfaceDef> ifcDescriptor = vendor.makeInterfaceDefDescriptor("aura:testinterfacechild");
        Map<String, RegisterEventDef> eventDefs = new HashMap<>();
        DefDescriptor<EventDef> eventDescriptor = definitionService.getDefDescriptor("aura:testevent", EventDef.class);
        RegisterEventDef red = vendor.makeRegisterEventDefWithNulls(ifcDescriptor, eventDescriptor, true, null);
        eventDefs.put("buckfutter", red);
        InterfaceDefImpl def = vendor.makeInterfaceDefWithNulls(ifcDescriptor, null, eventDefs, null, extensions,
                "java://org.auraframework.impl.java.provider.TestComponentDescriptorProvider");
        Set<DefDescriptor<?>> expected = new HashSet<>();
        expected.add(definitionService.getDefDescriptor(
                "java://org.auraframework.impl.java.provider.TestComponentDescriptorProvider", ProviderDef.class));
        expected.add(vendor.makeInterfaceDefDescriptor("aura:testinterfaceparent"));
        expected.add(eventDescriptor);
        Set<DefDescriptor<?>> dependencies = def.getDependencySet();
        assertEquals("dependencies are incorrect", expected, dependencies);
    }

    // A def with a null descriptor must fail validation.
    @Test
    public void testValidateDefinitionNullDescriptor() throws Exception {
        InterfaceDefImpl def = vendor.makeInterfaceDefWithNulls(null, null, null, null, null, null);
        Exception thrown = null;
        try {
            def.validateDefinition();
        } catch (InvalidDefinitionException expected) {
            thrown = expected;
        }
        assertNotNull("Should have thrown InvalidDefinitionException for AuraDescriptor<InterfaceDef> being null",
                thrown);
    }

    @Test
    @Ignore("FIXME: goliver - need to figure out how to test")
    public void testValidateReferences() throws Exception {
        //FakeRegistry fake = createFakeRegistry();
        //InterfaceDef ed = vendor.makeInterfaceDef();
        //InterfaceDef extendsID = vendor.makeInterfaceDef(vendor.getParentInterfaceDefDescriptor());
        //fake.putDefinition(extendsID);
        //fake.putDefinition(vendor.makeEventDef());
        //ed.validateReferences();
    }

    // A fully-populated, well-formed def must validate without throwing.
    @Test
    public void testValidateValidDefinition() throws Exception {
        DefDescriptor<EventDef> eventDescriptor = definitionService.getDefDescriptor("aura:testevent", EventDef.class);
        DefDescriptor<InterfaceDef> ifcDescriptor = vendor.makeInterfaceDefDescriptor("aura:testinterfacechild");
        RegisterEventDefImpl red = vendor.makeRegisterEventDefWithNulls(ifcDescriptor, eventDescriptor, true, null);
        Map<String, RegisterEventDef> eventDefs = new HashMap<>();
        eventDefs.put("eventHandler", red);
        Map<DefDescriptor<AttributeDef>, AttributeDef> attDefs = new HashMap<>();
        vendor.insertAttributeDef(attDefs, eventDescriptor, "testattribute", "String", false,
                AttributeDef.SerializeToType.BOTH, null, AuraContext.Access.PRIVATE);
        InterfaceDefImpl def = vendor.makeInterfaceDefWithNulls(ifcDescriptor, attDefs, eventDefs, null, null, null);
        def.validateDefinition();
    }

    // Registered events are inherited from an extended parent interface.
    @Test
    public void testGetRegisterEventDefs() throws Exception {
        Set<DefDescriptor<InterfaceDef>> extendsIntf = new HashSet<>();
        extendsIntf.add(vendor.makeInterfaceDefDescriptor("test:testinterfaceparent"));
        InterfaceDefImpl id = vendor.makeInterfaceDef(extendsIntf);
        Map<String, RegisterEventDef> registeredED = id.getRegisterEventDefs();
        assertEquals(2, registeredED.size());
        assertNotNull(registeredED.get("parentEvent"));
    }

    // Attributes come from both the child and the extended parent.
    @Test
    public void testGetAttributeDefs() throws Exception {
        Set<DefDescriptor<InterfaceDef>> extendsIntf = new HashSet<>();
        extendsIntf.add(vendor.makeInterfaceDefDescriptor("test:testinterfaceparent"));
        InterfaceDef id = vendor.makeInterfaceDef(extendsIntf);
        Map<DefDescriptor<AttributeDef>, AttributeDef> attributes = id.getAttributeDefs();
        assertEquals(2, attributes.size());
        assertTrue("Attribute from parent should be in the map",
                attributes.containsKey(definitionService.getDefDescriptor("mystring", AttributeDef.class)));
        assertTrue("Attribute from child should be in the map",
                attributes.containsKey(definitionService.getDefDescriptor(vendor.getAttributeName(),
                        AttributeDef.class)));
    }

    // Without extensions, only the locally-registered events are returned.
    @Test
    public void testGetEventDefsWithoutExtensions() throws Exception {
        DefDescriptor<EventDef> eventTestDescriptor = definitionService.getDefDescriptor("aura:testevent",
                EventDef.class);
        DefDescriptor<InterfaceDef> ifcDescriptor = vendor.makeInterfaceDefDescriptor("aura:testinterfacechild");
        RegisterEventDef regEventDef = vendor.makeRegisterEventDefWithNulls(ifcDescriptor, eventTestDescriptor, true,
                null);
        Map<String, RegisterEventDef> eventDefs = new HashMap<>();
        eventDefs.put("cans", regEventDef);
        InterfaceDefImpl intDef2 = vendor.makeInterfaceDefWithNulls(ifcDescriptor, null, eventDefs, null, null, null);
        assertEquals(eventDefs, intDef2.getRegisterEventDefs());
    }

    // Without extensions, only the locally-declared attributes are returned.
    @Test
    public void testGetAttributeDefsWithoutExtensions() throws Exception {
        Map<DefDescriptor<AttributeDef>, AttributeDef> attributes = new HashMap<>();
        DefDescriptor<InterfaceDef> descriptor = vendor.makeInterfaceDefDescriptor("aura:testinterfacechild");
        vendor.insertAttributeDef(attributes, descriptor, "fakeAttribute", "String", false,
                AttributeDef.SerializeToType.BOTH, null, AuraContext.Access.PUBLIC);
        InterfaceDefImpl intDef2 = vendor.makeInterfaceDefWithNulls(
                descriptor, attributes, null, null, null, null);
        Map<DefDescriptor<AttributeDef>, AttributeDef> returnedAttributes = intDef2.getAttributeDefs();
        assertEquals(1, returnedAttributes.size());
        assertEquals(attributes, returnedAttributes);
    }

    @Test
    public void testGetExtendsDescriptor() {
        Set<DefDescriptor<InterfaceDef>> extensions = new HashSet<>();
        extensions.add(vendor.makeInterfaceDefDescriptor("aura:testinterfacechild"));
        InterfaceDefImpl intDef2 = vendor.makeInterfaceDefWithNulls(
                vendor.makeInterfaceDefDescriptor("aura:testinterfaceparent"), null, null, null, extensions, null);
        assertEquals(extensions, intDef2.getExtendsDescriptors());
    }

    // Two defs built with identical attributes must compare equal.
    @Test
    public void testEqualsObject() {
        Set<DefDescriptor<InterfaceDef>> extensions = new HashSet<>();
        extensions.add(vendor.makeInterfaceDefDescriptor("aura:testinterfaceparent"));
        InterfaceDefImpl intDef2 = vendor.makeInterfaceDefWithNulls(
                vendor.makeInterfaceDefDescriptor("aura:testinterfacechild"), null, null,
                vendor.makeLocation("filename1", 5, 5, 0), extensions, null);
        InterfaceDefImpl intDef3 = vendor.makeInterfaceDefWithNulls(
                vendor.makeInterfaceDefDescriptor("aura:testinterfacechild"), null, null,
                vendor.makeLocation("filename1", 5, 5, 0), extensions, null);
        assertTrue("Two interfaceDefs with the same attributes failed equality", intDef2.equals(intDef3));
    }

    // equals() against a different Def type must be false (deliberate unlikely-arg-type).
    @SuppressWarnings("unlikely-arg-type")
    @Test
    public void testEqualsWithDifferentTypes() {
        DefDescriptor<EventDef> eventTestDescriptor = definitionService.getDefDescriptor("aura:testevent",
                EventDef.class);
        DefDescriptor<InterfaceDef> ifcDescriptor = vendor.makeInterfaceDefDescriptor("aura:testinterfacechild");
        RegisterEventDefImpl regEventDef = vendor.makeRegisterEventDefWithNulls(ifcDescriptor, eventTestDescriptor,
                true, null);
        assertFalse("Two different Defs shouldn't have been equal",
                vendor.makeInterfaceDef(ifcDescriptor, null, null, vendor.makeLocation("filename1", 5, 5, 0), null,
                        AuraContext.Access.INTERNAL).equals(regEventDef));
    }

    @Test
    public void testEqualsWithDifferentExtensions() {
        Set<DefDescriptor<InterfaceDef>> extensions = new HashSet<>();
        extensions.add(vendor.makeInterfaceDefDescriptor("aura:testinterfaceparent"));
        InterfaceDefImpl intDef2 = vendor.makeInterfaceDefWithNulls(
                vendor.makeInterfaceDefDescriptor("aura:testinterfacechild"), null, null,
                vendor.makeLocation("filename1", 5, 5, 0), extensions, null);
        assertFalse(
                "InterfacesDefs with different extensions shouldn't have been equal",
                vendor.makeInterfaceDef(vendor.makeInterfaceDefDescriptor("aura:testinterfacechild"), null, null,
                        vendor.makeLocation("filename1", 5, 5, 0), null, AuraContext.Access.INTERNAL).equals(intDef2));
    }

    @Test
    public void testEqualsWithDifferentLocations() {
        InterfaceDefImpl intDef2 = vendor.makeInterfaceDefWithNulls(vendor
                .makeInterfaceDefDescriptor("aura:testinterfacechild"), null, null,
                new Location("filename1", 4, 4, 1000), null, null);
        assertFalse(
                "InterfacesDefs with different locations shouldn't have been equal",
                vendor.makeInterfaceDef(vendor.makeInterfaceDefDescriptor("aura:testinterfacechild"), null, null,
                        vendor.makeLocation("filename1", 5, 5, 0), null, AuraContext.Access.INTERNAL).equals(intDef2));
    }

    // Serialized form is compared against a checked-in gold file.
    @Test
    public void testSerialize() throws Exception {
        Set<DefDescriptor<InterfaceDef>> extensions = new HashSet<>();
        extensions.add(vendor.makeInterfaceDefDescriptor("aura:testinterfaceparent"));
        Map<DefDescriptor<AttributeDef>, AttributeDef> attributes = new HashMap<>();
        DefDescriptor<EventDef> eventTestDescriptor = definitionService.getDefDescriptor("aura:testevent",
                EventDef.class);
        DefDescriptor<InterfaceDef> ifcDescriptor = vendor.makeInterfaceDefDescriptor("aura:testinterfacechild");
        vendor.insertAttributeDef(attributes, eventTestDescriptor, "fakeAttribute", "String", false,
                AttributeDef.SerializeToType.BOTH, null, AuraContext.Access.PRIVATE);
        RegisterEventDef regEventDef = vendor.makeRegisterEventDefWithNulls(ifcDescriptor, eventTestDescriptor, true,
                null);
        Map<String, RegisterEventDef> eventDefs = new HashMap<>();
        eventDefs.put("event", regEventDef);
        InterfaceDefImpl intDef2 = vendor.makeInterfaceDefWithNulls(ifcDescriptor, attributes, eventDefs,
                vendor.makeLocation("filename1", 5, 5, 0), extensions, null);
        serializeAndGoldFile(intDef2);
    }

    // An interface extending itself must be rejected at definition time.
    @Test
    public void testExtendsItself() throws Exception {
        DefDescriptor<InterfaceDef> extendsSelf = addSourceAutoCleanup(InterfaceDef.class, "");
        getAuraTestingUtil().updateSource(extendsSelf,
                String.format("<aura:interface extends='%s'> </aura:interface>", extendsSelf.getDescriptorName()));
        try {
            definitionService.getDefinition(extendsSelf);
            fail("An interface should not be able to extend itself.");
        } catch (InvalidDefinitionException expected) {
            assertEquals(extendsSelf.getQualifiedName() + " cannot extend itself", expected.getMessage());
        }
    }

    // Extending a non-existent interface must raise DefinitionNotFoundException.
    @Test
    public void testExtendsNonExistent() {
        DefDescriptor<InterfaceDef> cmpDesc = addSourceAutoCleanup(InterfaceDef.class,
                "<aura:interface extends='aura:iDontExist'></aura:interface>");
        try {
            definitionService.getDefinition(cmpDesc);
            fail("Did not get expected exception: " + DefinitionNotFoundException.class.getName());
        } catch (Exception e) {
            checkExceptionFull(e, DefinitionNotFoundException.class,
                    "No INTERFACE named markup://aura:iDontExist found : [" + cmpDesc.getQualifiedName()+"]",
                    cmpDesc.getQualifiedName());
        }
    }

    // Interfaces may only extend other interfaces, never implement them.
    @Test
    public void testImplementsAnInterface() throws Exception {
        DefDescriptor<InterfaceDef> d = addSourceAutoCleanup(InterfaceDef.class,
                "<aura:interface implements='test:fakeInterface'> </aura:interface>");
        try {
            definitionService.getDefinition(d);
            fail("An interface cannot implement another interface, it can only extend it.");
        } catch (InvalidDefinitionException ignored) {
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.test;

import org.apache.lucene.util.Constants;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.DiscoveryModule;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;

/**
 * Simple helper class to start external nodes to be used within a test cluster.
 * Spawns a real {@code bin/elasticsearch} process (settings passed via -Des.*
 * system properties), waits for it to join the cluster, and exposes a
 * {@link TransportClient} to talk to it. Not thread-safe beyond the
 * {@code synchronized} methods below.
 */
final class ExternalNode implements Closeable {

    // Settings every external node must run with; merged over caller-provided
    // settings in startInternal(). Network mode is required so the spawned
    // process is reachable from the test JVM.
    public static final Settings REQUIRED_SETTINGS = Settings.builder()
            .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
            .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "zen")
            .put("node.mode", "network").build(); // we need network mode for this

    private final Path path;                                          // ES installation directory (working dir of the process)
    private final Random random;                                      // seeded; used to derive child-node seeds in start()
    private final NodeConfigurationSource nodeConfigurationSource;
    private Process process;                                          // non-null while the node runs
    private NodeInfo nodeInfo;                                        // populated once the node joined the cluster
    private final String clusterName;
    private TransportClient client;                                   // lazily created in getClient()
    private final ESLogger logger = Loggers.getLogger(getClass());
    private Settings externalNodeSettings;

    ExternalNode(Path path, long seed, NodeConfigurationSource nodeConfigurationSource) {
        this(path, null, seed, nodeConfigurationSource);
    }

    ExternalNode(Path path, String clusterName, long seed, NodeConfigurationSource nodeConfigurationSource) {
        if (!Files.isDirectory(path)) {
            throw new IllegalArgumentException("path must be a directory");
        }
        this.path = path;
        this.clusterName = clusterName;
        this.random = new Random(seed);
        this.nodeConfigurationSource = nodeConfigurationSource;
    }

    /**
     * Creates and starts a NEW ExternalNode instance (this object acts as a
     * factory; {@code this} itself is not started). Caller-side defaults are
     * merged with per-ordinal settings from the configuration source.
     */
    synchronized ExternalNode start(Client localNode, Settings defaultSettings, String nodeName, String clusterName,
            int nodeOrdinal) throws IOException, InterruptedException {
        ExternalNode externalNode = new ExternalNode(path, clusterName, random.nextLong(), nodeConfigurationSource);
        Settings settings = Settings.builder().put(defaultSettings)
                .put(nodeConfigurationSource.nodeSettings(nodeOrdinal)).build();
        externalNode.startInternal(localNode, settings, nodeName, clusterName);
        return externalNode;
    }

    /**
     * Launches the elasticsearch process and blocks until the node is visible
     * via the given client, or fails. On any failure the process is stopped
     * again (see finally block).
     */
    @SuppressForbidden(reason = "needs java.io.File api to start a process")
    synchronized void startInternal(Client client, Settings settings, String nodeName, String clusterName)
            throws IOException, InterruptedException {
        if (process != null) {
            throw new IllegalStateException("Already started");
        }
        List<String> params = new ArrayList<>();
        if (!Constants.WINDOWS) {
            params.add("bin/elasticsearch");
        } else {
            params.add("bin/elasticsearch.bat");
        }
        params.add("-Des.cluster.name=" + clusterName);
        params.add("-Des.node.name=" + nodeName);
        Settings.Builder externaNodeSettingsBuilder = Settings.builder();
        // Filter out settings that are either passed explicitly above or would
        // conflict with running as a standalone network-mode process.
        for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
            switch (entry.getKey()) {
                case "cluster.name":
                case "node.name":
                case "path.home":
                case "node.mode":
                case "node.local":
                case NetworkModule.TRANSPORT_TYPE_KEY:
                case DiscoveryModule.DISCOVERY_TYPE_KEY:
                case NetworkModule.TRANSPORT_SERVICE_TYPE_KEY:
                case InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING:
                    continue;
                default:
                    externaNodeSettingsBuilder.put(entry.getKey(), entry.getValue());
            }
        }
        this.externalNodeSettings = externaNodeSettingsBuilder.put(REQUIRED_SETTINGS).build();
        // Every remaining setting is handed to the child JVM as a -Des.* property.
        for (Map.Entry<String, String> entry : externalNodeSettings.getAsMap().entrySet()) {
            params.add("-Des." + entry.getKey() + "=" + entry.getValue());
        }
        params.add("-Des.path.home=" + PathUtils.get(".").toAbsolutePath());
        params.add("-Des.path.conf=" + path + "/config");
        ProcessBuilder builder = new ProcessBuilder(params);
        builder.directory(path.toFile());
        builder.inheritIO();
        boolean success = false;
        try {
            logger.info("starting external node [{}] with: {}", nodeName, builder.command());
            process = builder.start();
            this.nodeInfo = null;
            if (waitForNode(client, nodeName)) {
                nodeInfo = nodeInfo(client, nodeName);
                assert nodeInfo != null;
                logger.info("external node {} found, version [{}], build {}", nodeInfo.getNode(),
                        nodeInfo.getVersion(), nodeInfo.getBuild());
            } else {
                throw new IllegalStateException("Node [" + nodeName + "] didn't join the cluster");
            }
            success = true;
        } finally {
            if (!success) {
                stop(); // don't leak the child process on failed startup
            }
        }
    }

    /** Polls (up to 30s) until a node with the given name shows up in the cluster. */
    static boolean waitForNode(final Client client, final String name) throws InterruptedException {
        return ESTestCase.awaitBusy(() -> {
            final NodesInfoResponse nodeInfos = client.admin().cluster().prepareNodesInfo().get();
            final NodeInfo[] nodes = nodeInfos.getNodes();
            for (NodeInfo info : nodes) {
                if (name.equals(info.getNode().getName())) {
                    return true;
                }
            }
            return false;
        }, 30, TimeUnit.SECONDS);
    }

    /** @return the NodeInfo for the named node, or null if it is not in the cluster. */
    static NodeInfo nodeInfo(final Client client, final String nodeName) {
        final NodesInfoResponse nodeInfos = client.admin().cluster().prepareNodesInfo().get();
        final NodeInfo[] nodes = nodeInfos.getNodes();
        for (NodeInfo info : nodes) {
            if (nodeName.equals(info.getNode().getName())) {
                return info;
            }
        }
        return null;
    }

    synchronized TransportAddress getTransportAddress() {
        if (nodeInfo == null) {
            throw new IllegalStateException("Node has not started yet");
        }
        return nodeInfo.getTransport().getAddress().publishAddress();
    }

    /** Lazily builds (and caches) a TransportClient pointed at this node's publish address. */
    synchronized Client getClient() {
        if (nodeInfo == null) {
            throw new IllegalStateException("Node has not started yet");
        }
        if (client == null) {
            TransportAddress addr = nodeInfo.getTransport().getAddress().publishAddress();
            // verify that the end node setting will have network enabled.
            Settings clientSettings = settingsBuilder().put(externalNodeSettings)
                    .put("client.transport.nodes_sampler_interval", "1s")
                    .put("name", "transport_client_" + nodeInfo.getNode().name())
                    .put(ClusterName.SETTING, clusterName).put("client.transport.sniff", false).build();
            TransportClient client = TransportClient.builder().settings(clientSettings).build();
            client.addTransportAddress(addr);
            this.client = client;
        }
        return client;
    }

    synchronized void reset(long seed) {
        this.random.setSeed(seed);
    }

    /** Closes the client (if any), then destroys and reaps the child process. Idempotent. */
    synchronized void stop() throws InterruptedException {
        if (running()) {
            try {
                if (this.client != null) {
                    client.close();
                }
            } finally {
                process.destroy();
                process.waitFor();
                process = null;
                nodeInfo = null;
            }
        }
    }

    synchronized boolean running() {
        return process != null;
    }

    @Override
    public void close() {
        try {
            stop();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status for callers
        }
    }

    synchronized String getName() {
        if (nodeInfo == null) {
            throw new IllegalStateException("Node has not started yet");
        }
        return nodeInfo.getNode().getName();
    }
}
package com.scienceminer.nerd.mention; import org.grobid.core.layout.LayoutToken; import org.grobid.core.lexicon.NERLexicon; import org.grobid.core.layout.BoundingBox; import org.grobid.core.data.Entity; import org.grobid.core.data.Entity.Origin; import org.grobid.core.data.Sense; import org.grobid.core.utilities.*; import org.grobid.core.utilities.GrobidConfig.ModelParameters; import com.scienceminer.nerd.exceptions.NerdException; import com.scienceminer.nerd.service.NerdQuery; import com.scienceminer.nerd.utilities.StringPos; import com.scienceminer.nerd.utilities.Utilities; import com.scienceminer.nerd.kb.LowerKnowledgeBase; import com.scienceminer.nerd.kb.UpperKnowledgeBase; import com.scienceminer.nerd.kb.model.Label; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.*; import java.util.*; import java.util.regex.*; /** * Common representation of an unresolved mention in a textual document. 
* * @author Patrice Lopez * */ public class Mention implements Comparable<Mention> { protected String rawName = null; protected String normalisedName = null; private ProcessText.MentionMethod source = null; // relative offset positions in context, if defined protected OffsetPosition offsets = null; // optional bounding box in the source document protected List<BoundingBox> boundingBoxes = null; // optional layout tokens corresponding to the current mention private List<LayoutToken> layoutTokens = null; // if the mention is an acronym; if true, the normalisedName will give the found expended form private boolean isAcronym = false; private Entity entity = null; public Mention() { this.offsets = new OffsetPosition(); } public Mention(String raw) { this(); this.rawName = raw; } public Mention(String rawName, int start, int end) { this(rawName); this.setOffsetStart(start); this.setOffsetEnd(end); } public Mention(String rawText, ProcessText.MentionMethod source) { this(rawText); this.source = source; } public Mention(Entity ent) { rawName = ent.getRawName(); normalisedName = ent.getNormalisedName(); offsets = ent.getOffsets(); boundingBoxes = ent.getBoundingBoxes(); isAcronym = ent.getIsAcronym(); entity = ent; layoutTokens = ent.getLayoutTokens(); //startTokenPos = ent.startTokenPos; //endTokenPos = ent.startTokenPos; } public Mention(Mention ent) { rawName = ent.rawName; normalisedName = ent.normalisedName; offsets = ent.offsets; boundingBoxes = ent.boundingBoxes; isAcronym = ent.isAcronym; entity = ent.entity; source = ent.source; layoutTokens = ent.layoutTokens; //startTokenPos = ent.startTokenPos; //endTokenPos = ent.startTokenPos; } public String getRawName() { return rawName; } public void setRawName(String raw) { this.rawName = raw; } public String getNormalisedName() { return normalisedName; } public void setNormalisedName(String raw) { this.normalisedName = raw; } public ProcessText.MentionMethod getSource() { return this.source; } public void 
setSource(ProcessText.MentionMethod source) { this.source = source; } public OffsetPosition getOffsets() { return offsets; } public void setOffsets(OffsetPosition offsets) { this.offsets = offsets; } public void setOffsetStart(int start) { offsets.start = start; } public int getOffsetStart() { return offsets.start; } public void setOffsetEnd(int end) { offsets.end = end; } public int getOffsetEnd() { return offsets.end; } public double getProb() { if (entity != null) return entity.getProb(); else return 0.0; } public void setProb(double prob) { if (entity == null) entity = new Entity(); entity.setProb(prob); } public double getConf() { if (entity != null) return entity.getConf(); else return 0.0; } public void setConf(double conf) { if (entity == null) entity = new Entity(); entity.setConf(conf); } public Sense getSense() { if (entity != null) return entity.getSense(); else return null; } public void setSense(Sense sense) { if (entity == null) entity = new Entity(); entity.setSense(sense); } public Origin getOrigin() { if (entity != null) return entity.getOrigin(); else return null; } public void setOrigin(Origin origin) { if (entity == null) entity = new Entity(); entity.setOrigin(origin); } public NERLexicon.NER_Type getType() { if (entity != null) return entity.getType(); else return null; } public void setType(NERLexicon.NER_Type theType) { if (entity == null) entity = new Entity(); entity.setType(theType); } public List<String> getSubTypes() { if (entity != null) return entity.getSubTypes(); else return null; } public void setSubTypes(List<String> theSubTypes) { if (entity == null) entity = new Entity(); entity.setSubTypes(theSubTypes); } public void setBoundingBoxes(List<BoundingBox> boundingBoxes) { this.boundingBoxes = boundingBoxes; } public List<BoundingBox> getBoundingBoxes() { return boundingBoxes; } public void addBoundingBoxes(BoundingBox boundingBox) { if (this.boundingBoxes == null) this.boundingBoxes = new ArrayList<BoundingBox>(); 
this.boundingBoxes.add(boundingBox); } public boolean getIsAcronym() { return this.isAcronym; } public void setIsAcronym(boolean acronym) { this.isAcronym = acronym; } public Entity getEntity() { return entity; } @Override public boolean equals(Object object) { boolean result = false; if ((object != null) && object instanceof Mention) { int start = ((Mention) object).getOffsetStart(); int end = ((Mention) object).getOffsetEnd(); if ((start != -1) && (end != -1)) { if ((start == offsets.start) && (end == offsets.end) && (source == ((Mention) object).getSource())) { result = true; } } /*else { int startToken = ((Entity)object).getStartTokenPos(); int endToken = ((Entity)object).getEndTokenPos(); if ( (startToken != -1) && (endToken != -1) ) { if ( (startToken == startTokenPos) && (endToken == endTokenPos) ) { result = true; } } }*/ } return result; } @Override public int compareTo(Mention theEntity) { int start = theEntity.getOffsetStart(); int end = theEntity.getOffsetEnd(); //if ((start != -1) && (end != -1)) { if (offsets.start != start) return offsets.start - start; else if (offsets.end != end) return offsets.end - end; else { return source.compareTo(theEntity.getSource()); } /*} else { int startToken = theEntity.getStartTokenPos(); int endToken =theEntity.getEndTokenPos(); if ( (startToken != -1) && (endToken != -1) ) { if (startToken != startTokenPos) return startTokenPos - startToken; else return endTokenPos - endToken; } else { // it's too underspecified to be comparable, and for // sure it's not equal // throw an exception ? 
return -1; } }*/ } @Override public String toString() { StringBuffer buffer = new StringBuffer(); if (rawName != null) { buffer.append(rawName + "\t"); } if (normalisedName != null) { buffer.append(normalisedName + "\t"); } if (source != null) { buffer.append(source + "\t"); } if (getType() != null) { buffer.append(getType() + "\t"); } if (getSubTypes() != null) { for (String subType : getSubTypes()) buffer.append(subType + "\t"); } if (offsets != null) { buffer.append(offsets.toString() + "\t"); } if (getSense() != null) { if (getSense().getFineSense() != null) { buffer.append(getSense().getFineSense() + "\t"); } if (getSense().getCoarseSense() != null) { if ((getSense().getFineSense() == null) || ((getSense().getFineSense() != null) && !getSense().getCoarseSense().equals(getSense().getFineSense()))) { buffer.append(getSense().getCoarseSense() + "\t"); } } } return buffer.toString(); } public List<LayoutToken> getLayoutTokens() { return layoutTokens; } public void setLayoutTokens(List<LayoutToken> layoutTokens) { this.layoutTokens = layoutTokens; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.processing.merger; import java.io.File; import java.io.IOException; import java.util.List; import org.apache.carbondata.common.logging.LogService; import org.apache.carbondata.common.logging.LogServiceFactory; import org.apache.carbondata.core.constants.CarbonCommonConstants; import org.apache.carbondata.core.datastore.block.SegmentProperties; import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException; import org.apache.carbondata.core.datastore.row.CarbonRow; import org.apache.carbondata.core.metadata.datatype.DataType; import org.apache.carbondata.core.metadata.encoder.Encoding; import org.apache.carbondata.core.metadata.schema.table.CarbonTable; import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension; import org.apache.carbondata.core.scan.result.iterator.RawResultIterator; import org.apache.carbondata.core.scan.wrappers.ByteArrayWrapper; import org.apache.carbondata.core.util.CarbonUtil; import org.apache.carbondata.processing.model.CarbonLoadModel; import org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException; import org.apache.carbondata.processing.sortandgroupby.sortdata.SortDataRows; 
import org.apache.carbondata.processing.sortandgroupby.sortdata.SortIntermediateFileMerger;
import org.apache.carbondata.processing.sortandgroupby.sortdata.SortParameters;
import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
import org.apache.carbondata.processing.store.CarbonFactHandler;
import org.apache.carbondata.processing.store.CarbonFactHandlerFactory;
import org.apache.carbondata.processing.store.SingleThreadFinalSortFilesMerger;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;

import org.apache.spark.sql.types.Decimal;

/**
 * This class will process the query result and convert the data
 * into a format compatible for data load.
 *
 * Pipeline (see {@link #execute(List)}): init temp dirs and sort structures,
 * feed every query-result row through a sort, then merge the sort temp files
 * and hand the merged rows to the fact-data writer.
 */
public class CompactionResultSortProcessor extends AbstractResultProcessor {
  /**
   * LOGGER
   */
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(CompactionResultSortProcessor.class.getName());
  /**
   * carbon load model that contains all the required information for load
   */
  private CarbonLoadModel carbonLoadModel;
  /**
   * carbon table
   */
  private CarbonTable carbonTable;
  /**
   * sortDataRows instance for sorting each row read ad writing to sort temp file
   */
  private SortDataRows sortDataRows;
  /**
   * final merger for merge sort
   */
  private SingleThreadFinalSortFilesMerger finalMerger;
  /**
   * data handler VO object
   */
  private CarbonFactHandler dataHandler;
  /**
   * segment properties for getting dimension cardinality and other required information of a block
   */
  private SegmentProperties segmentProperties;
  /**
   * compaction type to decide whether taskID need to be extracted from carbondata files
   */
  private CompactionType compactionType;
  /**
   * boolean mapping for no dictionary columns in schema
   */
  private boolean[] noDictionaryColMapping;
  /**
   * agg type defined for measures
   */
  private DataType[] dataTypes;
  /**
   * segment id
   */
  private String segmentId;
  /**
   * temp store location to be sued during data load
   */
  private String[] tempStoreLocation;
  /**
   * table name
   */
  private String tableName;
  /**
   * no dictionary column count in schema
   */
  private int noDictionaryCount;
  /**
   * total count of measures in schema
   */
  private int measureCount;
  /**
   * dimension count excluding complex dimension and no dictionary column count
   */
  private int dimensionColumnCount;
  /**
   * whether the allocated tasks has any record
   */
  private boolean isRecordFound;
  /**
   * intermediate sort merger
   */
  private SortIntermediateFileMerger intermediateFileMerger;

  /**
   * @param carbonLoadModel
   * @param carbonTable
   * @param segmentProperties
   * @param compactionType
   * @param tableName
   */
  public CompactionResultSortProcessor(CarbonLoadModel carbonLoadModel, CarbonTable carbonTable,
      SegmentProperties segmentProperties, CompactionType compactionType, String tableName) {
    this.carbonLoadModel = carbonLoadModel;
    this.carbonTable = carbonTable;
    this.segmentProperties = segmentProperties;
    this.segmentId = carbonLoadModel.getSegmentId();
    this.compactionType = compactionType;
    this.tableName = tableName;
  }

  /**
   * This method will iterate over the query result and convert it into a format compatible
   * for data loading.
   * Failures are logged and reported through the boolean return value rather
   * than rethrown; temp folders are always cleaned up in the finally block.
   *
   * @param resultIteratorList
   * @return true when the whole compaction pipeline completed without error
   */
  public boolean execute(List<RawResultIterator> resultIteratorList) {
    boolean isCompactionSuccess = false;
    try {
      initTempStoreLocation();
      initSortDataRows();
      initAggType();
      processResult(resultIteratorList);
      // After delete command, if no records are fetched from one split,
      // below steps are not required to be initialized.
      if (isRecordFound) {
        initializeFinalThreadMergerForMergeSort();
        initDataHandler();
        readAndLoadDataFromSortTempFiles();
      }
      isCompactionSuccess = true;
    } catch (Exception e) {
      LOGGER.error(e, "Compaction failed: " + e.getMessage());
    } finally {
      // clear temp files and folders created during compaction
      deleteTempStoreLocation();
    }
    return isCompactionSuccess;
  }

  /**
   * This method will clean up the local folders and files created during compaction process.
   * Deletion failures are logged and ignored so cleanup continues for the
   * remaining locations (best effort).
   */
  private void deleteTempStoreLocation() {
    if (null != tempStoreLocation) {
      for (String tempLoc : tempStoreLocation) {
        try {
          CarbonUtil.deleteFoldersAndFiles(new File(tempLoc));
        } catch (IOException | InterruptedException e) {
          LOGGER.error("Problem deleting local folders during compaction: " + e.getMessage());
        }
      }
    }
  }

  /**
   * This method will iterate over the query result and perform row sorting operation.
   * isRecordFound is flipped to true as soon as any iterator yields a row;
   * sorting is started only after all rows have been added.
   *
   * @param resultIteratorList
   */
  private void processResult(List<RawResultIterator> resultIteratorList) throws Exception {
    for (RawResultIterator resultIterator : resultIteratorList) {
      while (resultIterator.hasNext()) {
        addRowForSorting(prepareRowObjectForSorting(resultIterator.next()));
        isRecordFound = true;
      }
    }
    try {
      sortDataRows.startSorting();
    } catch (CarbonSortKeyAndGroupByException e) {
      LOGGER.error(e);
      throw new Exception("Problem loading data during compaction: " + e.getMessage());
    }
  }

  /**
   * This method will prepare the data from raw object that will take part in sorting.
   * row[0] is the ByteArrayWrapper carrying both the MDKey and the
   * no-dictionary byte values; measures follow from index 1 onwards.
   *
   * @param row
   * @return row laid out as [dimensions..., measures...] for the sorter
   */
  private Object[] prepareRowObjectForSorting(Object[] row) {
    ByteArrayWrapper wrapper = (ByteArrayWrapper) row[0];
    // ByteBuffer[] noDictionaryBuffer = new ByteBuffer[noDictionaryCount];
    List<CarbonDimension> dimensions = segmentProperties.getDimensions();
    Object[] preparedRow = new Object[dimensions.size() + measureCount];
    // convert the dictionary from MDKey to surrogate key
    byte[] dictionaryKey = wrapper.getDictionaryKey();
    long[] keyArray = segmentProperties.getDimensionKeyGenerator().getKeyArray(dictionaryKey);
    Object[] dictionaryValues = new Object[dimensionColumnCount + measureCount];
    for (int i = 0; i < keyArray.length; i++) {
      dictionaryValues[i] = Long.valueOf(keyArray[i]).intValue();
    }
    // walk the dimensions in schema order, pulling dictionary surrogates and
    // no-dictionary byte values from their respective cursors
    int noDictionaryIndex = 0;
    int dictionaryIndex = 0;
    for (int i = 0; i < dimensions.size(); i++) {
      CarbonDimension dims = dimensions.get(i);
      if (dims.hasEncoding(Encoding.DICTIONARY)) {
        // dictionary
        preparedRow[i] = dictionaryValues[dictionaryIndex++];
      } else {
        // no dictionary dims
        preparedRow[i] = wrapper.getNoDictionaryKeyByIndex(noDictionaryIndex++);
      }
    }
    // fill all the measures
    // measures will always start from 1st index in the row object array
    int measureIndexInRow = 1;
    for (int i = 0; i < measureCount; i++) {
      preparedRow[dimensionColumnCount + i] =
          getConvertedMeasureValue(row[measureIndexInRow++], dataTypes[i]);
    }
    return preparedRow;
  }

  /**
   * This method will convert the spark decimal to java big decimal type.
   * All other data types pass through unchanged.
   *
   * @param value
   * @param type
   * @return
   */
  private Object getConvertedMeasureValue(Object value, DataType type) {
    switch (type) {
      case DECIMAL:
        if (value != null) {
          value = ((Decimal) value).toJavaBigDecimal();
        }
        return value;
      default:
        return value;
    }
  }

  /**
   * This method will read sort temp files, perform merge sort and add it to store for data loading.
   */
  private void readAndLoadDataFromSortTempFiles() throws Exception {
    try {
      intermediateFileMerger.finish();
      finalMerger.startFinalMerge();
      while (finalMerger.hasNext()) {
        Object[] row = finalMerger.next();
        dataHandler.addDataToStore(new CarbonRow(row));
      }
      dataHandler.finish();
    } catch (CarbonDataWriterException e) {
      LOGGER.error(e);
      throw new Exception("Problem loading data during compaction: " + e.getMessage());
    } catch (Exception e) {
      LOGGER.error(e);
      throw new Exception("Problem loading data during compaction: " + e.getMessage());
    } finally {
      if (null != dataHandler) {
        try {
          dataHandler.closeHandler();
        } catch (CarbonDataWriterException e) {
          LOGGER.error(e);
          // NOTE(review): throwing from the finally block replaces any exception
          // already raised in the try/catch above — confirm this masking is intended
          throw new Exception("Problem loading data during compaction: " + e.getMessage());
        }
      }
    }
  }

  /**
   * add row to a temp array which will we written to a sort temp file after sorting
   *
   * @param row
   */
  private void addRowForSorting(Object[] row) throws Exception {
    try {
      sortDataRows.addRow(row);
    } catch (CarbonSortKeyAndGroupByException e) {
      LOGGER.error(e);
      throw new Exception("Row addition for sorting failed during compaction: " + e.getMessage());
    }
  }

  /**
   * create an instance of sort data rows.
   * Also derives measureCount, dimensionColumnCount, noDictionaryCount and the
   * per-dimension no-dictionary mapping from the table schema.
   */
  private void initSortDataRows() throws Exception {
    measureCount = carbonTable.getMeasureByTableName(tableName).size();
    List<CarbonDimension> dimensions = carbonTable.getDimensionByTableName(tableName);
    noDictionaryColMapping = new boolean[dimensions.size()];
    int i = 0;
    for (CarbonDimension dimension : dimensions) {
      if (CarbonUtil.hasEncoding(dimension.getEncoder(), Encoding.DICTIONARY)) {
        // dictionary column: leave the mapping slot false but keep i aligned
        i++;
        continue;
      }
      noDictionaryColMapping[i++] = true;
      noDictionaryCount++;
    }
    dimensionColumnCount = dimensions.size();
    SortParameters parameters = createSortParameters();
    intermediateFileMerger = new SortIntermediateFileMerger(parameters);
    // TODO: Now it is only supported onheap merge, but we can have unsafe merge
    // as well by using UnsafeSortDataRows.
    this.sortDataRows = new SortDataRows(parameters, intermediateFileMerger);
    try {
      this.sortDataRows.initialize();
    } catch (CarbonSortKeyAndGroupByException e) {
      LOGGER.error(e);
      throw new Exception(
          "Error initializing sort data rows object during compaction: " + e.getMessage());
    }
  }

  /**
   * This method will create the sort parameters VO object
   *
   * @return
   */
  private SortParameters createSortParameters() {
    return SortParameters
        .createSortParameters(carbonTable, carbonLoadModel.getDatabaseName(), tableName,
            dimensionColumnCount, segmentProperties.getComplexDimensions().size(), measureCount,
            noDictionaryCount, carbonLoadModel.getPartitionId(), segmentId,
            carbonLoadModel.getTaskNo(), noDictionaryColMapping, true);
  }

  /**
   * create an instance of finalThread merger which will perform merge sort on all the
   * sort temp files.
   * When there are fewer sort columns than dimensions, only the leading
   * entries of the no-dictionary mapping apply to the sort columns.
   */
  private void initializeFinalThreadMergerForMergeSort() {
    boolean[] noDictionarySortColumnMapping = null;
    if (noDictionaryColMapping.length == this.segmentProperties.getNumberOfSortColumns()) {
      noDictionarySortColumnMapping = noDictionaryColMapping;
    } else {
      noDictionarySortColumnMapping = new boolean[this.segmentProperties.getNumberOfSortColumns()];
      System.arraycopy(noDictionaryColMapping, 0, noDictionarySortColumnMapping, 0,
          noDictionarySortColumnMapping.length);
    }
    String[] sortTempFileLocation = CarbonDataProcessorUtil.arrayAppend(tempStoreLocation,
        CarbonCommonConstants.FILE_SEPARATOR, CarbonCommonConstants.SORT_TEMP_FILE_LOCATION);
    finalMerger =
        new SingleThreadFinalSortFilesMerger(sortTempFileLocation, tableName, dimensionColumnCount,
            segmentProperties.getComplexDimensions().size(), measureCount, noDictionaryCount,
            dataTypes, noDictionaryColMapping, noDictionarySortColumnMapping);
  }

  /**
   * initialise carbon data writer instance
   */
  private void initDataHandler() throws Exception {
    CarbonFactDataHandlerModel carbonFactDataHandlerModel = CarbonFactDataHandlerModel
        .getCarbonFactDataHandlerModel(carbonLoadModel, carbonTable, segmentProperties, tableName,
            tempStoreLocation);
    setDataFileAttributesInModel(carbonLoadModel, compactionType, carbonTable,
        carbonFactDataHandlerModel);
    dataHandler = CarbonFactHandlerFactory.createCarbonFactHandler(carbonFactDataHandlerModel,
        CarbonFactHandlerFactory.FactHandlerType.COLUMNAR);
    try {
      dataHandler.initialise();
    } catch (CarbonDataWriterException e) {
      LOGGER.error(e);
      throw new Exception("Problem initialising data handler during compaction: " + e.getMessage());
    }
  }

  /**
   * initialise temporary store location
   */
  private void initTempStoreLocation() {
    tempStoreLocation = CarbonDataProcessorUtil
        .getLocalDataFolderLocation(carbonLoadModel.getDatabaseName(), tableName,
            carbonLoadModel.getTaskNo(), carbonLoadModel.getPartitionId(), segmentId, true, false);
  }

  /**
   * initialise aggregation type for measures for their storage format
   */
  private void initAggType() {
    dataTypes = CarbonDataProcessorUtil.initDataType(carbonTable, tableName, measureCount);
  }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * ActivityGroup.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 * Do not hand-edit; regenerate from the WSDL instead.
 */

package com.google.api.ads.admanager.axis.v202108;


/**
 * Activities are organized within activity groups, which are sets
 * of activities that share the same configuration. You create and
 * manage activities from within activity groups.
 */
public class ActivityGroup implements java.io.Serializable {
    /* The unique ID of the {@code ActivityGroup}. This attribute
     * is readonly and is assigned by Google. */
    private java.lang.Long id;

    /* The name of the {@code ActivityGroup}. This attribute is required
     * to create an activity group and has a maximum length of 255
     * characters. */
    private java.lang.String name;

    /* The company ids whose ads will be included for conversion tracking
     * on the activities in this group. Only clicks and impressions of ads
     * from these companies will lead to conversions on the containing
     * activities. This attribute is required when creating an activity
     * group.
     *
     * The company types allowed are: {@link Company.Type#ADVERTISER},
     * and {@link Company.Type#AD_NETWORK}, and
     * {@link Company.Type#HOUSE_ADVERTISER} */
    private long[] companyIds;

    /* Ad Manager records view-through conversions for users who have
     * previously viewed an Ad Manager ad within the number of days that
     * you set here (1 to 30 days), then visits a webpage containing
     * activity tags from this activity group. To be counted, the ad needs
     * to belong to one of the companies associated with the activity
     * group. This attribute is required to create an activity group. */
    private java.lang.Integer impressionsLookback;

    /* Ad Manager records click-through conversions for users who
     * have previously clicked on an Ad Manager ad within the number of
     * days that you set here (1 to 30 days), then visits a webpage
     * containing activity tags from this activity group. To be counted,
     * the ad needs to belong to one of the companies associated with the
     * activity group. This attribute is required to create an activity
     * group. */
    private java.lang.Integer clicksLookback;

    /* The status of this activity group. This attribute is readonly. */
    private com.google.api.ads.admanager.axis.v202108.ActivityGroupStatus status;

    public ActivityGroup() {
    }

    public ActivityGroup(
           java.lang.Long id,
           java.lang.String name,
           long[] companyIds,
           java.lang.Integer impressionsLookback,
           java.lang.Integer clicksLookback,
           com.google.api.ads.admanager.axis.v202108.ActivityGroupStatus status) {
           this.id = id;
           this.name = name;
           this.companyIds = companyIds;
           this.impressionsLookback = impressionsLookback;
           this.clicksLookback = clicksLookback;
           this.status = status;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("clicksLookback", getClicksLookback())
            .add("companyIds", getCompanyIds())
            .add("id", getId())
            .add("impressionsLookback", getImpressionsLookback())
            .add("name", getName())
            .add("status", getStatus())
            .toString();
    }

    /**
     * Gets the id value for this ActivityGroup.
     *
     * @return id   * The unique ID of the {@code ActivityGroup}. This attribute
     * is readonly and is assigned by Google.
     */
    public java.lang.Long getId() {
        return id;
    }

    /**
     * Sets the id value for this ActivityGroup.
     *
     * @param id   * The unique ID of the {@code ActivityGroup}. This attribute
     * is readonly and is assigned by Google.
     */
    public void setId(java.lang.Long id) {
        this.id = id;
    }

    /**
     * Gets the name value for this ActivityGroup.
     *
     * @return name   * The name of the {@code ActivityGroup}. This attribute is
     * required to create an activity group and has a maximum length of 255
     * characters.
     */
    public java.lang.String getName() {
        return name;
    }

    /**
     * Sets the name value for this ActivityGroup.
     *
     * @param name   * The name of the {@code ActivityGroup}. This attribute is
     * required to create an activity group and has a maximum length of 255
     * characters.
     */
    public void setName(java.lang.String name) {
        this.name = name;
    }

    /**
     * Gets the companyIds value for this ActivityGroup.
     *
     * @return companyIds   * The company ids whose ads will be included for
     * conversion tracking on the activities in this group. Only clicks and
     * impressions of ads from these companies will lead to conversions on
     * the containing activities. This attribute is required when creating
     * an activity group.
     *
     * The company types allowed are: {@link Company.Type#ADVERTISER},
     * and {@link Company.Type#AD_NETWORK}, and
     * {@link Company.Type#HOUSE_ADVERTISER}
     */
    public long[] getCompanyIds() {
        return companyIds;
    }

    /**
     * Sets the companyIds value for this ActivityGroup.
     *
     * @param companyIds   * The company ids whose ads will be included for
     * conversion tracking on the activities in this group. Only clicks and
     * impressions of ads from these companies will lead to conversions on
     * the containing activities. This attribute is required when creating
     * an activity group.
     *
     * The company types allowed are: {@link Company.Type#ADVERTISER},
     * and {@link Company.Type#AD_NETWORK}, and
     * {@link Company.Type#HOUSE_ADVERTISER}
     */
    public void setCompanyIds(long[] companyIds) {
        this.companyIds = companyIds;
    }

    // Indexed accessors for the companyIds array (generated convenience).
    public long getCompanyIds(int i) {
        return this.companyIds[i];
    }

    public void setCompanyIds(int i, long _value) {
        this.companyIds[i] = _value;
    }

    /**
     * Gets the impressionsLookback value for this ActivityGroup.
     *
     * @return impressionsLookback   * Ad Manager records view-through
     * conversions for users who have previously viewed an Ad Manager ad
     * within the number of days that you set here (1 to 30 days), then
     * visits a webpage containing activity tags from this activity group.
     * To be counted, the ad needs to belong to one of the companies
     * associated with the activity group. This attribute is required to
     * create an activity group.
     */
    public java.lang.Integer getImpressionsLookback() {
        return impressionsLookback;
    }

    /**
     * Sets the impressionsLookback value for this ActivityGroup.
     *
     * @param impressionsLookback   * Ad Manager records view-through
     * conversions for users who have previously viewed an Ad Manager ad
     * within the number of days that you set here (1 to 30 days), then
     * visits a webpage containing activity tags from this activity group.
     * To be counted, the ad needs to belong to one of the companies
     * associated with the activity group. This attribute is required to
     * create an activity group.
     */
    public void setImpressionsLookback(java.lang.Integer impressionsLookback) {
        this.impressionsLookback = impressionsLookback;
    }

    /**
     * Gets the clicksLookback value for this ActivityGroup.
     *
     * @return clicksLookback   * Ad Manager records click-through conversions
     * for users who have previously clicked on an Ad Manager ad within the
     * number of days that you set here (1 to 30 days), then visits a
     * webpage containing activity tags from this activity group. To be
     * counted, the ad needs to belong to one of the companies associated
     * with the activity group. This attribute is required to create an
     * activity group.
     */
    public java.lang.Integer getClicksLookback() {
        return clicksLookback;
    }

    /**
     * Sets the clicksLookback value for this ActivityGroup.
     *
     * @param clicksLookback   * Ad Manager records click-through conversions
     * for users who have previously clicked on an Ad Manager ad within the
     * number of days that you set here (1 to 30 days), then visits a
     * webpage containing activity tags from this activity group. To be
     * counted, the ad needs to belong to one of the companies associated
     * with the activity group. This attribute is required to create an
     * activity group.
     */
    public void setClicksLookback(java.lang.Integer clicksLookback) {
        this.clicksLookback = clicksLookback;
    }

    /**
     * Gets the status value for this ActivityGroup.
     *
     * @return status   * The status of this activity group. This attribute is
     * readonly.
     */
    public com.google.api.ads.admanager.axis.v202108.ActivityGroupStatus getStatus() {
        return status;
    }

    /**
     * Sets the status value for this ActivityGroup.
     *
     * @param status   * The status of this activity group. This attribute is
     * readonly.
     */
    public void setStatus(com.google.api.ads.admanager.axis.v202108.ActivityGroupStatus status) {
        this.status = status;
    }

    // Axis-generated cycle guard: remembers the object currently being compared
    // so recursive object graphs do not cause infinite recursion in equals().
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof ActivityGroup)) return false;
        ActivityGroup other = (ActivityGroup) obj;
        // NOTE(review): dead check — a null obj already failed the instanceof test above
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.id==null && other.getId()==null) ||
             (this.id!=null &&
              this.id.equals(other.getId()))) &&
            ((this.name==null && other.getName()==null) ||
             (this.name!=null &&
              this.name.equals(other.getName()))) &&
            ((this.companyIds==null && other.getCompanyIds()==null) ||
             (this.companyIds!=null &&
              java.util.Arrays.equals(this.companyIds, other.getCompanyIds()))) &&
            ((this.impressionsLookback==null && other.getImpressionsLookback()==null) ||
             (this.impressionsLookback!=null &&
              this.impressionsLookback.equals(other.getImpressionsLookback()))) &&
            ((this.clicksLookback==null && other.getClicksLookback()==null) ||
             (this.clicksLookback!=null &&
              this.clicksLookback.equals(other.getClicksLookback()))) &&
            ((this.status==null && other.getStatus()==null) ||
             (this.status!=null &&
              this.status.equals(other.getStatus())));
        __equalsCalc = null;
        return _equals;
    }

    // Axis-generated recursion guard for hashCode(), mirroring __equalsCalc.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getId() != null) {
            _hashCode += getId().hashCode();
        }
        if (getName() != null) {
            _hashCode += getName().hashCode();
        }
        if (getCompanyIds() != null) {
            // element-wise hash; nested arrays are skipped by the isArray() check
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCompanyIds());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCompanyIds(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getImpressionsLookback() != null) {
            _hashCode += getImpressionsLookback().hashCode();
        }
        if (getClicksLookback() != null) {
            _hashCode += getClicksLookback().hashCode();
        }
        if (getStatus() != null) {
            _hashCode += getStatus().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata describing how this bean maps to the SOAP/XML schema.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ActivityGroup.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ActivityGroup"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("id");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "id"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("name");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "name"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("companyIds");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "companyIds"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("impressionsLookback");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "impressionsLookback"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("clicksLookback");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "clicksLookback"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("status");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "status"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ActivityGroup.Status"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.location.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * A summary of the request sent by using <code>SearchPlaceIndexForSuggestions</code>. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/location-2020-11-19/SearchPlaceIndexForSuggestionsSummary" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class SearchPlaceIndexForSuggestionsSummary implements Serializable, Cloneable, StructuredPojo { /** * <p> * Contains the coordinates for the optional bias position specified in the request. * </p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; the second * number represents the Y coordinate, or latitude. * </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude <code>-123.1174</code> and * latitude <code>49.2847</code>. * </p> */ private java.util.List<Double> biasPosition; /** * <p> * The geospatial data provider attached to the place index resource specified in the request. 
Values can be one of * the following: * </p> * <ul> * <li> * <p> * Esri * </p> * </li> * <li> * <p> * Here * </p> * </li> * </ul> * <p> * For more information about data providers, see <a * href="https://docs.aws.amazon.com/location/latest/developerguide/what-is-data-provider.html">Amazon Location * Service data providers</a>. * </p> */ private String dataSource; /** * <p> * Contains the coordinates for the optional bounding box specified in the request. * </p> */ private java.util.List<Double> filterBBox; /** * <p> * Contains the optional country filter specified in the request. * </p> */ private java.util.List<String> filterCountries; /** * <p> * The preferred language used to return results. Matches the language in the request. The value is a valid <a * href="https://tools.ietf.org/search/bcp47">BCP 47</a> language tag, for example, <code>en</code> for English. * </p> */ private String language; /** * <p> * Contains the optional result count limit specified in the request. * </p> */ private Integer maxResults; /** * <p> * The free-form partial text input specified in the request. * </p> */ private String text; /** * <p> * Contains the coordinates for the optional bias position specified in the request. * </p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; the second * number represents the Y coordinate, or latitude. * </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude <code>-123.1174</code> and * latitude <code>49.2847</code>. * </p> * * @return Contains the coordinates for the optional bias position specified in the request.</p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; * the second number represents the Y coordinate, or latitude. 
* </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude * <code>-123.1174</code> and latitude <code>49.2847</code>. */ public java.util.List<Double> getBiasPosition() { return biasPosition; } /** * <p> * Contains the coordinates for the optional bias position specified in the request. * </p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; the second * number represents the Y coordinate, or latitude. * </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude <code>-123.1174</code> and * latitude <code>49.2847</code>. * </p> * * @param biasPosition * Contains the coordinates for the optional bias position specified in the request.</p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; the * second number represents the Y coordinate, or latitude. * </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude * <code>-123.1174</code> and latitude <code>49.2847</code>. */ public void setBiasPosition(java.util.Collection<Double> biasPosition) { if (biasPosition == null) { this.biasPosition = null; return; } this.biasPosition = new java.util.ArrayList<Double>(biasPosition); } /** * <p> * Contains the coordinates for the optional bias position specified in the request. * </p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; the second * number represents the Y coordinate, or latitude. * </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude <code>-123.1174</code> and * latitude <code>49.2847</code>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). 
Use * {@link #setBiasPosition(java.util.Collection)} or {@link #withBiasPosition(java.util.Collection)} if you want to * override the existing values. * </p> * * @param biasPosition * Contains the coordinates for the optional bias position specified in the request.</p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; the * second number represents the Y coordinate, or latitude. * </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude * <code>-123.1174</code> and latitude <code>49.2847</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withBiasPosition(Double... biasPosition) { if (this.biasPosition == null) { setBiasPosition(new java.util.ArrayList<Double>(biasPosition.length)); } for (Double ele : biasPosition) { this.biasPosition.add(ele); } return this; } /** * <p> * Contains the coordinates for the optional bias position specified in the request. * </p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; the second * number represents the Y coordinate, or latitude. * </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude <code>-123.1174</code> and * latitude <code>49.2847</code>. * </p> * * @param biasPosition * Contains the coordinates for the optional bias position specified in the request.</p> * <p> * This parameter contains a pair of numbers. The first number represents the X coordinate, or longitude; the * second number represents the Y coordinate, or latitude. * </p> * <p> * For example, <code>[-123.1174, 49.2847]</code> represents the position with longitude * <code>-123.1174</code> and latitude <code>49.2847</code>. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public SearchPlaceIndexForSuggestionsSummary withBiasPosition(java.util.Collection<Double> biasPosition) { setBiasPosition(biasPosition); return this; } /** * <p> * The geospatial data provider attached to the place index resource specified in the request. Values can be one of * the following: * </p> * <ul> * <li> * <p> * Esri * </p> * </li> * <li> * <p> * Here * </p> * </li> * </ul> * <p> * For more information about data providers, see <a * href="https://docs.aws.amazon.com/location/latest/developerguide/what-is-data-provider.html">Amazon Location * Service data providers</a>. * </p> * * @param dataSource * The geospatial data provider attached to the place index resource specified in the request. Values can be * one of the following:</p> * <ul> * <li> * <p> * Esri * </p> * </li> * <li> * <p> * Here * </p> * </li> * </ul> * <p> * For more information about data providers, see <a * href="https://docs.aws.amazon.com/location/latest/developerguide/what-is-data-provider.html">Amazon * Location Service data providers</a>. */ public void setDataSource(String dataSource) { this.dataSource = dataSource; } /** * <p> * The geospatial data provider attached to the place index resource specified in the request. Values can be one of * the following: * </p> * <ul> * <li> * <p> * Esri * </p> * </li> * <li> * <p> * Here * </p> * </li> * </ul> * <p> * For more information about data providers, see <a * href="https://docs.aws.amazon.com/location/latest/developerguide/what-is-data-provider.html">Amazon Location * Service data providers</a>. * </p> * * @return The geospatial data provider attached to the place index resource specified in the request. Values can be * one of the following:</p> * <ul> * <li> * <p> * Esri * </p> * </li> * <li> * <p> * Here * </p> * </li> * </ul> * <p> * For more information about data providers, see <a * href="https://docs.aws.amazon.com/location/latest/developerguide/what-is-data-provider.html">Amazon * Location Service data providers</a>. 
*/ public String getDataSource() { return this.dataSource; } /** * <p> * The geospatial data provider attached to the place index resource specified in the request. Values can be one of * the following: * </p> * <ul> * <li> * <p> * Esri * </p> * </li> * <li> * <p> * Here * </p> * </li> * </ul> * <p> * For more information about data providers, see <a * href="https://docs.aws.amazon.com/location/latest/developerguide/what-is-data-provider.html">Amazon Location * Service data providers</a>. * </p> * * @param dataSource * The geospatial data provider attached to the place index resource specified in the request. Values can be * one of the following:</p> * <ul> * <li> * <p> * Esri * </p> * </li> * <li> * <p> * Here * </p> * </li> * </ul> * <p> * For more information about data providers, see <a * href="https://docs.aws.amazon.com/location/latest/developerguide/what-is-data-provider.html">Amazon * Location Service data providers</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withDataSource(String dataSource) { setDataSource(dataSource); return this; } /** * <p> * Contains the coordinates for the optional bounding box specified in the request. * </p> * * @return Contains the coordinates for the optional bounding box specified in the request. */ public java.util.List<Double> getFilterBBox() { return filterBBox; } /** * <p> * Contains the coordinates for the optional bounding box specified in the request. * </p> * * @param filterBBox * Contains the coordinates for the optional bounding box specified in the request. */ public void setFilterBBox(java.util.Collection<Double> filterBBox) { if (filterBBox == null) { this.filterBBox = null; return; } this.filterBBox = new java.util.ArrayList<Double>(filterBBox); } /** * <p> * Contains the coordinates for the optional bounding box specified in the request. 
* </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setFilterBBox(java.util.Collection)} or {@link #withFilterBBox(java.util.Collection)} if you want to * override the existing values. * </p> * * @param filterBBox * Contains the coordinates for the optional bounding box specified in the request. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withFilterBBox(Double... filterBBox) { if (this.filterBBox == null) { setFilterBBox(new java.util.ArrayList<Double>(filterBBox.length)); } for (Double ele : filterBBox) { this.filterBBox.add(ele); } return this; } /** * <p> * Contains the coordinates for the optional bounding box specified in the request. * </p> * * @param filterBBox * Contains the coordinates for the optional bounding box specified in the request. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withFilterBBox(java.util.Collection<Double> filterBBox) { setFilterBBox(filterBBox); return this; } /** * <p> * Contains the optional country filter specified in the request. * </p> * * @return Contains the optional country filter specified in the request. */ public java.util.List<String> getFilterCountries() { return filterCountries; } /** * <p> * Contains the optional country filter specified in the request. * </p> * * @param filterCountries * Contains the optional country filter specified in the request. */ public void setFilterCountries(java.util.Collection<String> filterCountries) { if (filterCountries == null) { this.filterCountries = null; return; } this.filterCountries = new java.util.ArrayList<String>(filterCountries); } /** * <p> * Contains the optional country filter specified in the request. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). 
Use * {@link #setFilterCountries(java.util.Collection)} or {@link #withFilterCountries(java.util.Collection)} if you * want to override the existing values. * </p> * * @param filterCountries * Contains the optional country filter specified in the request. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withFilterCountries(String... filterCountries) { if (this.filterCountries == null) { setFilterCountries(new java.util.ArrayList<String>(filterCountries.length)); } for (String ele : filterCountries) { this.filterCountries.add(ele); } return this; } /** * <p> * Contains the optional country filter specified in the request. * </p> * * @param filterCountries * Contains the optional country filter specified in the request. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withFilterCountries(java.util.Collection<String> filterCountries) { setFilterCountries(filterCountries); return this; } /** * <p> * The preferred language used to return results. Matches the language in the request. The value is a valid <a * href="https://tools.ietf.org/search/bcp47">BCP 47</a> language tag, for example, <code>en</code> for English. * </p> * * @param language * The preferred language used to return results. Matches the language in the request. The value is a valid * <a href="https://tools.ietf.org/search/bcp47">BCP 47</a> language tag, for example, <code>en</code> for * English. */ public void setLanguage(String language) { this.language = language; } /** * <p> * The preferred language used to return results. Matches the language in the request. The value is a valid <a * href="https://tools.ietf.org/search/bcp47">BCP 47</a> language tag, for example, <code>en</code> for English. * </p> * * @return The preferred language used to return results. Matches the language in the request. 
The value is a valid * <a href="https://tools.ietf.org/search/bcp47">BCP 47</a> language tag, for example, <code>en</code> for * English. */ public String getLanguage() { return this.language; } /** * <p> * The preferred language used to return results. Matches the language in the request. The value is a valid <a * href="https://tools.ietf.org/search/bcp47">BCP 47</a> language tag, for example, <code>en</code> for English. * </p> * * @param language * The preferred language used to return results. Matches the language in the request. The value is a valid * <a href="https://tools.ietf.org/search/bcp47">BCP 47</a> language tag, for example, <code>en</code> for * English. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withLanguage(String language) { setLanguage(language); return this; } /** * <p> * Contains the optional result count limit specified in the request. * </p> * * @param maxResults * Contains the optional result count limit specified in the request. */ public void setMaxResults(Integer maxResults) { this.maxResults = maxResults; } /** * <p> * Contains the optional result count limit specified in the request. * </p> * * @return Contains the optional result count limit specified in the request. */ public Integer getMaxResults() { return this.maxResults; } /** * <p> * Contains the optional result count limit specified in the request. * </p> * * @param maxResults * Contains the optional result count limit specified in the request. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withMaxResults(Integer maxResults) { setMaxResults(maxResults); return this; } /** * <p> * The free-form partial text input specified in the request. * </p> * * @param text * The free-form partial text input specified in the request. 
*/ public void setText(String text) { this.text = text; } /** * <p> * The free-form partial text input specified in the request. * </p> * * @return The free-form partial text input specified in the request. */ public String getText() { return this.text; } /** * <p> * The free-form partial text input specified in the request. * </p> * * @param text * The free-form partial text input specified in the request. * @return Returns a reference to this object so that method calls can be chained together. */ public SearchPlaceIndexForSuggestionsSummary withText(String text) { setText(text); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getBiasPosition() != null) sb.append("BiasPosition: ").append("***Sensitive Data Redacted***").append(","); if (getDataSource() != null) sb.append("DataSource: ").append(getDataSource()).append(","); if (getFilterBBox() != null) sb.append("FilterBBox: ").append("***Sensitive Data Redacted***").append(","); if (getFilterCountries() != null) sb.append("FilterCountries: ").append(getFilterCountries()).append(","); if (getLanguage() != null) sb.append("Language: ").append(getLanguage()).append(","); if (getMaxResults() != null) sb.append("MaxResults: ").append(getMaxResults()).append(","); if (getText() != null) sb.append("Text: ").append("***Sensitive Data Redacted***"); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof SearchPlaceIndexForSuggestionsSummary == false) return false; SearchPlaceIndexForSuggestionsSummary other = (SearchPlaceIndexForSuggestionsSummary) obj; if (other.getBiasPosition() == null 
^ this.getBiasPosition() == null) return false; if (other.getBiasPosition() != null && other.getBiasPosition().equals(this.getBiasPosition()) == false) return false; if (other.getDataSource() == null ^ this.getDataSource() == null) return false; if (other.getDataSource() != null && other.getDataSource().equals(this.getDataSource()) == false) return false; if (other.getFilterBBox() == null ^ this.getFilterBBox() == null) return false; if (other.getFilterBBox() != null && other.getFilterBBox().equals(this.getFilterBBox()) == false) return false; if (other.getFilterCountries() == null ^ this.getFilterCountries() == null) return false; if (other.getFilterCountries() != null && other.getFilterCountries().equals(this.getFilterCountries()) == false) return false; if (other.getLanguage() == null ^ this.getLanguage() == null) return false; if (other.getLanguage() != null && other.getLanguage().equals(this.getLanguage()) == false) return false; if (other.getMaxResults() == null ^ this.getMaxResults() == null) return false; if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false) return false; if (other.getText() == null ^ this.getText() == null) return false; if (other.getText() != null && other.getText().equals(this.getText()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getBiasPosition() == null) ? 0 : getBiasPosition().hashCode()); hashCode = prime * hashCode + ((getDataSource() == null) ? 0 : getDataSource().hashCode()); hashCode = prime * hashCode + ((getFilterBBox() == null) ? 0 : getFilterBBox().hashCode()); hashCode = prime * hashCode + ((getFilterCountries() == null) ? 0 : getFilterCountries().hashCode()); hashCode = prime * hashCode + ((getLanguage() == null) ? 0 : getLanguage().hashCode()); hashCode = prime * hashCode + ((getMaxResults() == null) ? 
0 : getMaxResults().hashCode()); hashCode = prime * hashCode + ((getText() == null) ? 0 : getText().hashCode()); return hashCode; } @Override public SearchPlaceIndexForSuggestionsSummary clone() { try { return (SearchPlaceIndexForSuggestionsSummary) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.location.model.transform.SearchPlaceIndexForSuggestionsSummaryMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/******************************************************************************* * Copyright (c) 2013, Daniel Murphy * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. ******************************************************************************/ package org.jbox2d.common; import java.io.Serializable; /** * A 2D column vector */ public class Vec2 implements Serializable { private static final long serialVersionUID = 1L; public float x, y; public Vec2() { this(0, 0); } public Vec2(float x, float y) { this.x = x; this.y = y; } public Vec2(Vec2 toCopy) { this(toCopy.x, toCopy.y); } /** Zero out this vector. */ public final void setZero() { x = 0.0f; y = 0.0f; } /** Set the vector component-wise. 
*/ public final Vec2 set(float x, float y) { this.x = x; this.y = y; return this; } /** Set this vector to another vector. */ public final Vec2 set(Vec2 v) { this.x = v.x; this.y = v.y; return this; } /** Return the sum of this vector and another; does not alter either one. */ public final Vec2 add(Vec2 v) { return new Vec2(x + v.x, y + v.y); } /** Return the difference of this vector and another; does not alter either one. */ public final Vec2 sub(Vec2 v) { return new Vec2(x - v.x, y - v.y); } /** Return this vector multiplied by a scalar; does not alter this vector. */ public final Vec2 mul(float a) { return new Vec2(x * a, y * a); } /** Return the negation of this vector; does not alter this vector. */ public final Vec2 negate() { return new Vec2(-x, -y); } /** Flip the vector and return it - alters this vector. */ public final Vec2 negateLocal() { x = -x; y = -y; return this; } /** Add another vector to this one and returns result - alters this vector. */ public final Vec2 addLocal(Vec2 v) { x += v.x; y += v.y; return this; } /** Adds values to this vector and returns result - alters this vector. */ public final Vec2 addLocal(float x, float y) { this.x += x; this.y += y; return this; } /** Subtract another vector from this one and return result - alters this vector. */ public final Vec2 subLocal(Vec2 v) { x -= v.x; y -= v.y; return this; } /** Multiply this vector by a number and return result - alters this vector. */ public final Vec2 mulLocal(float a) { x *= a; y *= a; return this; } /** Get the skew vector such that dot(skew_vec, other) == cross(vec, other) */ public final Vec2 skew() { return new Vec2(-y, x); } /** Get the skew vector such that dot(skew_vec, other) == cross(vec, other) */ public final void skew(Vec2 out) { out.x = -y; out.y = x; } /** Return the length of this vector. */ public final float length() { return MathUtils.sqrt(x * x + y * y); } /** Return the squared length of this vector. 
*/ public final float lengthSquared() { return (x * x + y * y); } /** Normalize this vector and return the length before normalization. Alters this vector. */ public final float normalize() { float length = length(); if (length < Settings.EPSILON) { return 0f; } float invLength = 1.0f / length; x *= invLength; y *= invLength; return length; } /** True if the vector represents a pair of valid, non-infinite floating point numbers. */ public final boolean isValid() { return !Float.isNaN(x) && !Float.isInfinite(x) && !Float.isNaN(y) && !Float.isInfinite(y); } /** Return a new vector that has positive components. */ public final Vec2 abs() { return new Vec2(MathUtils.abs(x), MathUtils.abs(y)); } public final void absLocal() { x = MathUtils.abs(x); y = MathUtils.abs(y); } // @Override // annotation omitted for GWT-compatibility /** Return a copy of this vector. */ public final Vec2 clone() { return new Vec2(x, y); } @Override public final String toString() { return "(" + x + "," + y + ")"; } /* * Static */ public final static Vec2 abs(Vec2 a) { return new Vec2(MathUtils.abs(a.x), MathUtils.abs(a.y)); } public final static void absToOut(Vec2 a, Vec2 out) { out.x = MathUtils.abs(a.x); out.y = MathUtils.abs(a.y); } public final static float dot(final Vec2 a, final Vec2 b) { return a.x * b.x + a.y * b.y; } public final static float cross(final Vec2 a, final Vec2 b) { return a.x * b.y - a.y * b.x; } public final static Vec2 cross(Vec2 a, float s) { return new Vec2(s * a.y, -s * a.x); } public final static void crossToOut(Vec2 a, float s, Vec2 out) { final float tempy = -s * a.x; out.x = s * a.y; out.y = tempy; } public final static void crossToOutUnsafe(Vec2 a, float s, Vec2 out) { assert (out != a); out.x = s * a.y; out.y = -s * a.x; } public final static Vec2 cross(float s, Vec2 a) { return new Vec2(-s * a.y, s * a.x); } public final static void crossToOut(float s, Vec2 a, Vec2 out) { final float tempY = s * a.x; out.x = -s * a.y; out.y = tempY; } public final static void 
crossToOutUnsafe(float s, Vec2 a, Vec2 out) { assert (out != a); out.x = -s * a.y; out.y = s * a.x; } public final static void negateToOut(Vec2 a, Vec2 out) { out.x = -a.x; out.y = -a.y; } public final static Vec2 min(Vec2 a, Vec2 b) { return new Vec2(a.x < b.x ? a.x : b.x, a.y < b.y ? a.y : b.y); } public final static Vec2 max(Vec2 a, Vec2 b) { return new Vec2(a.x > b.x ? a.x : b.x, a.y > b.y ? a.y : b.y); } public final static void minToOut(Vec2 a, Vec2 b, Vec2 out) { out.x = a.x < b.x ? a.x : b.x; out.y = a.y < b.y ? a.y : b.y; } public final static void maxToOut(Vec2 a, Vec2 b, Vec2 out) { out.x = a.x > b.x ? a.x : b.x; out.y = a.y > b.y ? a.y : b.y; } /** * @see java.lang.Object#hashCode() */ @Override public int hashCode() { // automatically generated by Eclipse final int prime = 31; int result = 1; result = prime * result + Float.floatToIntBits(x); result = prime * result + Float.floatToIntBits(y); return result; } /** * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { // automatically generated by Eclipse if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Vec2 other = (Vec2) obj; if (Float.floatToIntBits(x) != Float.floatToIntBits(other.x)) return false; if (Float.floatToIntBits(y) != Float.floatToIntBits(other.y)) return false; return true; } }
package org.apache.lucene.util.automaton;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.SingleTermsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.UnicodeUtil;

/**
 * Immutable class holding compiled details for a given
 * Automaton.  The Automaton is deterministic, must not have
 * dead states but is not necessarily minimal.
 *
 * @lucene.experimental
 */
public class CompiledAutomaton {

  /**
   * Automata are compiled into different internal forms for the
   * most efficient execution depending upon the language they accept.
   */
  public enum AUTOMATON_TYPE {
    /** Automaton that accepts no strings. */
    NONE,
    /** Automaton that accepts all possible strings. */
    ALL,
    /** Automaton that accepts only a single fixed string. */
    SINGLE,
    /** Catch-all for any other automata. */
    NORMAL
  };

  /** If simplify is true this will be the "simplified" type; else, this is NORMAL. */
  public final AUTOMATON_TYPE type;

  /** For {@link AUTOMATON_TYPE#SINGLE} this is the singleton term; null otherwise. */
  public final BytesRef term;

  /**
   * Matcher for quickly determining if a byte[] is accepted.
   * Only valid for {@link AUTOMATON_TYPE#NORMAL}.
   */
  public final ByteRunAutomaton runAutomaton;

  /**
   * Two dimensional array of transitions, indexed by state
   * number for traversal. The state numbering is consistent with
   * {@link #runAutomaton}.
   * Only valid for {@link AUTOMATON_TYPE#NORMAL}.
   */
  public final Automaton automaton;

  /**
   * Shared common suffix accepted by the automaton. Only valid
   * for {@link AUTOMATON_TYPE#NORMAL}, and only when the
   * automaton accepts an infinite language.  This will be null
   * if the common suffix is length 0.
   */
  public final BytesRef commonSuffixRef;

  /**
   * Indicates if the automaton accepts a finite set of strings.
   * Null if this was not computed.
   * Only valid for {@link AUTOMATON_TYPE#NORMAL}.
   */
  public final Boolean finite;

  /** Which state, if any, accepts all suffixes, else -1. */
  public final int sinkState;

  /** Create this, passing simplify=true and finite=null, so that we try
   *  to simplify the automaton and determine if it is finite. */
  public CompiledAutomaton(Automaton automaton) {
    this(automaton, null, true);
  }

  /** Returns sink state, if present, else -1.  A sink state here is an
   *  accept state that loops back to itself over the full byte range
   *  (0..0xff), i.e. a state that accepts every suffix. */
  private static int findSinkState(Automaton automaton) {
    int numStates = automaton.getNumStates();
    Transition t = new Transition();
    int foundState = -1;
    for (int s=0;s<numStates;s++) {
      if (automaton.isAccept(s)) {
        int count = automaton.initTransition(s, t);
        boolean isSinkState = false;
        for(int i=0;i<count;i++) {
          automaton.getNextTransition(t);
          // Self-loop covering every possible byte value marks a sink:
          if (t.dest == s && t.min == 0 && t.max == 0xff) {
            isSinkState = true;
            break;
          }
        }
        if (isSinkState) {
          foundState = s;
          break;
        }
      }
    }
    return foundState;
  }

  /** Create this.  If finite is null, we use {@link Operations#isFinite}
   *  to determine whether it is finite.  If simplify is true, we run
   *  possibly expensive operations to determine if the automaton is one
   *  of the cases in {@link CompiledAutomaton.AUTOMATON_TYPE}. */
  public CompiledAutomaton(Automaton automaton, Boolean finite, boolean simplify) {
    this(automaton, finite, simplify, Operations.DEFAULT_MAX_DETERMINIZED_STATES, false);
  }

  /** Create this.  If finite is null, we use {@link Operations#isFinite}
   *  to determine whether it is finite.  If simplify is true, we run
   *  possibly expensive operations to determine if the automaton is one
   *  of the cases in {@link CompiledAutomaton.AUTOMATON_TYPE}.  If simplify
   *  requires determinizing the automaton then only maxDeterminizedStates
   *  will be created.  Any more than that will cause a
   *  TooComplexToDeterminizeException. */
  public CompiledAutomaton(Automaton automaton, Boolean finite, boolean simplify,
      int maxDeterminizedStates, boolean isBinary) {
    if (automaton.getNumStates() == 0) {
      // Normalize the empty automaton to a single (non-accept) initial state:
      automaton = new Automaton();
      automaton.createState();
    }

    if (simplify) {

      // Test whether the automaton is a "simple" form and
      // if so, don't create a runAutomaton.  Note that on a
      // large automaton these tests could be costly:

      if (Operations.isEmpty(automaton)) {
        // matches nothing
        type = AUTOMATON_TYPE.NONE;
        term = null;
        commonSuffixRef = null;
        runAutomaton = null;
        this.automaton = null;
        this.finite = null;
        sinkState = -1;
        return;
      }

      boolean isTotal;

      // NOTE: only approximate, because automaton may not be minimal:
      if (isBinary) {
        isTotal = Operations.isTotal(automaton, 0, 0xff);
      } else {
        isTotal = Operations.isTotal(automaton);
      }

      if (isTotal) {
        // matches all possible strings
        type = AUTOMATON_TYPE.ALL;
        term = null;
        commonSuffixRef = null;
        runAutomaton = null;
        this.automaton = null;
        this.finite = null;
        sinkState = -1;
        return;
      }

      automaton = Operations.determinize(automaton, maxDeterminizedStates);

      IntsRef singleton = Operations.getSingleton(automaton);

      if (singleton != null) {
        // matches a fixed string
        type = AUTOMATON_TYPE.SINGLE;
        commonSuffixRef = null;
        runAutomaton = null;
        this.automaton = null;
        this.finite = null;

        if (isBinary) {
          term = StringHelper.intsRefToBytesRef(singleton);
        } else {
          term = new BytesRef(UnicodeUtil.newString(singleton.ints, singleton.offset, singleton.length));
        }
        sinkState = -1;
        return;
      }
    }

    // No simplification applied (or simplify == false): full NORMAL form.
    type = AUTOMATON_TYPE.NORMAL;
    term = null;

    if (finite == null) {
      this.finite = Operations.isFinite(automaton);
    } else {
      this.finite = finite;
    }

    Automaton binary;
    if (isBinary) {
      // Caller already built binary automaton themselves, e.g. PrefixQuery
      // does this since it can be provided with a binary (not necessarily
      // UTF8!) term:
      binary = automaton;
    } else {
      // Incoming automaton is unicode, and we must convert to UTF8 to match what's in the index:
      binary = new UTF32ToUTF8().convert(automaton);
    }

    if (this.finite) {
      commonSuffixRef = null;
    } else {
      // NOTE: this is a very costly operation!  We should test if it's
      // really warranted in practice... we could do a fast match by
      // looking for a sink state (which means it has no common suffix).
      // Or maybe we shouldn't do it when simplify is false?:
      BytesRef suffix = Operations.getCommonSuffixBytesRef(binary, maxDeterminizedStates);
      if (suffix.length == 0) {
        commonSuffixRef = null;
      } else {
        commonSuffixRef = suffix;
      }
    }

    // This will determinize the binary automaton for us:
    runAutomaton = new ByteRunAutomaton(binary, true, maxDeterminizedStates);

    this.automaton = runAutomaton.automaton;

    // TODO: this is a bit fragile because if the automaton is not minimized
    // there could be more than 1 sink state but auto-prefix will fail to run
    // for those:
    sinkState = findSinkState(this.automaton);
  }

  // Scratch transition shared by addTail/floor; because it is mutated in
  // place, floor() is not safe for concurrent use from multiple threads.
  private Transition transition = new Transition();

  // Walks down from `state`, taking the largest transition strictly below
  // leadLabel at position idx, then greedily following the maximum label of
  // the last transition until an accept state with no outgoing transitions
  // is reached; returns the resulting (largest-below) term bytes.
  private BytesRef addTail(int state, BytesRefBuilder term, int idx, int leadLabel) {

    // Find biggest transition that's < label
    // TODO: use binary search here
    int maxIndex = -1;
    int numTransitions = automaton.initTransition(state, transition);
    for(int i=0;i<numTransitions;i++) {
      automaton.getNextTransition(transition);
      if (transition.min < leadLabel) {
        maxIndex = i;
      } else {
        // Transitions are always sorted
        break;
      }
    }

    // Caller guarantees at least one transition below leadLabel exists:
    assert maxIndex != -1;

    automaton.getTransition(state, maxIndex, transition);

    // Append floorLabel
    final int floorLabel;
    if (transition.max > leadLabel-1) {
      floorLabel = leadLabel-1;
    } else {
      floorLabel = transition.max;
    }
    term.grow(1+idx);
    term.setByteAt(idx, (byte) floorLabel);

    state = transition.dest;
    idx++;

    // Push down to last accept state
    while (true) {
      numTransitions = automaton.getNumTransitions(state);
      if (numTransitions == 0) {
        // Dead-end accept state: the term built so far is the floor term.
        assert runAutomaton.isAccept(state);
        term.setLength(idx);
        return term.get();
      } else {
        // We are pushing "top" -- so get last label of
        // last transition:
        automaton.getTransition(state, numTransitions-1, transition);
        term.grow(1+idx);
        term.setByteAt(idx, (byte) transition.max);
        state = transition.dest;
        idx++;
      }
    }
  }

  // TODO: should this take startTerm too?  This way
  // Terms.intersect could forward to this method if type !=
  // NORMAL:

  /** Return a {@link TermsEnum} intersecting the provided {@link Terms}
   *  with the terms accepted by this automaton. */
  public TermsEnum getTermsEnum(Terms terms) throws IOException {
    switch(type) {
    case NONE:
      return TermsEnum.EMPTY;
    case ALL:
      return terms.iterator();
    case SINGLE:
      return new SingleTermsEnum(terms.iterator(), term);
    case NORMAL:
      return terms.intersect(this, null);
    default:
      // unreachable
      throw new RuntimeException("unhandled case");
    }
  }

  /** Finds largest term accepted by this Automaton, that's
   *  &lt;= the provided input term.  The result is placed in
   *  output; it's fine for output and input to point to
   *  the same bytes.  The returned result is either the
   *  provided output, or null if there is no floor term
   *  (ie, the provided input term is before the first term
   *  accepted by this Automaton). */
  public BytesRef floor(BytesRef input, BytesRefBuilder output) {

    int state = runAutomaton.getInitialState();

    // Special case empty string:
    if (input.length == 0) {
      if (runAutomaton.isAccept(state)) {
        output.clear();
        return output.get();
      } else {
        return null;
      }
    }

    // Stack of states visited along the current path, for backtracking:
    final List<Integer> stack = new ArrayList<>();

    int idx = 0;
    while (true) {
      int label = input.bytes[input.offset + idx] & 0xff;
      int nextState = runAutomaton.step(state, label);

      if (idx == input.length-1) {
        if (nextState != -1 && runAutomaton.isAccept(nextState)) {
          // Input string is accepted
          output.grow(1+idx);
          output.setByteAt(idx, (byte) label);
          output.setLength(input.length);
          return output.get();
        } else {
          // Force the backtracking path below:
          nextState = -1;
        }
      }

      if (nextState == -1) {

        // Pop back to a state that has a transition
        // <= our label:
        while (true) {
          int numTransitions = automaton.getNumTransitions(state);
          if (numTransitions == 0) {
            assert runAutomaton.isAccept(state);
            output.setLength(idx);
            return output.get();
          } else {
            automaton.getTransition(state, 0, transition);

            if (label-1 < transition.min) {
              // Even label-1 is below this state's smallest transition;
              // either accept here or pop one more state:

              if (runAutomaton.isAccept(state)) {
                output.setLength(idx);
                return output.get();
              }
              // pop
              if (stack.size() == 0) {
                // No floor term exists:
                return null;
              } else {
                state = stack.remove(stack.size()-1);
                idx--;
                label = input.bytes[input.offset + idx] & 0xff;
              }
            } else {
              // This state has a transition we can take with a label < ours:
              break;
            }
          }
        }

        return addTail(state, output, idx, label);

      } else {
        output.grow(1+idx);
        output.setByteAt(idx, (byte) label);
        stack.add(state);
        state = nextState;
        idx++;
      }
    }
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((runAutomaton == null) ? 0 : runAutomaton.hashCode());
    result = prime * result + ((term == null) ? 0 : term.hashCode());
    result = prime * result + ((type == null) ? 0 : type.hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) return true;
    if (obj == null) return false;
    if (getClass() != obj.getClass()) return false;
    CompiledAutomaton other = (CompiledAutomaton) obj;
    if (type != other.type) return false;
    if (type == AUTOMATON_TYPE.SINGLE) {
      if (!term.equals(other.term)) return false;
    } else if (type == AUTOMATON_TYPE.NORMAL) {
      if (!runAutomaton.equals(other.runAutomaton)) return false;
    }
    return true;
  }
}
/* * The MIT License * * Copyright (c) 2013, CloudBees, Inc., Stephen Connolly. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package jenkins.plugins.git; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.StandardListBoxModel; import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials; import com.cloudbees.plugins.credentials.domains.URIRequirementBuilder; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import hudson.Extension; import hudson.RestrictedSince; import hudson.Util; import hudson.model.Descriptor; import hudson.model.Item; import hudson.model.ParameterValue; import hudson.model.Queue; import hudson.model.queue.Tasks; import hudson.plugins.git.GitSCM; import hudson.plugins.git.GitStatus; import hudson.plugins.git.browser.GitRepositoryBrowser; import hudson.plugins.git.extensions.GitSCMExtension; import hudson.plugins.git.extensions.GitSCMExtensionDescriptor; import hudson.scm.RepositoryBrowser; import hudson.scm.SCM; import hudson.security.ACL; import hudson.security.ACLContext; import hudson.util.FormValidation; import hudson.util.ListBoxModel; import java.io.ObjectStreamException; import java.io.PrintWriter; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; import jenkins.model.Jenkins; import jenkins.plugins.git.traits.BranchDiscoveryTrait; import jenkins.plugins.git.traits.GitBrowserSCMSourceTrait; import jenkins.plugins.git.traits.GitSCMExtensionTrait; import jenkins.plugins.git.traits.GitSCMExtensionTraitDescriptor; import jenkins.plugins.git.traits.GitToolSCMSourceTrait; import jenkins.plugins.git.traits.IgnoreOnPushNotificationTrait; import jenkins.plugins.git.traits.RefSpecsSCMSourceTrait; import 
jenkins.plugins.git.traits.RemoteNameSCMSourceTrait; import jenkins.scm.api.SCMEvent; import jenkins.scm.api.SCMHead; import jenkins.scm.api.SCMHeadCategory; import jenkins.scm.api.SCMHeadEvent; import jenkins.scm.api.SCMHeadObserver; import jenkins.scm.api.SCMNavigator; import jenkins.scm.api.SCMRevision; import jenkins.scm.api.SCMSource; import jenkins.scm.api.SCMSourceDescriptor; import jenkins.scm.api.SCMSourceOwner; import jenkins.scm.api.SCMSourceOwners; import jenkins.scm.api.trait.SCMHeadPrefilter; import jenkins.scm.api.trait.SCMSourceTrait; import jenkins.scm.api.trait.SCMSourceTraitDescriptor; import jenkins.scm.api.trait.SCMTrait; import jenkins.scm.impl.TagSCMHeadCategory; import jenkins.scm.impl.UncategorizedSCMHeadCategory; import jenkins.scm.impl.form.NamedArrayList; import jenkins.scm.impl.trait.Discovery; import jenkins.scm.impl.trait.Selection; import jenkins.scm.impl.trait.WildcardSCMHeadFilterTrait; import org.acegisecurity.context.SecurityContext; import org.acegisecurity.context.SecurityContextHolder; import org.apache.commons.lang.StringUtils; import org.eclipse.jgit.transport.RefSpec; import org.eclipse.jgit.transport.URIish; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.gitclient.GitClient; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.DoNotUse; import org.kohsuke.accmod.restrictions.NoExternalUse; import org.kohsuke.stapler.AncestorInPath; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; /** * A {@link SCMSource} that discovers branches in a git repository. 
*/ public class GitSCMSource extends AbstractGitSCMSource { private static final String DEFAULT_INCLUDES = "*"; private static final String DEFAULT_EXCLUDES = ""; public static final Logger LOGGER = Logger.getLogger(GitSCMSource.class.getName()); private final String remote; @CheckForNull private String credentialsId; @Deprecated private transient String remoteName; @Deprecated private transient String rawRefSpecs; @Deprecated private transient String includes; @Deprecated private transient String excludes; @Deprecated private transient boolean ignoreOnPushNotifications; @Deprecated private transient GitRepositoryBrowser browser; @Deprecated private transient String gitTool; @Deprecated private transient List<GitSCMExtension> extensions; /** * Holds all the behavioural traits of this source. * * @since 3.4.0 */ private List<SCMSourceTrait> traits = new ArrayList<>(); @DataBoundConstructor public GitSCMSource(String remote) { this.remote = remote; } @DataBoundSetter public void setCredentialsId(@CheckForNull String credentialsId) { this.credentialsId = credentialsId; } @DataBoundSetter public void setTraits(List<SCMSourceTrait> traits) { this.traits = SCMTrait.asSetList(traits); } @Deprecated @Restricted(NoExternalUse.class) @RestrictedSince("3.4.0") public GitSCMSource(String id, String remote, String credentialsId, String remoteName, String rawRefSpecs, String includes, String excludes, boolean ignoreOnPushNotifications) { super(id); this.remote = remote; this.credentialsId = credentialsId; List<SCMSourceTrait> traits = new ArrayList<>(); traits.add(new BranchDiscoveryTrait()); if (!DEFAULT_INCLUDES.equals(includes) || !DEFAULT_EXCLUDES.equals(excludes)) { traits.add(new WildcardSCMHeadFilterTrait(includes, excludes)); } if (!DEFAULT_REMOTE_NAME.equals(remoteName) && StringUtils.isNotBlank(remoteName)) { traits.add(new RemoteNameSCMSourceTrait(remoteName)); } if (ignoreOnPushNotifications) { traits.add(new IgnoreOnPushNotificationTrait()); } RefSpecsSCMSourceTrait 
trait = asRefSpecsSCMSourceTrait(rawRefSpecs, remoteName); if (trait != null) { traits.add(trait); } setTraits(traits); } @Deprecated @Restricted(NoExternalUse.class) @RestrictedSince("3.4.0") public GitSCMSource(String id, String remote, String credentialsId, String includes, String excludes, boolean ignoreOnPushNotifications) { this(id, remote, credentialsId, null, null, includes, excludes, ignoreOnPushNotifications); } private Object readResolve() throws ObjectStreamException { if (traits == null) { List<SCMSourceTrait> traits = new ArrayList<>(); traits.add(new BranchDiscoveryTrait()); if ((includes != null && !DEFAULT_INCLUDES.equals(includes)) || (excludes != null && !DEFAULT_EXCLUDES.equals(excludes))) { traits.add(new WildcardSCMHeadFilterTrait(includes, excludes)); } if (extensions != null) { EXTENSIONS: for (GitSCMExtension extension : extensions) { for (SCMSourceTraitDescriptor d : SCMSourceTrait.all()) { if (d instanceof GitSCMExtensionTraitDescriptor) { GitSCMExtensionTraitDescriptor descriptor = (GitSCMExtensionTraitDescriptor) d; if (descriptor.getExtensionClass().isInstance(extension)) { try { SCMSourceTrait trait = descriptor.convertToTrait(extension); if (trait != null) { traits.add(trait); continue EXTENSIONS; } } catch (UnsupportedOperationException e) { LOGGER.log(Level.WARNING, "Could not convert " + extension.getClass().getName() + " to a trait", e); } } } LOGGER.log(Level.FINE, "Could not convert {0} to a trait (likely because this option does not " + "make sense for a GitSCMSource)", getClass().getName()); } } } if (remoteName != null && !DEFAULT_REMOTE_NAME.equals(remoteName) && StringUtils.isNotBlank(remoteName)) { traits.add(new RemoteNameSCMSourceTrait(remoteName)); } if (StringUtils.isNotBlank(gitTool)) { traits.add(new GitToolSCMSourceTrait(gitTool)); } if (browser != null) { traits.add(new GitBrowserSCMSourceTrait(browser)); } if (ignoreOnPushNotifications) { traits.add(new IgnoreOnPushNotificationTrait()); } RefSpecsSCMSourceTrait 
trait = asRefSpecsSCMSourceTrait(rawRefSpecs, remoteName); if (trait != null) { traits.add(trait); } setTraits(traits); } return this; } private RefSpecsSCMSourceTrait asRefSpecsSCMSourceTrait(String rawRefSpecs, String remoteName) { if (rawRefSpecs != null) { Set<String> defaults = new HashSet<>(); defaults.add("+refs/heads/*:refs/remotes/origin/*"); if (remoteName != null) { defaults.add("+refs/heads/*:refs/remotes/"+remoteName+"/*"); } if (!defaults.contains(rawRefSpecs.trim())) { List<String> templates = new ArrayList<>(); for (String rawRefSpec : rawRefSpecs.split(" ")) { if (StringUtils.isBlank(rawRefSpec)) { continue; } if (defaults.contains(rawRefSpec)) { templates.add(AbstractGitSCMSource.REF_SPEC_DEFAULT); } else { templates.add(rawRefSpec); } } if (!templates.isEmpty()) { return new RefSpecsSCMSourceTrait(templates.toArray(new String[templates.size()])); } } } return null; } @Deprecated @Restricted(DoNotUse.class) @RestrictedSince("3.4.0") public boolean isIgnoreOnPushNotifications() { return SCMTrait.find(traits, IgnoreOnPushNotificationTrait.class) != null; } // For Stapler only @Restricted(DoNotUse.class) @DataBoundSetter public void setBrowser(GitRepositoryBrowser browser) { List<SCMSourceTrait> traits = new ArrayList<>(this.traits); for (Iterator<SCMSourceTrait> iterator = traits.iterator(); iterator.hasNext(); ) { if (iterator.next() instanceof GitBrowserSCMSourceTrait) { iterator.remove(); } } if (browser != null) { traits.add(new GitBrowserSCMSourceTrait(browser)); } setTraits(traits); } // For Stapler only @Restricted(DoNotUse.class) @DataBoundSetter public void setGitTool(String gitTool) { List<SCMSourceTrait> traits = new ArrayList<>(this.traits); gitTool = Util.fixEmptyAndTrim(gitTool); for (Iterator<SCMSourceTrait> iterator = traits.iterator(); iterator.hasNext(); ) { if (iterator.next() instanceof GitToolSCMSourceTrait) { iterator.remove(); } } if (gitTool != null) { traits.add(new GitToolSCMSourceTrait(gitTool)); } setTraits(traits); } // 
For Stapler only @Restricted(DoNotUse.class) @DataBoundSetter @Deprecated public void setExtensions(@CheckForNull List<GitSCMExtension> extensions) { List<SCMSourceTrait> traits = new ArrayList<>(this.traits); for (Iterator<SCMSourceTrait> iterator = traits.iterator(); iterator.hasNext(); ) { if (iterator.next() instanceof GitSCMExtensionTrait) { iterator.remove(); } } EXTENSIONS: for (GitSCMExtension extension : Util.fixNull(extensions)) { for (SCMSourceTraitDescriptor d : SCMSourceTrait.all()) { if (d instanceof GitSCMExtensionTraitDescriptor) { GitSCMExtensionTraitDescriptor descriptor = (GitSCMExtensionTraitDescriptor) d; if (descriptor.getExtensionClass().isInstance(extension)) { try { SCMSourceTrait trait = descriptor.convertToTrait(extension); if (trait != null) { traits.add(trait); continue EXTENSIONS; } } catch (UnsupportedOperationException e) { LOGGER.log(Level.WARNING, "Could not convert " + extension.getClass().getName() + " to a trait", e); } } } LOGGER.log(Level.FINE, "Could not convert {0} to a trait (likely because this option does not " + "make sense for a GitSCMSource)", extension.getClass().getName()); } } setTraits(traits); } @Override public String getCredentialsId() { return credentialsId; } public String getRemote() { return remote; } @Deprecated @Restricted(DoNotUse.class) @RestrictedSince("3.4.0") public String getRawRefSpecs() { String remoteName = null; RefSpecsSCMSourceTrait refSpecs = null; for (SCMSourceTrait trait : traits) { if (trait instanceof RemoteNameSCMSourceTrait) { remoteName = ((RemoteNameSCMSourceTrait) trait).getRemoteName(); if (refSpecs != null) break; } if (trait instanceof RefSpecsSCMSourceTrait) { refSpecs = (RefSpecsSCMSourceTrait) trait; if (remoteName != null) break; } } if (remoteName == null) { remoteName = AbstractGitSCMSource.DEFAULT_REMOTE_NAME; } if (refSpecs == null) { return AbstractGitSCMSource.REF_SPEC_DEFAULT .replaceAll(AbstractGitSCMSource.REF_SPEC_REMOTE_NAME_PLACEHOLDER, remoteName); } StringBuilder 
result = new StringBuilder(); boolean first = true; Pattern placeholder = Pattern.compile(AbstractGitSCMSource.REF_SPEC_REMOTE_NAME_PLACEHOLDER); for (String template : refSpecs.asStrings()) { if (first) { first = false; } else { result.append(' '); } result.append(placeholder.matcher(template).replaceAll(remoteName)); } return result.toString(); } @Deprecated @Override @Restricted(DoNotUse.class) @RestrictedSince("3.4.0") protected List<RefSpec> getRefSpecs() { return new GitSCMSourceContext<>(null, SCMHeadObserver.none()).withTraits(traits).asRefSpecs(); } @NonNull @Override public List<SCMSourceTrait> getTraits() { return traits; } @Symbol("git") @Extension public static class DescriptorImpl extends SCMSourceDescriptor { @Override public String getDisplayName() { return Messages.GitSCMSource_DisplayName(); } public ListBoxModel doFillCredentialsIdItems(@AncestorInPath Item context, @QueryParameter String remote, @QueryParameter String credentialsId) { if (context == null && !Jenkins.get().hasPermission(Jenkins.ADMINISTER) || context != null && !context.hasPermission(Item.EXTENDED_READ)) { return new StandardListBoxModel().includeCurrentValue(credentialsId); } return new StandardListBoxModel() .includeEmptyValue() .includeMatchingAs( context instanceof Queue.Task ? 
Tasks.getAuthenticationOf((Queue.Task)context) : ACL.SYSTEM, context, StandardUsernameCredentials.class, URIRequirementBuilder.fromUri(remote).build(), GitClient.CREDENTIALS_MATCHER) .includeCurrentValue(credentialsId); } public FormValidation doCheckCredentialsId(@AncestorInPath Item context, @QueryParameter String remote, @QueryParameter String value) { if (context == null && !Jenkins.get().hasPermission(Jenkins.ADMINISTER) || context != null && !context.hasPermission(Item.EXTENDED_READ)) { return FormValidation.ok(); } value = Util.fixEmptyAndTrim(value); if (value == null) { return FormValidation.ok(); } remote = Util.fixEmptyAndTrim(remote); if (remote == null) // not set, can't check { return FormValidation.ok(); } for (ListBoxModel.Option o : CredentialsProvider.listCredentials( StandardUsernameCredentials.class, context, context instanceof Queue.Task ? Tasks.getAuthenticationOf((Queue.Task) context) : ACL.SYSTEM, URIRequirementBuilder.fromUri(remote).build(), GitClient.CREDENTIALS_MATCHER)) { if (StringUtils.equals(value, o.value)) { // TODO check if this type of credential is acceptable to the Git client or does it merit warning // NOTE: we would need to actually lookup the credential to do the check, which may require // fetching the actual credential instance from a remote credentials store. 
Perhaps this is // not required return FormValidation.ok(); } } // no credentials available, can't check return FormValidation.warning("Cannot find any credentials with id " + value); } @Deprecated @Restricted(NoExternalUse.class) @RestrictedSince("3.4.0") public GitSCM.DescriptorImpl getSCMDescriptor() { return (GitSCM.DescriptorImpl)Jenkins.getActiveInstance().getDescriptor(GitSCM.class); } @Deprecated @Restricted(DoNotUse.class) @RestrictedSince("3.4.0") public List<GitSCMExtensionDescriptor> getExtensionDescriptors() { return getSCMDescriptor().getExtensionDescriptors(); } @Deprecated @Restricted(DoNotUse.class) @RestrictedSince("3.4.0") public List<Descriptor<RepositoryBrowser<?>>> getBrowserDescriptors() { return getSCMDescriptor().getBrowserDescriptors(); } @Deprecated @Restricted(DoNotUse.class) @RestrictedSince("3.4.0") public boolean showGitToolOptions() { return getSCMDescriptor().showGitToolOptions(); } @Deprecated @Restricted(DoNotUse.class) @RestrictedSince("3.4.0") public ListBoxModel doFillGitToolItems() { return getSCMDescriptor().doFillGitToolItems(); } public List<NamedArrayList<? extends SCMSourceTraitDescriptor>> getTraitsDescriptorLists() { List<NamedArrayList<? 
extends SCMSourceTraitDescriptor>> result = new ArrayList<>(); List<SCMSourceTraitDescriptor> descriptors = SCMSourceTrait._for(this, GitSCMSourceContext.class, GitSCMBuilder.class); NamedArrayList.select(descriptors, Messages.within_Repository(), NamedArrayList.anyOf( NamedArrayList.withAnnotation(Selection.class), NamedArrayList.withAnnotation(Discovery.class) ), true, result); NamedArrayList.select(descriptors, Messages.additional(), null, true, result); return result; } public List<SCMSourceTrait> getTraitsDefaults() { return Collections.<SCMSourceTrait>singletonList(new BranchDiscoveryTrait()); } @NonNull @Override protected SCMHeadCategory[] createCategories() { return new SCMHeadCategory[]{UncategorizedSCMHeadCategory.DEFAULT, TagSCMHeadCategory.DEFAULT}; } } @Extension public static class ListenerImpl extends GitStatus.Listener { @Override public List<GitStatus.ResponseContributor> onNotifyCommit(String origin, URIish uri, @Nullable final String sha1, List<ParameterValue> buildParameters, String... branches) { List<GitStatus.ResponseContributor> result = new ArrayList<>(); final boolean notified[] = {false}; // run in high privilege to see all the projects anonymous users don't see. // this is safe because when we actually schedule a build, it's a build that can // happen at some random time anyway. 
try (ACLContext context = ACL.as(ACL.SYSTEM)) { if (branches.length > 0) { final URIish u = uri; for (final String branch: branches) { SCMHeadEvent.fireNow(new SCMHeadEvent<String>(SCMEvent.Type.UPDATED, branch, origin){ @Override public boolean isMatch(@NonNull SCMNavigator navigator) { return false; } @NonNull @Override public String getSourceName() { // we will never be called here as do not match any navigator return u.getHumanishName(); } @Override public boolean isMatch(SCMSource source) { if (source instanceof GitSCMSource) { GitSCMSource git = (GitSCMSource) source; GitSCMSourceContext ctx = new GitSCMSourceContext<>(null, SCMHeadObserver.none()) .withTraits(git.getTraits()); if (ctx.ignoreOnPushNotifications()) { return false; } URIish remote; try { remote = new URIish(git.getRemote()); } catch (URISyntaxException e) { // ignore return false; } if (GitStatus.looselyMatches(u, remote)) { notified[0] = true; return true; } return false; } return false; } @NonNull @Override public Map<SCMHead, SCMRevision> heads(@NonNull SCMSource source) { if (source instanceof GitSCMSource) { GitSCMSource git = (GitSCMSource) source; GitSCMSourceContext<?,?> ctx = new GitSCMSourceContext<>(null, SCMHeadObserver.none()) .withTraits(git.getTraits()); if (ctx.ignoreOnPushNotifications()) { return Collections.emptyMap(); } URIish remote; try { remote = new URIish(git.getRemote()); } catch (URISyntaxException e) { // ignore return Collections.emptyMap(); } if (GitStatus.looselyMatches(u, remote)) { GitBranchSCMHead head = new GitBranchSCMHead(branch); for (SCMHeadPrefilter filter: ctx.prefilters()) { if (filter.isExcluded(git, head)) { return Collections.emptyMap(); } } return Collections.<SCMHead, SCMRevision>singletonMap(head, sha1 != null ? 
new GitBranchSCMRevision(head, sha1) : null); } } return Collections.emptyMap(); } @Override public boolean isMatch(@NonNull SCM scm) { return false; // TODO rewrite the legacy event system to fire through SCM API } }); } } else { for (final SCMSourceOwner owner : SCMSourceOwners.all()) { for (SCMSource source : owner.getSCMSources()) { if (source instanceof GitSCMSource) { GitSCMSource git = (GitSCMSource) source; GitSCMSourceContext<?, ?> ctx = new GitSCMSourceContext<>(null, SCMHeadObserver.none()) .withTraits(git.getTraits()); if (ctx.ignoreOnPushNotifications()) { continue; } URIish remote; try { remote = new URIish(git.getRemote()); } catch (URISyntaxException e) { // ignore continue; } if (GitStatus.looselyMatches(uri, remote)) { LOGGER.info("Triggering the indexing of " + owner.getFullDisplayName() + " as a result of event from " + origin); owner.onSCMSourceUpdated(source); result.add(new GitStatus.ResponseContributor() { @Override public void addHeaders(StaplerRequest req, StaplerResponse rsp) { rsp.addHeader("Triggered", owner.getAbsoluteUrl()); } @Override public void writeBody(PrintWriter w) { w.println("Scheduled indexing of " + owner.getFullDisplayName()); } }); notified[0] = true; } } } } } } if (!notified[0]) { result.add(new GitStatus.MessageResponseContributor("No Git consumers using SCM API plugin for: " + uri.toString())); } return result; } } }
/*
 * Copyright 2012 - 2016 Anton Tananaev (anton@traccar.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.traccar.protocol;

import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.traccar.BaseProtocolDecoder;
import org.traccar.Context;
import org.traccar.DeviceSession;
import org.traccar.helper.Checksum;
import org.traccar.helper.DateBuilder;
import org.traccar.helper.Parser;
import org.traccar.helper.PatternBuilder;
import org.traccar.model.Position;

import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;

/**
 * Decoder for the Meiligao GPS tracker protocol. Frames start with "@@",
 * carry a length, a 7-byte BCD-packed device id, a command word, an ASCII
 * payload and a CRC16 trailer; the ASCII payload is parsed with the
 * regex patterns below.
 */
public class MeiligaoProtocolDecoder extends BaseProtocolDecoder {

    public MeiligaoProtocolDecoder(MeiligaoProtocol protocol) {
        super(protocol);
    }

    // Regular position report: GPRMC-like fixed part followed by optional
    // pipe-separated extensions (hdop, altitude, state, ADC, cell, gsm,
    // odometer, rfid). The nested optional groups mirror the fact that a
    // device may truncate the sentence at any extension boundary.
    private static final Pattern PATTERN = new PatternBuilder()
            .number("(dd)(dd)(dd).?(d+)?,")         // time (hhmmss, optional millis)
            .expression("([AV]),")                  // validity
            .number("(d+)(dd.d+),")                 // latitude
            .expression("([NS]),")
            .number("(d+)(dd.d+),")                 // longitude
            .expression("([EW]),")
            .number("(d+.?d*)?,")                   // speed
            .number("(d+.?d*)?,")                   // course
            .number("(dd)(dd)(dd)")                 // date (ddmmyy)
            .expression("[^\\|]*")
            .groupBegin()
            .number("|(d+.d+)?")                    // hdop
            .number("|(-?d+.?d*)?")                 // altitude
            .number("|(xxxx)?")                     // state
            .groupBegin()
            .number("|(xxxx),(xxxx)")               // adc 1-2
            .number("(?:,(xxxx),(xxxx),(xxxx),(xxxx),(xxxx),(xxxx))?") // adc 3-8
            .groupBegin()
            .number("|x{16}")                       // cell (ignored)
            .number("|(xx)")                        // gsm signal
            .number("|(x{8})")                      // odometer (8 hex digits)
            .or()
            .number("|(x{9})")                      // odometer (9 hex digits)
            .groupBegin()
            .number("|(x{5,})")                     // rfid
            .groupEnd("?")
            .groupEnd("?")
            .groupEnd("?")
            .groupEnd("?")
            .any()
            .compile();

    // RFID event report: time/date followed by a plain lat/lon fix.
    private static final Pattern PATTERN_RFID = new PatternBuilder()
            .number("|(dd)(dd)(dd),")               // time (hhmmss)
            .number("(dd)(dd)(dd),")                // date (ddmmyy)
            .number("(d+)(dd.d+),")                 // latitude
            .expression("([NS]),")
            .number("(d+)(dd.d+),")                 // longitude
            .expression("([EW])")
            .compile();

    // Real-time OBD report: comma-separated engine telemetry.
    private static final Pattern PATTERN_OBD = new PatternBuilder()
            .number("(d+.d+),")                     // battery
            .number("(d+),")                        // rpm
            .number("(d+),")                        // speed
            .number("(d+.d+),")                     // throttle
            .number("(d+.d+),")                     // engine load
            .number("(-?d+),")                      // coolant temp
            .number("d+.d+,")                       // instantaneous fuel (skipped)
            .number("(d+.d+),")                     // average fuel
            .number("(d+.d+),")                     // driving range
            .number("(d+.?d*),")                    // odometer
            .number("(d+.d+),")                     // single fuel consumption
            .number("(d+.d+),")                     // total fuel consumption
            .number("(d+),")                        // error code count (captured, unused)
            .number("d+,")                          // harsh acceleration count
            .number("d+")                           // harsh break count
            .compile();

    // Command words for the binary framing.
    public static final int MSG_HEARTBEAT = 0x0001;
    public static final int MSG_SERVER = 0x0002;
    public static final int MSG_LOGIN = 0x5000;
    public static final int MSG_LOGIN_RESPONSE = 0x4000;
    public static final int MSG_POSITION = 0x9955;
    public static final int MSG_POSITION_LOGGED = 0x9016;
    public static final int MSG_ALARM = 0x9999;
    public static final int MSG_RFID = 0x9966;
    public static final int MSG_OBD_RT = 0x9901;

    /**
     * Extracts the BCD-packed device id from the 7-byte id field and looks
     * up the device session. A 0xF nibble terminates the id early. When the
     * id is exactly 14 digits, the id plus its Luhn check digit is also
     * offered as an alternate identifier (some firmwares report 15 digits).
     */
    private DeviceSession identify(ChannelBuffer buf, Channel channel, SocketAddress remoteAddress) {
        StringBuilder builder = new StringBuilder();

        for (int i = 0; i < 7; i++) {
            int b = buf.readUnsignedByte();

            // First digit (high nibble)
            int d1 = (b & 0xf0) >> 4;
            if (d1 == 0xf) {
                break;
            }
            builder.append(d1);

            // Second digit (low nibble)
            int d2 = b & 0x0f;
            if (d2 == 0xf) {
                break;
            }
            builder.append(d2);
        }

        String id = builder.toString();

        if (id.length() == 14) {
            return getDeviceSession(channel, remoteAddress, id, id + Checksum.luhn(Long.parseLong(id)));
        } else {
            return getDeviceSession(channel, remoteAddress, id);
        }
    }

    /**
     * Writes a framed response: "@@" + total length + device id + command
     * word + payload + CRC16 (CCITT-FALSE, over the bytes written so far)
     * + CRLF.
     */
    private static void sendResponse(
            Channel channel, SocketAddress remoteAddress, ChannelBuffer id, int type, ChannelBuffer msg) {

        if (channel != null) {
            ChannelBuffer buf = ChannelBuffers.buffer(
                    2 + 2 + id.readableBytes() + 2 + msg.readableBytes() + 2 + 2);

            buf.writeByte('@');
            buf.writeByte('@');
            buf.writeShort(buf.capacity());
            buf.writeBytes(id);
            buf.writeShort(type);
            buf.writeBytes(msg);
            buf.writeShort(Checksum.crc16(Checksum.CRC16_CCITT_FALSE, buf.toByteBuffer()));
            buf.writeByte('\r');
            buf.writeByte('\n');

            channel.write(buf, remoteAddress);
        }
    }

    /**
     * Returns the "host:port" the device should use, from configuration if
     * set, otherwise from the local address of this channel.
     */
    private String getServer(Channel channel) {
        String server = Context.getConfig().getString(getProtocolName() + ".server");

        if (server == null) {
            InetSocketAddress address = (InetSocketAddress) channel.getLocalAddress();
            server = address.getAddress().getHostAddress() + ":" + address.getPort();
        }

        return server;
    }

    /**
     * Maps the device alarm code to a standard alarm constant, or null for
     * unknown codes.
     */
    private String decodeAlarm(short value) {
        switch (value) {
            case 0x01:
                return Position.ALARM_SOS;
            case 0x10:
                return Position.ALARM_LOW_BATTERY;
            case 0x11:
                return Position.ALARM_OVERSPEED;
            case 0x12:
                return Position.ALARM_MOVEMENT;
            case 0x13:
                return Position.ALARM_GEOFENCE_ENTER;
            case 0x50:
                return Position.ALARM_POWER_OFF;
            case 0x53:
                return Position.ALARM_GPS_ANTENNA_CUT;
            default:
                return null;
        }
    }

    /**
     * Parses a regular/logged/alarm position sentence into the given
     * position; returns null when the sentence does not match PATTERN.
     * Note: when both 8- and 9-digit odometer groups are somehow present
     * the second overwrites the first (only one can match in practice).
     */
    private Position decodeRegular(Position position, String sentence) {
        Parser parser = new Parser(PATTERN, sentence);
        if (!parser.matches()) {
            return null;
        }

        DateBuilder dateBuilder = new DateBuilder()
                .setTime(parser.nextInt(), parser.nextInt(), parser.nextInt());
        if (parser.hasNext()) {
            dateBuilder.setMillis(parser.nextInt());
        }

        position.setValid(parser.next().equals("A"));
        position.setLatitude(parser.nextCoordinate());
        position.setLongitude(parser.nextCoordinate());

        if (parser.hasNext()) {
            position.setSpeed(parser.nextDouble());
        }

        if (parser.hasNext()) {
            position.setCourse(parser.nextDouble());
        }

        dateBuilder.setDateReverse(parser.nextInt(), parser.nextInt(), parser.nextInt());
        position.setTime(dateBuilder.getDate());

        position.set(Position.KEY_HDOP, parser.next());

        if (parser.hasNext()) {
            position.setAltitude(parser.nextDouble());
        }

        position.set(Position.KEY_STATUS, parser.next());

        for (int i = 1; i <= 8; i++) {
            if (parser.hasNext()) {
                position.set(Position.PREFIX_ADC + i, parser.nextInt(16));
            }
        }

        if (parser.hasNext()) {
            position.set(Position.KEY_GSM, parser.nextInt(16));
        }

        if (parser.hasNext()) {
            position.set(Position.KEY_ODOMETER, parser.nextLong(16));
        }
        if (parser.hasNext()) {
            position.set(Position.KEY_ODOMETER, parser.nextLong(16));
        }

        if (parser.hasNext()) {
            position.set(Position.KEY_RFID, parser.nextInt(16));
        }

        return position;
    }

    /**
     * Parses the trailing location sentence of an RFID report; the fix is
     * always marked valid. Returns null when it does not match.
     */
    private Position decodeRfid(Position position, String sentence) {
        Parser parser = new Parser(PATTERN_RFID, sentence);
        if (!parser.matches()) {
            return null;
        }

        DateBuilder dateBuilder = new DateBuilder()
                .setTime(parser.nextInt(), parser.nextInt(), parser.nextInt())
                .setDateReverse(parser.nextInt(), parser.nextInt(), parser.nextInt());
        position.setTime(dateBuilder.getDate());

        position.setValid(true);
        position.setLatitude(parser.nextCoordinate());
        position.setLongitude(parser.nextCoordinate());

        return position;
    }

    /**
     * Parses a real-time OBD sentence. The report carries no GPS fix, so
     * the last known location is used. Range and odometer arrive in km and
     * are converted to meters.
     */
    private Position decodeObd(Position position, String sentence) {
        Parser parser = new Parser(PATTERN_OBD, sentence);
        if (!parser.matches()) {
            return null;
        }

        getLastLocation(position, null);

        position.set(Position.KEY_BATTERY, parser.nextDouble());
        position.set(Position.KEY_RPM, parser.nextInt());
        position.set(Position.KEY_OBD_SPEED, parser.nextInt());
        position.set(Position.KEY_THROTTLE, parser.nextDouble());
        position.set("engineLoad", parser.nextDouble());
        position.set(Position.PREFIX_TEMP + 1, parser.nextInt());
        position.set(Position.KEY_FUEL_CONSUMPTION, parser.nextDouble());
        position.set("drivingRange", parser.nextDouble() * 1000);
        position.set(Position.KEY_ODOMETER, parser.nextDouble() * 1000);
        position.set("singleFuelConsumption", parser.nextDouble());
        position.set("totalFuelConsumption", parser.nextDouble());

        return position;
    }

    /**
     * Entry point: strips the binary frame, answers login/heartbeat/server
     * requests directly, then dispatches the ASCII payload (everything but
     * the 4 trailing checksum/CRLF bytes) to the matching decoder.
     */
    @Override
    protected Object decode(
            Channel channel, SocketAddress remoteAddress, Object msg) throws Exception {

        ChannelBuffer buf = (ChannelBuffer) msg;
        buf.skipBytes(2); // header
        buf.readShort(); // length
        ChannelBuffer id = buf.readBytes(7);
        int command = buf.readUnsignedShort();
        ChannelBuffer response;

        if (channel != null) {
            if (command == MSG_LOGIN) {
                response = ChannelBuffers.wrappedBuffer(new byte[] {0x01});
                sendResponse(channel, remoteAddress, id, MSG_LOGIN_RESPONSE, response);
                return null;
            } else if (command == MSG_HEARTBEAT) {
                response = ChannelBuffers.wrappedBuffer(new byte[] {0x01});
                sendResponse(channel, remoteAddress, id, MSG_HEARTBEAT, response);
                return null;
            } else if (command == MSG_SERVER) {
                response = ChannelBuffers.copiedBuffer(getServer(channel), StandardCharsets.US_ASCII);
                sendResponse(channel, remoteAddress, id, MSG_SERVER, response);
                return null;
            }
        }

        Position position = new Position();
        position.setProtocol(getProtocolName());

        if (command == MSG_ALARM) {
            position.set(Position.KEY_ALARM, decodeAlarm(buf.readUnsignedByte()));
        } else if (command == MSG_POSITION_LOGGED) {
            buf.skipBytes(6); // logged positions carry an extra prefix
        }

        DeviceSession deviceSession = identify(id, channel, remoteAddress);
        if (deviceSession == null) {
            return null;
        }
        position.setDeviceId(deviceSession.getDeviceId());

        if (command == MSG_RFID) {
            // Up to 15 card slots; zero means empty. KEY_RFID ends up holding
            // the last non-empty card.
            for (int i = 0; i < 15; i++) {
                long rfid = buf.readUnsignedInt();
                if (rfid != 0) {
                    String card = String.format("%010d", rfid);
                    position.set("card" + (i + 1), card);
                    position.set(Position.KEY_RFID, card);
                }
            }
        }

        String sentence = buf.toString(buf.readerIndex(), buf.readableBytes() - 4, StandardCharsets.US_ASCII);

        if (command == MSG_POSITION || command == MSG_POSITION_LOGGED || command == MSG_ALARM) {
            return decodeRegular(position, sentence);
        } else if (command == MSG_RFID) {
            return decodeRfid(position, sentence);
        } else if (command == MSG_OBD_RT) {
            return decodeObd(position, sentence);
        }

        return null;
    }

}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.raptor.storage;

import com.facebook.presto.PagesIndexPageSorter;
import com.facebook.presto.SequencePageBuilder;
import com.facebook.presto.raptor.metadata.ColumnInfo;
import com.facebook.presto.raptor.metadata.ShardInfo;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.PageBuilder;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.SortOrder;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.MaterializedRow;
import com.google.common.collect.ImmutableList;
import io.airlift.units.DataSize;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.IDBI;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.OptionalInt;
import java.util.Set;
import java.util.UUID;

import static com.facebook.presto.raptor.storage.TestOrcStorageManager.createOrcStorageManager;
import static com.facebook.presto.spi.block.SortOrder.ASC_NULLS_FIRST;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static com.facebook.presto.testing.MaterializedResult.materializeSourceDataStream;
import static com.facebook.presto.testing.TestingConnectorSession.SESSION;
import static com.facebook.presto.tests.QueryAssertions.assertEqualsIgnoreOrder;
import static com.google.common.io.Files.createTempDir;
import static io.airlift.testing.FileUtils.deleteRecursively;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.util.Collections.nCopies;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static org.testng.Assert.assertEquals;

/**
 * Tests {@link ShardCompactor}: writes a set of input shards through an ORC
 * storage manager backed by a temp directory and an in-memory H2 database,
 * compacts them, and verifies that row data survives (unordered for plain
 * compaction, order-preserving for sorted compaction).
 */
@Test(singleThreaded = true)
public class TestShardCompactor
{
    // Small shard cap so a few pages produce multiple output shards.
    private static final int MAX_SHARD_ROWS = 1000;
    private static final PagesIndexPageSorter PAGE_SORTER = new PagesIndexPageSorter();
    private static final ReaderAttributes READER_ATTRIBUTES = new ReaderAttributes(new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE));

    private OrcStorageManager storageManager;
    private ShardCompactor compactor;
    private File temporary;
    private Handle dummyHandle;

    @BeforeMethod
    public void setup()
            throws Exception
    {
        temporary = createTempDir();
        // Unique in-memory H2 database per test method.
        IDBI dbi = new DBI("jdbc:h2:mem:test" + System.nanoTime());
        dummyHandle = dbi.open();
        storageManager = createOrcStorageManager(dbi, temporary, MAX_SHARD_ROWS);
        compactor = new ShardCompactor(storageManager, READER_ATTRIBUTES);
    }

    @AfterMethod(alwaysRun = true)
    public void tearDown()
            throws Exception
    {
        if (dummyHandle != null) {
            dummyHandle.close();
        }
        deleteRecursively(temporary);
    }

    @Test
    public void testShardCompactor()
            throws Exception
    {
        List<Long> columnIds = ImmutableList.of(3L, 7L, 2L, 1L, 5L);
        List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(20), DOUBLE, DATE, TIMESTAMP);

        List<ShardInfo> inputShards = createShards(storageManager, columnIds, columnTypes, 3);
        assertEquals(inputShards.size(), 3);

        long totalRows = inputShards.stream()
                .mapToLong(ShardInfo::getRowCount)
                .sum();
        long expectedOutputShards = computeExpectedOutputShards(totalRows);

        Set<UUID> inputUuids = inputShards.stream().map(ShardInfo::getShardUuid).collect(toSet());

        long transactionId = 1;
        List<ShardInfo> outputShards = compactor.compact(transactionId, OptionalInt.empty(), inputUuids, getColumnInfo(columnIds, columnTypes));
        assertEquals(outputShards.size(), expectedOutputShards);

        Set<UUID> outputUuids = outputShards.stream().map(ShardInfo::getShardUuid).collect(toSet());
        assertShardEqualsIgnoreOrder(inputUuids, outputUuids, columnIds, columnTypes);
    }

    @Test
    public void testShardCompactorSorted()
            throws Exception
    {
        List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, createVarcharType(20), DATE, TIMESTAMP, DOUBLE);
        List<Long> columnIds = ImmutableList.of(3L, 7L, 2L, 1L, 5L);
        List<Long> sortColumnIds = ImmutableList.of(1L, 2L, 3L, 5L, 7L);
        List<SortOrder> sortOrders = nCopies(sortColumnIds.size(), ASC_NULLS_FIRST);
        // Translate sort column ids into channel indexes within columnIds.
        List<Integer> sortIndexes = sortColumnIds.stream()
                .map(columnIds::indexOf)
                .collect(toList());

        List<ShardInfo> inputShards = createSortedShards(storageManager, columnIds, columnTypes, sortIndexes, sortOrders, 2);
        assertEquals(inputShards.size(), 2);

        long totalRows = inputShards.stream().mapToLong(ShardInfo::getRowCount).sum();
        long expectedOutputShards = computeExpectedOutputShards(totalRows);

        Set<UUID> inputUuids = inputShards.stream().map(ShardInfo::getShardUuid).collect(toSet());

        long transactionId = 1;
        List<ShardInfo> outputShards = compactor.compactSorted(transactionId, OptionalInt.empty(), inputUuids, getColumnInfo(columnIds, columnTypes), sortColumnIds, sortOrders);
        List<UUID> outputUuids = outputShards.stream()
                .map(ShardInfo::getShardUuid)
                .collect(toList());
        assertEquals(outputShards.size(), expectedOutputShards);

        assertShardEqualsSorted(inputUuids, outputUuids, columnIds, columnTypes, sortIndexes, sortOrders);
    }

    // Ceiling division of totalRows by MAX_SHARD_ROWS.
    private static long computeExpectedOutputShards(long totalRows)
    {
        return ((totalRows % MAX_SHARD_ROWS) != 0) ? ((totalRows / MAX_SHARD_ROWS) + 1) : (totalRows / MAX_SHARD_ROWS);
    }

    // Input and output shards must contain the same rows, in any order.
    private void assertShardEqualsIgnoreOrder(Set<UUID> inputUuids, Set<UUID> outputUuids, List<Long> columnIds, List<Type> columnTypes)
            throws IOException
    {
        MaterializedResult inputRows = getMaterializedRows(ImmutableList.copyOf(inputUuids), columnIds, columnTypes);
        MaterializedResult outputRows = getMaterializedRows(ImmutableList.copyOf(outputUuids), columnIds, columnTypes);

        assertEqualsIgnoreOrder(outputRows, inputRows);
    }

    // Sorted compaction output (projected to the sort columns) must equal
    // the input rows sorted the same way.
    private void assertShardEqualsSorted(Set<UUID> inputUuids, List<UUID> outputUuids, List<Long> columnIds, List<Type> columnTypes, List<Integer> sortIndexes, List<SortOrder> sortOrders)
            throws IOException
    {
        List<Page> inputPages = getPages(inputUuids, columnIds, columnTypes);
        List<Type> sortTypes = sortIndexes.stream().map(columnTypes::get).collect(toList());

        MaterializedResult inputRowsSorted = sortAndMaterialize(inputPages, columnTypes, sortIndexes, sortOrders, sortTypes);
        MaterializedResult outputRows = extractColumns(getMaterializedRows(outputUuids, columnIds, columnTypes), sortIndexes, sortTypes);

        assertEquals(outputRows, inputRowsSorted);
    }

    // Projects the given column indexes out of each materialized row.
    private static MaterializedResult extractColumns(MaterializedResult materializedRows, List<Integer> indexes, List<Type> types)
    {
        ImmutableList.Builder<MaterializedRow> rows = ImmutableList.builder();
        for (MaterializedRow row : materializedRows) {
            Object[] values = new Object[indexes.size()];
            for (int i = 0; i < indexes.size(); i++) {
                values[i] = row.getField(indexes.get(i));
            }
            rows.add(new MaterializedRow(MaterializedResult.DEFAULT_PRECISION, values));
        }
        return new MaterializedResult(rows.build(), types);
    }

    // Sorts pages via the page sorter (addresses encode page + position),
    // then materializes only the sort columns, reordered by sort index.
    private static MaterializedResult sortAndMaterialize(List<Page> pages, List<Type> columnTypes, List<Integer> sortIndexes, List<SortOrder> sortOrders, List<Type> sortTypes)
    {
        long[] orderedAddresses = PAGE_SORTER.sort(columnTypes, pages, sortIndexes, sortOrders, 10_000);

        PageBuilder pageBuilder = new PageBuilder(columnTypes);
        for (long orderedAddress : orderedAddresses) {
            int pageIndex = PAGE_SORTER.decodePageIndex(orderedAddress);
            int positionIndex = PAGE_SORTER.decodePositionIndex(orderedAddress);

            Page page = pages.get(pageIndex);
            pageBuilder.declarePosition();
            for (int i = 0; i < columnTypes.size(); i++) {
                columnTypes.get(i).appendTo(page.getBlock(i), positionIndex, pageBuilder.getBlockBuilder(i));
            }
        }

        // extract the sortIndexes and reorder the blocks by sort indexes (useful for debugging)
        Block[] blocks = pageBuilder.build().getBlocks();
        Block[] outputBlocks = new Block[blocks.length];

        for (int i = 0; i < sortIndexes.size(); i++) {
            outputBlocks[i] = blocks[sortIndexes.get(i)];
        }

        MaterializedResult.Builder resultBuilder = MaterializedResult.resultBuilder(SESSION, sortTypes);
        resultBuilder.page(new Page(outputBlocks));

        return resultBuilder.build();
    }

    // Reads all pages from the given shards, forcing lazy blocks to load.
    private List<Page> getPages(Set<UUID> uuids, List<Long> columnIds, List<Type> columnTypes)
            throws IOException
    {
        ImmutableList.Builder<Page> pages = ImmutableList.builder();
        for (UUID uuid : uuids) {
            try (ConnectorPageSource pageSource = getPageSource(columnIds, columnTypes, uuid)) {
                while (!pageSource.isFinished()) {
                    Page outputPage = pageSource.getNextPage();
                    if (outputPage == null) {
                        break;
                    }
                    outputPage.assureLoaded();
                    pages.add(outputPage);
                }
            }
        }
        return pages.build();
    }

    // Materializes all rows of the given shards in shard order.
    private MaterializedResult getMaterializedRows(List<UUID> uuids, List<Long> columnIds, List<Type> columnTypes)
            throws IOException
    {
        MaterializedResult.Builder rows = MaterializedResult.resultBuilder(SESSION, columnTypes);
        for (UUID uuid : uuids) {
            try (ConnectorPageSource pageSource = getPageSource(columnIds, columnTypes, uuid)) {
                MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes);
                rows.rows(result.getMaterializedRows());
            }
        }
        return rows.build();
    }

    private ConnectorPageSource getPageSource(List<Long> columnIds, List<Type> columnTypes, UUID uuid)
    {
        return storageManager.getPageSource(uuid, OptionalInt.empty(), columnIds, columnTypes, TupleDomain.all(), READER_ATTRIBUTES);
    }

    // Writes shardCount sorted shards through a single page sink.
    private static List<ShardInfo> createSortedShards(StorageManager storageManager, List<Long> columnIds, List<Type> columnTypes, List<Integer> sortChannels, List<SortOrder> sortOrders, int shardCount)
    {
        StoragePageSink sink = createStoragePageSink(storageManager, columnIds, columnTypes);
        for (int shardNum = 0; shardNum < shardCount; shardNum++) {
            createSortedShard(columnTypes, sortChannels, sortOrders, sink);
        }
        return sink.commit();
    }

    // Sorts generated pages and appends them in sorted position order; a
    // flush ends the current shard.
    private static void createSortedShard(List<Type> columnTypes, List<Integer> sortChannels, List<SortOrder> sortOrders, StoragePageSink sink)
    {
        List<Page> pages = createPages(columnTypes);

        // Sort pages
        long[] orderedAddresses = PAGE_SORTER.sort(columnTypes, pages, sortChannels, sortOrders, 10_000);
        int[] orderedPageIndex = new int[orderedAddresses.length];
        int[] orderedPositionIndex = new int[orderedAddresses.length];

        for (int i = 0; i < orderedAddresses.length; i++) {
            orderedPageIndex[i] = PAGE_SORTER.decodePageIndex(orderedAddresses[i]);
            orderedPositionIndex[i] = PAGE_SORTER.decodePositionIndex(orderedAddresses[i]);
        }

        // Append sorted pages
        sink.appendPages(pages, orderedPageIndex, orderedPositionIndex);
        sink.flush();
    }

    // Writes shardCount unsorted shards, one flush per shard.
    private static List<ShardInfo> createShards(StorageManager storageManager, List<Long> columnIds, List<Type> columnTypes, int shardCount)
    {
        StoragePageSink sink = createStoragePageSink(storageManager, columnIds, columnTypes);
        for (int i = 0; i < shardCount; i++) {
            sink.appendPages(createPages(columnTypes));
            sink.flush();
        }
        return sink.commit();
    }

    private static StoragePageSink createStoragePageSink(StorageManager manager, List<Long> columnIds, List<Type> columnTypes)
    {
        long transactionId = 1;
        return manager.createStoragePageSink(transactionId, OptionalInt.empty(), columnIds, columnTypes);
    }

    private static List<Page> createPages(List<Type> columnTypes)
    {
        // Creates 10 pages with 10 rows each
        int rowCount = 10;
        int pageCount = 10;

        // some random values to start off the blocks
        int[][] initialValues = {
                { 17, 15, 16, 18, 14 },
                { 59, 55, 54, 53, 58 }
        };

        ImmutableList.Builder<Page> pages = ImmutableList.builder();
        for (int i = 0; i < pageCount; i++) {
            pages.add(SequencePageBuilder.createSequencePage(columnTypes, rowCount, initialValues[i % 2]));
        }

        return pages.build();
    }

    // Pairs each column id with its type for the compactor metadata API.
    private static List<ColumnInfo> getColumnInfo(List<Long> columnIds, List<Type> columnTypes)
    {
        ImmutableList.Builder<ColumnInfo> columnInfos = ImmutableList.builder();
        for (int i = 0; i < columnIds.size(); i++) {
            columnInfos.add(new ColumnInfo(columnIds.get(i), columnTypes.get(i)));
        }
        return columnInfos.build();
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.completion;

import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.lang.LangBundle;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.ElementPattern;
import com.intellij.patterns.PsiJavaElementPattern;
import com.intellij.psi.*;
import com.intellij.psi.filters.ClassFilter;
import com.intellij.psi.filters.ElementFilter;
import com.intellij.psi.filters.TrueFilter;
import com.intellij.psi.filters.element.ExcludeDeclaredFilter;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.DirectClassInheritorsSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.Consumer;
import com.intellij.util.Processor;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBIterable;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;

import static com.intellij.codeInsight.completion.JavaClassNameInsertHandler.JAVA_CLASS_INSERT_HANDLER;
import static com.intellij.patterns.PsiJavaPatterns.psiClass;
import static com.intellij.patterns.PsiJavaPatterns.psiElement;

/**
 * Completion contributor that offers class names (including inner classes
 * and annotations) in class-name completion, and in basic completion inside
 * string literals and non-javadoc comments.
 *
 * @author peter
 */
public class JavaClassNameCompletionContributor extends CompletionContributor {
  // Position right after the "new" keyword, where expected types guide suggestions.
  public static final PsiJavaElementPattern.Capture<PsiElement> AFTER_NEW =
      psiElement().afterLeaf(PsiKeyword.NEW);
  // Position inside a type-parameter bound list (extends/super/&).
  private static final PsiJavaElementPattern.Capture<PsiElement> IN_TYPE_PARAMETER =
      psiElement().afterLeaf(PsiKeyword.EXTENDS, PsiKeyword.SUPER, "&").withParent(
        psiElement(PsiReferenceList.class).withParent(PsiTypeParameter.class));
  // Position inside a class's extends/implements list.
  private static final ElementPattern<PsiElement> IN_EXTENDS_IMPLEMENTS =
    psiElement().inside(psiElement(PsiReferenceList.class).withParent(psiClass()));

  @Override
  public void fillCompletionVariants(@NotNull CompletionParameters parameters, @NotNull final CompletionResultSet _result) {
    if (parameters.getCompletionType() == CompletionType.CLASS_NAME ||
        parameters.isExtendedCompletion() && mayContainClassName(parameters)) {
      addAllClasses(parameters, _result);
    }
  }

  /**
   * Adds class-name lookup items using a prefix matcher derived from the
   * reference (or alphanumeric) prefix at the caret. Scope filtering is
   * applied only on the first invocation.
   */
  static void addAllClasses(CompletionParameters parameters, final CompletionResultSet _result) {
    CompletionResultSet result = _result.withPrefixMatcher(CompletionUtil.findReferenceOrAlphanumericPrefix(parameters));
    addAllClasses(parameters, parameters.getInvocationCount() <= 1, result.getPrefixMatcher(), _result);
  }

  // Class names are also useful inside string literals and plain (non-javadoc) comments.
  private static boolean mayContainClassName(CompletionParameters parameters) {
    PsiElement position = parameters.getPosition();
    if (SkipAutopopupInStrings.isInStringLiteral(position)) {
      return true;
    }
    PsiComment comment = PsiTreeUtil.getParentOfType(position, PsiComment.class, false);
    if (comment != null && !(comment instanceof PsiDocComment)) {
      return true;
    }
    return false;
  }

  /**
   * Core routine: feeds class lookup items to {@code consumer}.
   * Inside an annotation name, only annotation classes are offered; in
   * extends/implements and type-parameter bounds the declared class itself
   * is excluded; after "new", classes matching expected types are offered
   * first. Inner classes of prefix-matched outers are offered with
   * dotted ("Outer.Inner") lookup strings.
   */
  public static void addAllClasses(@NotNull CompletionParameters parameters,
                                   final boolean filterByScope,
                                   @NotNull final PrefixMatcher matcher,
                                   @NotNull final Consumer<? super LookupElement> consumer) {
    final PsiElement insertedElement = parameters.getPosition();

    if (JavaCompletionContributor.getAnnotationNameIfInside(insertedElement) != null) {
      MultiMap<String, PsiClass> annoMap = getAllAnnotationClasses(insertedElement, matcher);
      Processor<PsiClass> processor = new LimitedAccessibleClassPreprocessor(parameters, filterByScope, anno -> {
        JavaPsiClassReferenceElement item = AllClassesGetter.createLookupItem(anno, JAVA_CLASS_INSERT_HANDLER);
        item.addLookupStrings(getClassNameWithContainers(anno));
        consumer.consume(item);
        return true;
      });
      for (String name : matcher.sortMatching(annoMap.keySet())) {
        if (!ContainerUtil.process(annoMap.get(name), processor)) break;
      }
      return;
    }

    final ElementFilter filter =
      IN_EXTENDS_IMPLEMENTS.accepts(insertedElement) ? new ExcludeDeclaredFilter(new ClassFilter(PsiClass.class)) :
      IN_TYPE_PARAMETER.accepts(insertedElement) ? new ExcludeDeclaredFilter(new ClassFilter(PsiTypeParameter.class)) :
      TrueFilter.INSTANCE;

    final boolean inJavaContext = parameters.getPosition() instanceof PsiIdentifier;
    final boolean afterNew = AFTER_NEW.accepts(insertedElement);
    if (afterNew) {
      // Offer the expected types (and their defaults) at this construction site first.
      final PsiExpression expr = PsiTreeUtil.getContextOfType(insertedElement, PsiExpression.class, true);
      for (final ExpectedTypeInfo info : ExpectedTypesProvider.getExpectedTypes(expr, true)) {
        final PsiType type = info.getType();
        final PsiClass psiClass = PsiUtil.resolveClassInType(type);
        if (psiClass != null && psiClass.getName() != null) {
          consumer.consume(createClassLookupItem(psiClass, inJavaContext));
        }
        final PsiType defaultType = info.getDefaultType();
        if (!defaultType.equals(type)) {
          final PsiClass defClass = PsiUtil.resolveClassInType(defaultType);
          if (defClass != null && defClass.getName() != null) {
            consumer.consume(createClassLookupItem(defClass, true));
          }
        }
      }
    }

    final boolean pkgContext = JavaCompletionUtil.inSomePackage(insertedElement);
    AllClassesGetter.processJavaClasses(parameters, matcher, filterByScope, new Consumer<PsiClass>() {
      @Override
      public void consume(PsiClass psiClass) {
        processClass(psiClass, null, "");
      }

      // Recursively offers a class and (when the class itself is filtered
      // out) its inner classes, prefixing lookup strings with the
      // containing-class chain.
      private void processClass(PsiClass psiClass, @Nullable Set<? super PsiClass> visited, String prefix) {
        boolean isInnerClass = StringUtil.isNotEmpty(prefix);
        if (isInnerClass && isProcessedIndependently(psiClass)) {
          // The inner class matches the prefix on its own name, so the
          // top-level pass will already offer it.
          return;
        }

        if (filter.isAcceptable(psiClass, insertedElement)) {
          if (!inJavaContext) {
            JavaPsiClassReferenceElement element = AllClassesGetter.createLookupItem(psiClass, AllClassesGetter.TRY_SHORTENING);
            element.setLookupString(prefix + element.getLookupString());
            consumer.consume(element);
          }
          else {
            Condition<PsiClass> condition = eachClass -> filter.isAcceptable(eachClass, insertedElement) &&
                                                         AllClassesGetter.isAcceptableInContext(insertedElement, eachClass, filterByScope, pkgContext);
            for (JavaPsiClassReferenceElement element : createClassLookupItems(psiClass, afterNew,
                                                                               JAVA_CLASS_INSERT_HANDLER, condition)) {
              element.setLookupString(prefix + element.getLookupString());

              JavaConstructorCallElement.wrap(element, insertedElement).forEach(
                e -> consumer.consume(JavaCompletionUtil.highlightIfNeeded(null, e, e.getObject(), insertedElement)));
            }
          }
        }
        else {
          String name = psiClass.getName();
          if (name != null) {
            PsiClass[] innerClasses = psiClass.getInnerClasses();
            if (innerClasses.length > 0) {
              if (visited == null) visited = new HashSet<>();

              for (PsiClass innerClass : innerClasses) {
                if (visited.add(innerClass)) {
                  processClass(innerClass, visited, prefix + name + ".");
                }
              }
            }
          }
        }
      }

      private boolean isProcessedIndependently(PsiClass psiClass) {
        String innerName = psiClass.getName();
        return innerName != null && matcher.prefixMatches(innerName);
      }
    });
  }

  /**
   * Collects all annotation classes in the resolve scope, keyed by the
   * name that matched the prefix (simple name, or the dotted
   * containing-class form when only that matches).
   */
  @NotNull
  private static MultiMap<String, PsiClass> getAllAnnotationClasses(PsiElement context, PrefixMatcher matcher) {
    MultiMap<String, PsiClass> map = new MultiMap<>();
    GlobalSearchScope scope = context.getResolveScope();
    PsiClass annotation = JavaPsiFacade.getInstance(context.getProject()).findClass(CommonClassNames.JAVA_LANG_ANNOTATION_ANNOTATION, scope);
    if (annotation != null) {
      DirectClassInheritorsSearch.search(annotation, scope, false).forEach(psiClass -> {
        if (!psiClass.isAnnotationType() || psiClass.getQualifiedName() == null) return true;

        String name = Objects.requireNonNull(psiClass.getName());
        if (!matcher.prefixMatches(name)) {
          name = getClassNameWithContainers(psiClass);
          if (!matcher.prefixMatches(name)) return true;
        }
        map.putValue(name, psiClass);
        return true;
      });
    }
    return map;
  }

  // "Outer.Middle.Inner" form of the class name, walking up containing classes.
  @NotNull
  private static String getClassNameWithContainers(@NotNull PsiClass psiClass) {
    String name = Objects.requireNonNull(psiClass.getName());
    for (PsiClass parent : JBIterable.generate(psiClass, PsiClass::getContainingClass)) {
      name = parent.getName() + "." + name;
    }
    return name;
  }

  public static JavaPsiClassReferenceElement createClassLookupItem(final PsiClass psiClass, final boolean inJavaContext) {
    return AllClassesGetter.createLookupItem(psiClass, inJavaContext ? JAVA_CLASS_INSERT_HANDLER
                                                                     : AllClassesGetter.TRY_SHORTENING);
  }

  /**
   * Builds lookup items for the class and, when {@code withInners} is set,
   * its static inner classes (recursively), presenting them with dotted
   * qualified names.
   */
  public static List<JavaPsiClassReferenceElement> createClassLookupItems(final PsiClass psiClass,
                                                                          boolean withInners,
                                                                          InsertHandler<JavaPsiClassReferenceElement> insertHandler,
                                                                          Condition<? super PsiClass> condition) {
    List<JavaPsiClassReferenceElement> result = new SmartList<>();
    if (condition.value(psiClass)) {
      result.add(AllClassesGetter.createLookupItem(psiClass, insertHandler));
    }
    String name = psiClass.getName();
    if (withInners && name != null) {
      for (PsiClass inner : psiClass.getInnerClasses()) {
        if (inner.hasModifierProperty(PsiModifier.STATIC)) {
          for (JavaPsiClassReferenceElement lookupInner : createClassLookupItems(inner, true, insertHandler, condition)) {
            String forced = lookupInner.getForcedPresentableName();
            String qualifiedName = name + "." + (forced != null ? forced : inner.getName());
            lookupInner.setForcedPresentableName(qualifiedName);
            lookupInner.setLookupString(qualifiedName);
            result.add(lookupInner);
          }
        }
      }
    }
    return result;
  }

  /**
   * When basic completion yields nothing on the second invocation in a
   * Java file, hints that pressing the completion shortcut again searches
   * all class names.
   */
  @Override
  public String handleEmptyLookup(@NotNull final CompletionParameters parameters, final Editor editor) {
    if (!(parameters.getOriginalFile() instanceof PsiJavaFile)) return null;

    if (shouldShowSecondSmartCompletionHint(parameters)) {
      return LangBundle.message("completion.no.suggestions") +
             "; " +
             StringUtil.decapitalize(
                 CompletionBundle.message("completion.class.name.hint.2", KeymapUtil.getFirstKeyboardShortcutText(IdeActions.ACTION_CODE_COMPLETION)));
    }

    return null;
  }

  private static boolean shouldShowSecondSmartCompletionHint(final CompletionParameters parameters) {
    return parameters.getCompletionType() == CompletionType.BASIC &&
           parameters.getInvocationCount() == 2 &&
           parameters.getOriginalFile().getLanguage().isKindOf(JavaLanguage.INSTANCE);
  }
}
package org.wso2.carbon.apimgt.rest.api.publisher.v1.dto;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.*;
import io.swagger.annotations.*;
import java.util.Objects;
import javax.xml.bind.annotation.*;
import org.wso2.carbon.apimgt.rest.api.util.annotations.Scope;

/**
 * REST DTO carrying summary information about an API Product.
 * NOTE(review): this class follows the swagger-codegen DTO pattern (fluent setters,
 * Jackson/JAXB annotations, generated equals/hashCode/toString) — presumably generated;
 * edit the API definition rather than this file if that is confirmed.
 */
public class APIProductInfoDTO {

    private String id = null;
    private String name = null;
    private String context = null;
    private String description = null;
    private String provider = null;
    private Boolean hasThumbnail = null;

    /** Lifecycle state of the API product; serialized as its string value. */
    @XmlType(name="StateEnum")
    @XmlEnum(String.class)
    public enum StateEnum {
        @XmlEnumValue("CREATED") CREATED(String.valueOf("CREATED")),
        @XmlEnumValue("PUBLISHED") PUBLISHED(String.valueOf("PUBLISHED"));

        private String value;

        StateEnum (String v) {
            value = v;
        }

        public String value() {
            return value;
        }

        @Override
        public String toString() {
            return String.valueOf(value);
        }

        // Returns null (not an exception) when the string matches no constant.
        public static StateEnum fromValue(String v) {
            for (StateEnum b : StateEnum.values()) {
                if (String.valueOf(b.value).equals(v)) {
                    return b;
                }
            }
            return null;
        }
    }

    private StateEnum state = null;
    private List<String> securityScheme = new ArrayList<>();

    /**
     * UUID of the api product
     **/
    public APIProductInfoDTO id(String id) {
        this.id = id;
        return this;
    }

    @ApiModelProperty(example = "01234567-0123-0123-0123-012345678901", value = "UUID of the api product ")
    @JsonProperty("id")
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }

    /**
     * Name of the API Product
     **/
    public APIProductInfoDTO name(String name) {
        this.name = name;
        return this;
    }

    @ApiModelProperty(example = "CalculatorAPIProduct", value = "Name of the API Product")
    @JsonProperty("name")
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }

    /**
     **/
    public APIProductInfoDTO context(String context) {
        this.context = context;
        return this;
    }

    @ApiModelProperty(example = "CalculatorAPI", value = "")
    @JsonProperty("context")
    public String getContext() {
        return context;
    }
    public void setContext(String context) {
        this.context = context;
    }

    /**
     * A brief description about the API
     **/
    public APIProductInfoDTO description(String description) {
        this.description = description;
        return this;
    }

    @ApiModelProperty(example = "A calculator API Product that supports basic operations", value = "A brief description about the API")
    @JsonProperty("description")
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * If the provider value is not given, the user invoking the API will be used as the provider.
     **/
    public APIProductInfoDTO provider(String provider) {
        this.provider = provider;
        return this;
    }

    @ApiModelProperty(example = "admin", value = "If the provider value is not given, the user invoking the API will be used as the provider. ")
    @JsonProperty("provider")
    public String getProvider() {
        return provider;
    }
    public void setProvider(String provider) {
        this.provider = provider;
    }

    /**
     **/
    public APIProductInfoDTO hasThumbnail(Boolean hasThumbnail) {
        this.hasThumbnail = hasThumbnail;
        return this;
    }

    @ApiModelProperty(example = "true", value = "")
    @JsonProperty("hasThumbnail")
    public Boolean isHasThumbnail() {
        return hasThumbnail;
    }
    public void setHasThumbnail(Boolean hasThumbnail) {
        this.hasThumbnail = hasThumbnail;
    }

    /**
     * State of the API product. Only published api products are visible on the store
     **/
    public APIProductInfoDTO state(StateEnum state) {
        this.state = state;
        return this;
    }

    @ApiModelProperty(value = "State of the API product. Only published api products are visible on the store ")
    @JsonProperty("state")
    public StateEnum getState() {
        return state;
    }
    public void setState(StateEnum state) {
        this.state = state;
    }

    /**
     * Types of API security, the current API secured with. It can be either OAuth2 or mutual SSL or both. If it is not set OAuth2 will be set as the security for the current API.
     **/
    public APIProductInfoDTO securityScheme(List<String> securityScheme) {
        this.securityScheme = securityScheme;
        return this;
    }

    @ApiModelProperty(value = "Types of API security, the current API secured with. It can be either OAuth2 or mutual SSL or both. If it is not set OAuth2 will be set as the security for the current API. ")
    @JsonProperty("securityScheme")
    public List<String> getSecurityScheme() {
        return securityScheme;
    }
    public void setSecurityScheme(List<String> securityScheme) {
        this.securityScheme = securityScheme;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        APIProductInfoDTO apIProductInfo = (APIProductInfoDTO) o;
        return Objects.equals(id, apIProductInfo.id) &&
            Objects.equals(name, apIProductInfo.name) &&
            Objects.equals(context, apIProductInfo.context) &&
            Objects.equals(description, apIProductInfo.description) &&
            Objects.equals(provider, apIProductInfo.provider) &&
            Objects.equals(hasThumbnail, apIProductInfo.hasThumbnail) &&
            Objects.equals(state, apIProductInfo.state) &&
            Objects.equals(securityScheme, apIProductInfo.securityScheme);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, name, context, description, provider, hasThumbnail, state, securityScheme);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class APIProductInfoDTO {\n");
        sb.append("    id: ").append(toIndentedString(id)).append("\n");
        sb.append("    name: ").append(toIndentedString(name)).append("\n");
        sb.append("    context: ").append(toIndentedString(context)).append("\n");
        sb.append("    description: ").append(toIndentedString(description)).append("\n");
        sb.append("    provider: ").append(toIndentedString(provider)).append("\n");
        sb.append("    hasThumbnail: ").append(toIndentedString(hasThumbnail)).append("\n");
        sb.append("    state: ").append(toIndentedString(state)).append("\n");
        sb.append("    securityScheme: ").append(toIndentedString(securityScheme)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
package com.iotticket.api.v1.integrationTests;

import com.iotticket.api.v1.exception.IoTServerCommunicationException;
import com.iotticket.api.v1.exception.ValidAPIParamException;
import com.iotticket.api.v1.model.*;
import com.iotticket.api.v1.model.Datanode.DatanodeRead;
import com.iotticket.api.v1.model.Datanode.DatanodeReadValue;
import com.iotticket.api.v1.model.Datanode.DatanodeWriteValue;
import com.iotticket.api.v1.model.DatanodeQueryCriteria.Order;
import com.iotticket.api.v1.model.Device.DeviceDetails;

import org.junit.Before;
import org.junit.Test;

import java.util.*;

import static org.junit.Assert.*;

/**
 * Integration tests that write boolean, binary and numeric datanode values to the
 * IoT-Ticket server and then read them back, verifying type and content round-trip.
 * Requires a live server connection supplied by {@code IntegrationTestBase}
 * ({@code apiClient}, {@code deviceId}).
 */
public class DataNodeWriteReadIT extends IntegrationTestBase {

    private final static byte[] testByteValue = new byte[]{1, 2, 3, 4, 5, 6, 7, 9, 10};

    // NOTE(review): these are static but reassigned per-test in setup(); tests in this
    // class must not run concurrently against shared state — confirm runner is serial.
    public static String firstPath = "Engine/Auxillary";
    public static String secondPath = "Engine/Main";
    public static String numericDatanodeName = "AirVolume";
    private static boolean testBooleanValue = false;
    private static String boolDatanodeName = "LightOn";
    private String byteDatanodeName = "MP3";

    /** Resets datanode names/paths before every test so earlier tests cannot leak values. */
    @Before
    public void setup() {
        boolDatanodeName = "LightOn";
        byteDatanodeName = "CANMessage";
        numericDatanodeName = "AirFlow";
        firstPath = "Engine/Auxillary";
        secondPath = "Engine/Main";
    }

    /** Writes a single boolean value and immediately reads it back. */
    @Test
    public void writeBooleanValue() throws ValidAPIParamException {
        DatanodeWriteValue bv = new DatanodeWriteValue();
        bv.setName(boolDatanodeName);
        bv.setValue(testBooleanValue);

        WriteDataResponse writeResult = apiClient.writeData(deviceId, bv);

        assertNotNull(writeResult);
        assertTrue(writeResult.getTotalWritten() > 0);
        assertNotNull(writeResult.getWriteResults());
        assertTrue(writeResult.getWriteResults().iterator().next().getWrittenCount() > 0);
        assertNotNull(writeResult.getWriteResults().iterator().next().getHref());

        readBooleanValue();
    }

    /** Reads the boolean datanode written by {@link #writeBooleanValue()} and checks type, value and timestamp. */
    public void readBooleanValue() {
        DatanodeQueryCriteria criteria = new DatanodeQueryCriteria(deviceId, boolDatanodeName);
        criteria.setDeviceId(deviceId);

        ProcessValues processData = apiClient.readProcessData(criteria);

        assertNotNull(processData.getUri());
        assertTrue(processData.getDatanodeReads().size() > 0);
        assertTrue(processData.getDatanodeReads().iterator().next().getDatanodeReadValues().size() > 0);
        assertEquals(DataType.BooleanType, processData.getDatanodeReads().iterator().next().getDataType());
        assertEquals(testBooleanValue,
                processData.getDatanodeReads().iterator().next().getDatanodeReadValues().iterator().next().getConvertedValue());
        assertTrue(processData.getDatanodeReads().iterator().next().getDatanodeReadValues().iterator().next().getTimestampMilliSeconds() > 0L);
    }

    /** Writes a binary (byte array) value and immediately reads it back. */
    @Test
    public void writeBinaryValue() throws ValidAPIParamException {
        DatanodeWriteValue writeValue = new DatanodeWriteValue();
        writeValue.setName(byteDatanodeName);
        writeValue.setValue(testByteValue);

        WriteDataResponse writeResult = apiClient.writeData(deviceId, writeValue);

        assertNotNull(writeResult);
        assertTrue(writeResult.getTotalWritten() > 0);
        assertNotNull(writeResult.getWriteResults());
        assertTrue(writeResult.getWriteResults().iterator().next().getWrittenCount() > 0);
        assertNotNull(writeResult.getWriteResults().iterator().next().getHref());

        readBinaryValue();
    }

    /** Reads the binary datanode back and verifies the byte content survived the round trip. */
    private void readBinaryValue() {
        DatanodeQueryCriteria criteria = new DatanodeQueryCriteria(deviceId, byteDatanodeName);
        criteria.setDeviceId(deviceId);

        ProcessValues processData = apiClient.readProcessData(criteria);

        assertNotNull(processData.getUri());
        assertTrue(processData.getDatanodeReads().size() > 0);
        assertTrue(processData.getDatanodeReads().iterator().next().getDatanodeReadValues().size() > 0);
        assertEquals(DataType.BinaryType, processData.getDatanodeReads().iterator().next().getDataType());
        assertArrayEquals(testByteValue, (byte[]) processData.getDatanodeReads().iterator().next()
                .getDatanodeReadValues().iterator().next().getConvertedValue());
    }

    /**
     * Writes 30 int values under one path and 30 double values under a second path
     * (same datanode name, millisecond-spaced timestamps), then reads both back.
     */
    @Test
    public void writeNumericValue() throws IoTServerCommunicationException, ValidAPIParamException {
        DeviceDetails device = apiClient.getDevice(deviceId);

        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        cal.add(Calendar.SECOND, -1); // start slightly in the past so all samples are historical
        Random r = new Random();

        // 60 values total (30 iterations x 2 paths); presize to avoid regrows.
        Collection<DatanodeWriteValue> valuesToWrite = new ArrayList<DatanodeWriteValue>(60);

        for (int i = 0; i < 30; i++) {
            cal.add(Calendar.MILLISECOND, 1);

            DatanodeWriteValue dnwrite = new DatanodeWriteValue();
            dnwrite.setName(numericDatanodeName);
            dnwrite.setPath(firstPath);
            dnwrite.setUnit("l/s");
            dnwrite.setValue(r.nextInt(10)); // int value -> server stores as LongType
            dnwrite.setTimestampMilliseconds(cal.getTimeInMillis());
            valuesToWrite.add(dnwrite);

            dnwrite = new DatanodeWriteValue();
            dnwrite.setName(numericDatanodeName);
            dnwrite.setPath(secondPath);
            dnwrite.setUnit("l/s");
            dnwrite.setValue(10 * r.nextDouble()); // double value -> server stores as DoubleType
            dnwrite.setTimestampMilliseconds(cal.getTimeInMillis());
            valuesToWrite.add(dnwrite);
        }

        WriteDataResponse writeResult = apiClient.writeData(device.getDeviceId(), valuesToWrite);

        assertNotNull(writeResult);
        assertEquals(60, writeResult.getTotalWritten());
        assertNotNull(writeResult.getWriteResults());
        assertTrue(writeResult.getWriteResults().iterator().next().getWrittenCount() > 0);
        assertNotNull(writeResult.getWriteResults().iterator().next().getHref());

        readNumericValue();
    }

    private void readNumericValue() {
        readFirstNumericValue();
        readSecondNumericValues();

        /* Two datanodes are expected, since there are two datanodes with the name <numericDatanodeName>
         * but with different paths */
        DatanodeQueryCriteria crit = new DatanodeQueryCriteria(deviceId, numericDatanodeName);
        ProcessValues processValues = apiClient.readProcessData(crit);
        Collection<DatanodeRead> datanodeReads = processValues.getDatanodeReads();
        assertEquals(2, datanodeReads.size());
    }

    /** Reads the int-valued series (first path) in ascending order from the last 5 seconds. */
    private void readFirstNumericValue() {
        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        cal.add(Calendar.SECOND, -5);

        String fullPath = DataPathUtil.getFullPath(numericDatanodeName, firstPath);
        DatanodeQueryCriteria crit = new DatanodeQueryCriteria(deviceId, fullPath);
        crit.setFromDate(cal.getTimeInMillis());
        crit.setLimit(100);
        crit.setSortOrder(Order.Ascending);

        // TODO: Without the following code the test fails, as the stored nodes are not yet available.
        try {
            Thread.sleep(4000);
        } catch (InterruptedException e) {
            // Re-assert the interrupt so callers further up can still observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }

        ProcessValues processValues = apiClient.readProcessData(crit);
        Collection<DatanodeRead> datanodeReads = processValues.getDatanodeReads();
        for (DatanodeRead datanodeRead : datanodeReads) {
            Collection<DatanodeReadValue> values = datanodeRead.getDatanodeReadValues();
            assertEquals(DataType.LongType, datanodeRead.getDataType());
            assertTrue(values.size() >= 30);
            for (DatanodeReadValue value : values) {
                long ts = value.getTimestampMilliSeconds();
                long val = (Long) value.getConvertedValue();
                assertTrue(val < 10); // written values were r.nextInt(10), i.e. 0..9
                assertTrue(ts > 0);
            }
        }
    }

    /** Reads the latest double value (second path) in descending order. */
    private void readSecondNumericValues() {
        String fullPath = DataPathUtil.getFullPath(numericDatanodeName, secondPath);
        DatanodeQueryCriteria crit = new DatanodeQueryCriteria(deviceId, fullPath);
        crit.setSortOrder(Order.Descending);

        ProcessValues processValues = apiClient.readProcessData(crit);
        Collection<DatanodeRead> datanodeReads = processValues.getDatanodeReads();
        for (DatanodeRead datanodeRead : datanodeReads) {
            Collection<DatanodeReadValue> values = datanodeRead.getDatanodeReadValues();
            assertEquals(DataType.DoubleType, datanodeRead.getDataType());
            assertEquals(1, values.size()); // no limit + descending -> default returns the latest sample
            for (DatanodeReadValue value : values) {
                long ts = value.getTimestampMilliSeconds();
                double val = (Double) value.getConvertedValue();
                assertTrue(ts > 0);
                assertTrue(val < 10); // written values were 10 * r.nextDouble(), i.e. [0, 10)
            }
        }
    }
}
//========================================================================
//
//File: $RCSfile: XMIExportNature.java,v $
//Version: $Revision: 1.13 $
//Modified: $Date: 2013/01/10 22:44:48 $
//
//(c) Copyright 2006-2014 by Mentor Graphics Corp. All rights reserved.
//
//========================================================================
//This document contains information proprietary and confidential to
//Mentor Graphics Corp. and is not for external distribution.
//========================================================================
package org.xtuml.bp.mc.xmiexport;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.eclipse.core.resources.ICommand;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IProjectDescription;
import org.eclipse.core.resources.IProjectNature;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.core.ILaunchConfigurationType;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.swt.widgets.Shell;
import org.xtuml.bp.core.util.UIUtil;

/**
 * implementation of a nature that customizes a project by adding the nature to
 * the project description.
 */
public class XMIExportNature implements IProjectNature {

    /** identifier of nature in plugin.xml - (concatenate pluginid.natureid) */
    public static final String XMIEXPORT_NATURE_ID = "org.xtuml.bp.mc.xmiexport.XMIExportNature"; //$NON-NLS-1$
    public static final String XMIEXPORT_NATURE_ID_OLD = "com.mentor.nucleus.bp.mc.xmiexport.XMIExportNature"; //$NON-NLS-1$
    public static final String EXTERNALTOOLBUILDER_FOLDER = ".externalToolBuilders"; //$NON-NLS-1$
    public static final String EXTERNAL_TOOL_PLUGIN_ID = "org.eclipse.ui.externaltools"; //$NON-NLS-1$
    public static final String ATTR_DISABLED_BUILDER = EXTERNAL_TOOL_PLUGIN_ID + ".ATTR_DISABLED_BUILDER"; //$NON-NLS-1$
    public static final String ATTR_BUILDER_ENABLED = EXTERNAL_TOOL_PLUGIN_ID + ".ATTR_BUILDER_ENABLED"; //$NON-NLS-1$
    public static final String DISABLED_BUILDER_TYPE = "org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType"; //$NON-NLS-1$
    public static final String DISABLED_TOOL_BUILDER_NAME = "org.eclipse.ui.externaltools.ExternalToolBuilder"; //$NON-NLS-1$

    /** To hold associated project reference */
    private IProject project;

    public void run(org.eclipse.jface.action.IAction action) {
        // just a stub to meet class requirements
    }

    /**
     * Returns true when the (open) project carries either the current or the
     * legacy XMI Export nature id. Any failure is logged and reported as false.
     */
    static public boolean hasNature(IProject project) {
        boolean ret_val = false;
        try {
            ret_val = project.isOpen()
                    && (project.hasNature(XMIEXPORT_NATURE_ID) || project.hasNature(XMIEXPORT_NATURE_ID_OLD));
        } catch (Exception e) {
            XMIExport.logError("Error checking XMI Export nature for project " + project.getName(), e);
        }
        return ret_val;
    }

    /**
     * Add the nature to the project if it does not yet have the nature. The
     * process requires that you get the project description, get the current
     * nature set, and then add the new nature to the set.
     *
     * @return true when the project has the nature on return.
     */
    static public boolean addNature(IProject project) {
        boolean hasNature = XMIExportNature.hasNature(project);
        if (!hasNature) {
            try {
                IProjectDescription description = project.getDescription();
                String[] natures = description.getNatureIds();
                String[] newNatures = new String[natures.length + 1];
                System.arraycopy(natures, 0, newNatures, 0, natures.length);
                newNatures[natures.length] = XMIEXPORT_NATURE_ID;
                // BUGFIX: validate the NEW nature set (previously the old set was
                // validated, so an invalid combination was never detected here).
                IStatus status = ResourcesPlugin.getWorkspace().validateNatureSet(newNatures);
                // check the status and decide what to do
                if (status.getCode() == IStatus.OK) {
                    description.setNatureIds(newNatures);
                    project.setDescription(description, null);
                    hasNature = true;
                } else {
                    throw new CoreException(status);
                }
            } catch (CoreException e) {
                XMIExport.logError("Error adding the XMI Export nature to the " + project.getName() + " project.", e);
            }
        }
        return hasNature;
    }

    /**
     * Customizes the project by adding a nature and builder.
     */
    public void configure() throws CoreException {
        // Add the Builder to the project
        addBuilderToBuildSpec(project);
    }

    /**
     * Adds the builder to the project description for the selected project if
     * it does not already exist.
     */
    public void addBuilderToBuildSpec(IProject project) throws CoreException {
        // Get project description and then the associated build commands
        IProjectDescription desc = project.getDescription();
        ICommand[] commands = desc.getBuildSpec();

        // Determine if builder already associated
        boolean xmiBuilderFound = false;
        for (int i = 0; i < commands.length; ++i) {
            if (commands[i].getBuilderName().equals(XMIExportBuilder.BUILDER_ID)) {
                xmiBuilderFound = true;
                break;
            }
        }

        // Add XMI Export builder if not already in project
        if (!xmiBuilderFound) {
            ICommand custCommand = desc.newCommand();
            // create the disabled external tool builder file
            ILaunchConfiguration config = createExternalToolBuilderFile();
            // create a command from the launch configuration
            custCommand = createCommandFromConfiguration(config, custCommand);
            ICommand[] newCommands = new ICommand[commands.length + 1];
            newCommands[0] = custCommand; // Add it before other builders.
            System.arraycopy(commands, 0, newCommands, 1, commands.length);
            desc.setBuildSpec(newCommands);
            project.setDescription(desc, null);
        } else {
            // NOTE(review): this reports an error when the builder ALREADY exists,
            // which looks like a benign no-op case — confirm the intent with callers.
            resultError("Add XMI Export Nature Request", "Error adding the Builder to the project.");
        }
    }

    /**
     * This method creates a command based on the given launch configuration.
     *
     * @param config  the launch configuration backing the builder (may be null on failure upstream)
     * @param command the fresh command to populate
     * @return the populated command
     */
    private ICommand createCommandFromConfiguration(ILaunchConfiguration config, ICommand command) {
        // Use a parameterized map instead of the raw type; the arguments are string->string.
        Map<String, String> args = new HashMap<String, String>();
        args.put("LaunchConfigHandle", "<project>/" + EXTERNALTOOLBUILDER_FOLDER + "/" + XMIExportBuilder.BUILDER_ID + ".launch"); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
        command.setBuilderName(DISABLED_TOOL_BUILDER_NAME);
        command.setArguments(args);
        return command;
    }

    /**
     * This method will create an external tool builder file which is expected
     * by eclipse when a non external tool builder is disabled.
     *
     * @return the saved launch configuration, or null if creation failed (logged).
     */
    private ILaunchConfiguration createExternalToolBuilderFile() {
        ILaunchConfigurationWorkingCopy workingCopy = null;
        String builderName = XMIExportBuilder.BUILDER_ID;
        // when eclipse converts a non-external tool builder to a disabled
        // external tool builder the id below is what they use for the
        // configuration type
        ILaunchConfigurationType type =
                DebugPlugin.getDefault().getLaunchManager().getLaunchConfigurationType(DISABLED_BUILDER_TYPE);
        try {
            if (type == null) {
                RuntimeException re = new RuntimeException("Failed to to get valid disabled launch configuration type.");
                throw re;
            }
            workingCopy = type.newInstance(getExternalToolsFolder(), XMIExportBuilder.BUILDER_ID);
            // set the required attributes of this tool builder
            workingCopy.setAttribute(ATTR_DISABLED_BUILDER, builderName);
            workingCopy.setAttribute(ATTR_BUILDER_ENABLED, false);
            // now save the working copy
            return workingCopy.doSave();
        } catch (CoreException e) {
            XMIExport.logError("Unable to create external tool builder for: " + project.getName(), e);
        }
        return null;
    }

    /**
     * This method returns the IFolder instance for the external tools builder location
     *
     * @return IContainer - The folder in which the external tool builder file is kept.
     */
    private IContainer getExternalToolsFolder() {
        return project.getFolder(EXTERNALTOOLBUILDER_FOLDER);
    }

    public void deconfigure() throws CoreException {
        // Nothing to undo when the nature is removed.
    }

    /**
     * MessageDialog to show errors in action processing.
     */
    private void resultError(String title, String msg) {
        Shell shell = XMIExport.getDefault().getWorkbench().getActiveWorkbenchWindow().getShell();
        UIUtil.openError(shell, title, msg);
    }

    /**
     * Returns local reference to associated project
     */
    public IProject getProject() {
        return project;
    }

    /**
     * Saves local reference to associated project.
     */
    public void setProject(IProject value) {
        project = value;
    }
}
/* Copyright (c) 2014,2015,2016 Ahome' Innovation Technologies. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.ait.lienzo.client.core.shape; import java.util.LinkedHashSet; import java.util.List; import com.ait.lienzo.client.core.Attribute; import com.ait.lienzo.client.core.Context2D; import com.ait.lienzo.client.core.RecordingContext2D; import com.ait.lienzo.client.core.animation.LayerRedrawManager; import com.ait.lienzo.client.core.config.LienzoCore; import com.ait.lienzo.client.core.shape.json.validators.ValidationContext; import com.ait.lienzo.client.core.shape.json.validators.ValidationException; import com.ait.lienzo.client.core.shape.storage.IStorageEngine; import com.ait.lienzo.client.core.shape.storage.PrimitiveFastArrayStorageEngine; import com.ait.lienzo.client.core.types.BoundingBox; import com.ait.lienzo.client.core.types.ColorKeyRotor; import com.ait.lienzo.client.core.types.ImageDataPixelColor; import com.ait.lienzo.client.core.types.OnLayerAfterDraw; import com.ait.lienzo.client.core.types.OnLayerBeforeDraw; import com.ait.lienzo.client.core.types.Transform; import com.ait.lienzo.shared.core.types.DataURLType; import com.ait.lienzo.shared.core.types.LayerClearMode; import com.ait.lienzo.shared.core.types.NodeType; import com.ait.tooling.common.api.java.util.function.Predicate; import com.ait.tooling.nativetools.client.collection.NFastArrayList; import com.ait.tooling.nativetools.client.collection.NFastStringMap; import 
com.google.gwt.dom.client.CanvasElement; import com.google.gwt.dom.client.DivElement; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.Style.Display; import com.google.gwt.dom.client.Style.Position; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.dom.client.Style.Visibility; import com.google.gwt.json.client.JSONArray; import com.google.gwt.json.client.JSONObject; import com.google.gwt.json.client.JSONString; /** * Layer is an abstraction for the Canvas element. * <ul> * <li>Layers are assigned z-indexes automatically.</li> * <li>Every Layer contains a {@link SelectionLayer} to act as an off-set canvas.</li> * <li>Layers may contain {@link IPrimitive} or {@link Group}.</li> * </ul> */ public class Layer extends ContainerNode<IPrimitive<?>, Layer> { private int m_wide = 0; private int m_high = 0; private boolean m_shower = false; private boolean m_record = false; private SelectionLayer m_select = null; private OnLayerBeforeDraw m_olbd = null; private OnLayerAfterDraw m_olad = null; private CanvasElement m_element = null; private Context2D m_context = null; private RecordingContext2D m_recctx = null; private DivElement m_wrapper = null; private long m_batched = 0L; private final ColorKeyRotor m_c_rotor = new ColorKeyRotor(); private final NFastStringMap<Shape<?>> m_shape_color_map = new NFastStringMap<Shape<?>>(); /** * Constructor. Creates an instance of a Layer. */ public Layer() { super(NodeType.LAYER, new PrimitiveFastArrayStorageEngine()); } public Layer(final IStorageEngine<IPrimitive<?>> storage) { super(NodeType.LAYER, storage); } /** * Constructor. Creates an instance of a Layer. 
* * @param node */ protected Layer(final JSONObject node, final ValidationContext ctx) throws ValidationException { super(NodeType.LAYER, node, ctx); } public final DivElement getElement() { if (null == m_wrapper) { m_wrapper = Document.get().createDivElement(); m_wrapper.getStyle().setPosition(Position.ABSOLUTE); m_wrapper.getStyle().setDisplay(Display.INLINE_BLOCK); final CanvasElement element = getCanvasElement(); if (null != element) { if (false == isSelection()) { m_wrapper.appendChild(element); } } } return m_wrapper; } public final boolean isBatchScheduled() { return (m_batched > 0L); } public final Layer doBatchScheduled() { m_batched++; return this; } public final Layer unBatchScheduled() { m_batched = 0L; return this; } @Override public final IStorageEngine<IPrimitive<?>> getDefaultStorageEngine() { return new PrimitiveFastArrayStorageEngine(); } /** * Returns the Selection Layer. * * @return {@link SelectionLayer} */ public final SelectionLayer getSelectionLayer() { if (isListening()) { if (null == m_select) { m_select = new SelectionLayer(); m_select.setPixelSize(getWidth(), getHeight()); } return m_select; } return null; } public final RecordingContext2D getRecordingContext() { return m_recctx; } /** * Looks at the {@link SelectionLayer} and attempts to find a {@link Shape} whose alpha * channel is 255. 
* * @param x * @param y * @return {@link Shape} */ public Shape<?> findShapeAtPoint(final int x, final int y) { if (isVisible()) { final SelectionLayer selection = getSelectionLayer(); if (null != selection) { final ImageDataPixelColor rgba = selection.getContext().getImageDataPixelColor(x, y);// x,y is adjusted to canvas coordinates in event dispatch if (null != rgba) { if (rgba.getA() != 255) { return null; } final Shape<?> shape = m_shape_color_map.get(rgba.toBrowserRGB()); if ((null != shape) && (shape.isVisible())) { return shape; } } } } return null; } @Override public List<Attribute> getTransformingAttributes() { return LienzoCore.STANDARD_TRANSFORMING_ATTRIBUTES; } /** * Adds a primitive to the collection. Override to ensure primitive is put in Layers Color Map * <p> * It should be noted that this operation will not have an apparent effect for an already rendered (drawn) Container. * In other words, if the Container has already been drawn and a new primitive is added, you'll need to invoke draw() on the * Container. This is done to enhance performance, otherwise, for every add we would have draws impacting performance. */ @Override public Layer add(final IPrimitive<?> child) { super.add(child); child.attachToLayerColorMap(); return this; } @Override public Layer add(final IPrimitive<?> child, final IPrimitive<?>... children) { add(child); for (IPrimitive<?> node : children) { add(node); } return this; } /** * Removes a primitive from the container. Override to ensure primitive is removed from Layers Color Map * <p> * It should be noted that this operation will not have an apparent effect for an already rendered (drawn) Container. * In other words, if the Container has already been drawn and a new primitive is added, you'll need to invoke draw() on the * Container. This is done to enhance performance, otherwise, for every add we would have draws impacting performance. 
*/ @Override public Layer remove(final IPrimitive<?> child) { child.detachFromLayerColorMap(); super.remove(child); return this; } @Override public boolean removeFromParent() { final Node<?> parent = getParent(); if (null != parent) { final Scene scene = parent.asScene(); if (null != scene) { scene.remove(this); return true; } } return false; } /** * Removes all primitives from the collection. Override to ensure all primitives are removed from Layers Color Map * <p> * It should be noted that this operation will not have an apparent effect for an already rendered (drawn) Container. * In other words, if the Container has already been drawn and a new primitive is added, you'll need to invoke draw() on the * Container. This is done to enhance performance, otherwise, for every add we would have draws impacting performance. */ @Override public Layer removeAll() { final NFastArrayList<IPrimitive<?>> list = getChildNodes(); if (null != list) { final int size = list.size(); for (int i = 0; i < size; i++) { list.get(i).detachFromLayerColorMap(); } } super.removeAll(); return this; } /** * Internal method. Attach a Shape to the Layers Color Map */ final void attachShapeToColorMap(final Shape<?> shape) { if (null != shape) { String color = shape.getColorKey(); if (null != color) { m_shape_color_map.remove(color); shape.setColorKey(null); } int count = 0; do { count++; color = m_c_rotor.next(); } while ((m_shape_color_map.get(color) != null) && (count <= ColorKeyRotor.COLOR_SPACE_MAXIMUM)); if (count > ColorKeyRotor.COLOR_SPACE_MAXIMUM) { throw new IllegalArgumentException("Exhausted color space " + count); } m_shape_color_map.put(color, shape); shape.setColorKey(color); } } /** * Internal method. 
Detach a {@link Shape} from the Layers Color Map * * @param shape */ final void detachShapeFromColorMap(final Shape<?> shape) { if (null != shape) { final String color = shape.getColorKey(); if (null != color) { final Shape<?> look = m_shape_color_map.get(color); if (shape == look) { shape.setColorKey(null); m_shape_color_map.remove(color); } } } } /** * Serializes this Layer as a {@link com.google.gwt.json.client.JSONObject} * * @return JSONObject */ @Override public JSONObject toJSONObject() { final JSONObject object = new JSONObject(); object.put("type", new JSONString(getNodeType().getValue())); if (false == getMetaData().isEmpty()) { object.put("meta", new JSONObject(getMetaData().getJSO())); } object.put("attributes", new JSONObject(getAttributes().getJSO())); final NFastArrayList<IPrimitive<?>> list = getChildNodes(); final JSONArray children = new JSONArray(); if (null != list) { final int size = list.size(); for (int i = 0; i < size; i++) { final IPrimitive<?> prim = list.get(i); if (null != prim) { JSONObject make = prim.toJSONObject(); if (null != make) { children.set(children.size(), make); } } } } object.put("children", children); object.put("storage", getStorageEngine().toJSONObject()); return object; } /** * Sets this layer's pixel size. * * @param wide * @param high */ void setPixelSize(final int wide, final int high) { m_wide = wide; m_high = high; if (LienzoCore.IS_CANVAS_SUPPORTED) { if (false == isSelection()) { getElement().getStyle().setWidth(wide, Unit.PX); getElement().getStyle().setHeight(high, Unit.PX); } final CanvasElement element = getCanvasElement(); element.setWidth(wide); element.setHeight(high); if ((false == isSelection()) && (null != m_select)) { m_select.setPixelSize(wide, high); } } } /** * Enables event handling on this object. 
*
 * @param listening
 * @return Layer
 */
@Override
public Layer setListening(final boolean listening)
{
    super.setListening(listening);

    if (listening)
    {
        if (isShowSelectionLayer())
        {
            if (null != getSelectionLayer())
            {
                doShowSelectionLayer(true);
            }
        }
    }
    else
    {
        if (isShowSelectionLayer())
        {
            // Not listening for events; take the selection canvas out of the DOM.
            doShowSelectionLayer(false);
        }
        m_select = null;
    }
    return this;
}

public boolean isShowSelectionLayer()
{
    return m_shower;
}

public Layer setShowSelectionLayer(final boolean shower)
{
    m_shower = shower;

    return doShowSelectionLayer(shower);
}

// Rebuilds this layer's DOM children: first the visible canvas, then
// (optionally) the selection layer's canvas stacked on top of it.
private final Layer doShowSelectionLayer(final boolean shower)
{
    if (false == isSelection())
    {
        if (null != m_select)
        {
            while (getElement().getChildCount() > 0)
            {
                getElement().removeChild(getElement().getChild(0));
            }
            CanvasElement element = getCanvasElement();

            if (null != element)
            {
                getElement().appendChild(element);
            }
            if (shower)
            {
                element = m_select.getCanvasElement();

                if (null != element)
                {
                    getElement().appendChild(element);
                }
            }
        }
    }
    return this;
}

/**
 * Gets this layer's width.
 *
 * @return int
 */
public int getWidth()
{
    return m_wide;
}

/**
 * Sets this layer's width
 *
 * @param wide
 */
void setWidth(final int wide)
{
    m_wide = wide;
}

/**
 * Gets this layer's height
 *
 * @return int
 */
public int getHeight()
{
    return m_high;
}

/**
 * Sets this layer's height
 *
 * @param high
 */
void setHeight(final int high)
{
    m_high = high;
}

/**
 * Returns whether the Layer is zoomable.
 * If not, changes to the (parent) Viewport's transform (probably due to zoom or pan operations) won't affect this layer.
 * The default value is true.
 *
 * @return boolean
 */
public boolean isTransformable()
{
    return getAttributes().isTransformable();
}

/**
 * Sets whether the Layer is zoomable.
 * If not, changes to the (parent) Viewport's transform (probably due to zoom or pan operations) won't affect this layer.
 * The default value is true.
 *
 * @param transformable boolean
 * @return Layer
 */
public Layer setTransformable(final boolean transformable)
{
    getAttributes().setTransformable(transformable);

    return this;
}

/**
 * Returns whether this layer is cleared before being drawn.
 *
 * @return boolean
 */
public boolean isClearLayerBeforeDraw()
{
    return getAttributes().isClearLayerBeforeDraw();
}

/**
 * Sets whether this layer should be cleared before being drawn.
 *
 * @param clear
 * @return Layer
 */
public Layer setClearLayerBeforeDraw(final boolean clear)
{
    getAttributes().setClearLayerBeforeDraw(clear);

    return this;
}

/**
 * Return the {@link CanvasElement}.
 *
 * @return CanvasElement
 */
public CanvasElement getCanvasElement()
{
    if (LienzoCore.IS_CANVAS_SUPPORTED)
    {
        if (null == m_element)
        {
            // Lazily create the backing canvas element on first request.
            m_element = Document.get().createCanvasElement();

            m_element.getStyle().setPosition(Position.ABSOLUTE);

            m_element.getStyle().setDisplay(Display.INLINE_BLOCK);
        }
        if (null == m_context)
        {
            m_context = new Context2D(m_element);

            // The recording context wraps the real one so draw calls can be captured.
            m_recctx = new RecordingContext2D(m_context);
        }
    }
    return m_element;
}

/**
 * Handler that can be used to hook into the pre-drawing process.
 * If the handler returns false, no drawing will take place.
 *
 * @param onLayerBeforeDrawHandler
 * @return Layer
 */
public Layer setOnLayerBeforeDraw(final OnLayerBeforeDraw onLayerBeforeDrawHandler)
{
    m_olbd = onLayerBeforeDrawHandler;

    return this;
}

/**
 * Handler that can be used to hook into the post-drawing process.
 * The handler will be invoked after the drawing process finishes.
 *
 * @param onLayerAfterDrawHandler
 * @return Layer
 */
public Layer setOnLayerAfterDraw(final OnLayerAfterDraw onLayerAfterDrawHandler)
{
    m_olad = onLayerAfterDrawHandler;

    return this;
}

public boolean isRecording()
{
    return m_record;
}

public Layer setRecording(final boolean record)
{
    m_record = record;

    return this;
}

/**
 * Draws the layer and invokes pre/post draw handlers.
 * Drawing only takes place if the layer is visible.
 */
@Override
public Layer draw()
{
    // Route through the recording context when capture is enabled.
    return draw(isRecording() ? getRecordingContext() : getContext());
}

protected Layer draw(Context2D context)
{
    if (LienzoCore.IS_CANVAS_SUPPORTED)
    {
        if (isClearLayerBeforeDraw())
        {
            clear();
        }
        if (isVisible())
        {
            boolean draw = true;

            if (null != m_olbd)
            {
                // The pre-draw hook may veto the draw entirely.
                draw = m_olbd.onLayerBeforeDraw(this);
            }
            if (draw)
            {
                Transform transform = null;

                final Viewport viewport = getViewport();

                if ((isTransformable()) && (null != viewport))
                {
                    transform = viewport.getTransform();
                }
                context.save();

                if (null != transform)
                {
                    context.transform(transform);
                }
                final BoundingBox bbox = getStorageBounds();

                IPathClipper vclp = null;

                if (null != viewport)
                {
                    // Apply the viewport-level clipper first, if it is active.
                    vclp = viewport.getPathClipper();

                    if ((null != vclp) && (vclp.isActive()))
                    {
                        vclp.clip(context);
                    }
                }
                // Then the layer's own clipper.
                final IPathClipper lclp = getPathClipper();

                if ((null != lclp) && (lclp.isActive()))
                {
                    lclp.clip(context);
                }
                drawWithTransforms(context, 1, bbox);

                context.restore();

                if (null != m_olad)
                {
                    m_olad.onLayerAfterDraw(this);
                }
                final SelectionLayer selection = getSelectionLayer();

                if (null != selection)
                {
                    // Repeat the draw on the hidden selection canvas with the same
                    // transform and clippers, so hit testing matches the visible render.
                    selection.clear();

                    context = selection.getContext();

                    context.save();

                    if (null != transform)
                    {
                        context.transform(transform);
                    }
                    if ((null != vclp) && (vclp.isActive()))
                    {
                        vclp.clip(context);
                    }
                    if ((null != lclp) && (lclp.isActive()))
                    {
                        lclp.clip(context);
                    }
                    drawWithTransforms(context, 1, bbox);

                    context.restore();
                }
            }
        }
    }
    return this;
}

/**
 * Performs batch updates to the Layer, that is, drawing is deferred till the next AnimationFrame,
 * to cut down on redraws on rapid event dispatch.
 *
 * @return Layer
 */
@Override
public Layer batch()
{
    return LayerRedrawManager.get().schedule(this);
}

/**
 * Sets whether this object is visible.
 *
 * @param visible
 * @return Layer
 */
@Override
public Layer setVisible(final boolean visible)
{
    super.setVisible(visible);

    // Mirror the attribute state onto the DOM element's CSS visibility.
    getElement().getStyle().setVisibility(visible ? Visibility.VISIBLE : Visibility.HIDDEN);

    return this;
}

/**
 * Returns this layer
 *
 * @return Layer
 */
@Override
public Layer getLayer()
{
    return this;
}

@Override
public Layer asLayer()
{
    return this;
}

public boolean isSelection()
{
    return false;
}

/**
 * Clears the layer.
 */
public void clear()
{
    if (LienzoCore.get().getLayerClearMode() == LayerClearMode.CLEAR)
    {
        final Context2D context = getContext();

        if (null != context)
        {
            context.clearRect(0, 0, getWidth(), getHeight());
        }
    }
    else
    {
        // Resizing a canvas to its current size also resets its contents.
        setPixelSize(getWidth(), getHeight());
    }
}

/**
 * Returns the {@link Context2D} this layer is operating on.
 *
 * @return Context2D
 */
public Context2D getContext()
{
    return m_context;
}

/**
 * Moves this layer one level up.
 *
 * @return Layer
 */
@SuppressWarnings("unchecked")
@Override
public Layer moveUp()
{
    final Node<?> parent = getParent();

    if (null != parent)
    {
        final IContainer<?, Layer> container = (IContainer<?, Layer>) parent.asContainer();

        if (null != container)
        {
            container.moveUp(this);
        }
    }
    return this;
}

/**
 * Moves this layer one level down.
 *
 * @return Layer
 */
@SuppressWarnings("unchecked")
@Override
public Layer moveDown()
{
    final Node<?> parent = getParent();

    if (null != parent)
    {
        final IContainer<?, Layer> container = (IContainer<?, Layer>) parent.asContainer();

        if (null != container)
        {
            container.moveDown(this);
        }
    }
    return this;
}

/**
 * Moves this layer to the top of the layer stack.
 *
 * @return Layer
 */
@SuppressWarnings("unchecked")
@Override
public Layer moveToTop()
{
    final Node<?> parent = getParent();

    if (null != parent)
    {
        final IContainer<?, Layer> container = (IContainer<?, Layer>) parent.asContainer();

        if (null != container)
        {
            container.moveToTop(this);
        }
    }
    return this;
}

/**
 * Moves this layer to the bottom of the layer stack.
*
 * @return Layer
 */
@SuppressWarnings("unchecked")
@Override
public Layer moveToBottom()
{
    final Node<?> parent = getParent();

    if (null != parent)
    {
        final IContainer<?, Layer> container = (IContainer<?, Layer>) parent.asContainer();

        if (null != container)
        {
            container.moveToBottom(this);
        }
    }
    return this;
}

/**
 * Returns all the {@link Node} objects present in this layer that match the
 * given {@link com.ait.lienzo.client.core.types.INodeFilter}, this Layer
 * included.
 *
 * @param predicate test applied to this layer and every descendant node
 * @param buff collector for the matching nodes
 */
@Override
protected void find(final Predicate<Node<?>> predicate, final LinkedHashSet<Node<?>> buff)
{
    if (predicate.test(this))
    {
        buff.add(this);
    }
    final NFastArrayList<IPrimitive<?>> list = getChildNodes();

    if (null != list)
    {
        final int size = list.size();

        for (int i = 0; i < size; i++)
        {
            final IPrimitive<?> prim = list.get(i);

            if (null != prim)
            {
                final Node<?> node = prim.asNode();

                if (null != node)
                {
                    if (predicate.test(node))
                    {
                        buff.add(node);
                    }
                    // Recurse into container children so the whole subtree is searched.
                    final ContainerNode<?, ?> cont = node.asContainerNode();

                    if (null != cont)
                    {
                        cont.find(predicate, buff);
                    }
                }
            }
        }
    }
}

/**
 * Returns the content of this Layer as a PNG image that can be used as a source for another canvas or an HTML element.
 *
 * @return String
 */
public final String toDataURL()
{
    if (null != m_element)
    {
        return toDataURL(m_element);
    }
    else
    {
        // No canvas was ever created; return the smallest valid data URL.
        return "data:,";
    }
}

/**
 * Returns the content of this {@link Layer} as an image that can be used as a source for another canvas or an HTML element
 *
 * @param mimetype the image type; defaults to PNG when null
 * @return String
 */
public final String toDataURL(DataURLType mimetype)
{
    if (null != m_element)
    {
        if (null == mimetype)
        {
            mimetype = DataURLType.PNG;
        }
        return toDataURL(m_element, mimetype.getValue());
    }
    else
    {
        return "data:,";
    }
}

// JSNI bridge to the native HTMLCanvasElement.toDataURL().
private static native final String toDataURL(CanvasElement element)
/*-{
    return element.toDataURL();
}-*/;

// JSNI bridge to the native HTMLCanvasElement.toDataURL(mimeType).
private static native final String toDataURL(CanvasElement element, String mimetype)
/*-{
    return element.toDataURL(mimetype);
}-*/;

/**
 * Hidden companion layer used for color-key based hit testing.
 * It is never shown to the user and never handles events itself.
 */
public static class SelectionLayer extends Layer
{
    private SelectionContext2D m_context;

    public SelectionLayer()
    {
        super();

        setListening(false);
    }

    /**
     * Empty implementation of draw. Not needed in this case.
     */
    @Override
    public Layer draw()
    {
        return this;
    }

    @Override
    public CanvasElement getCanvasElement()
    {
        final CanvasElement element = super.getCanvasElement();

        if (null != element)
        {
            if (null == m_context)
            {
                m_context = new SelectionContext2D(element);
            }
        }
        return element;
    }

    @Override
    public boolean isSelection()
    {
        return true;
    }

    @Override
    public Context2D getContext()
    {
        return m_context;
    }

    // Context variant that pins global alpha to 1, since color-key hit
    // testing requires fully opaque pixels.
    private static class SelectionContext2D extends Context2D
    {
        public SelectionContext2D(final CanvasElement element)
        {
            super(element);

            super.setGlobalAlpha(1);
        }

        @Override
        public boolean isSelection()
        {
            return true;
        }

        @Override
        public void setGlobalAlpha(final double alpha)
        {
            // Intentionally ignored; alpha is pinned to 1 in the constructor.
        }
    }
}

/**
 * Factory used by the serialization framework to rebuild Layer instances
 * from their JSON representation.
 */
public static class LayerFactory extends ContainerNodeFactory<Layer>
{
    public LayerFactory()
    {
        super(NodeType.LAYER);

        addAttribute(Attribute.CLEAR_LAYER_BEFORE_DRAW);

        addAttribute(Attribute.TRANSFORMABLE);
    }

    @Override
    public Layer container(final JSONObject node, final ValidationContext ctx) throws ValidationException
    {
        return new Layer(node, ctx);
    }

    @Override
    public boolean addNodeForContainer(final IContainer<?, ?> container, final Node<?> node, final ValidationContext ctx)
    {
        final IPrimitive<?> prim = node.asPrimitive();

        if (null != prim)
        {
            container.asLayer().add(prim);

            return true;
        }
        else
        {
            try
            {
                // Record the type mismatch in the validation context.
                ctx.addBadTypeError(node.getClass().getName() + " is not a Primitive");
            }
            catch (ValidationException e)
            {
                // Error collection itself failed; report the add as unsuccessful.
                return false;
            }
        }
        return false;
    }
}
}
package i5.las2peer.security;

import i5.las2peer.communication.Message;
import i5.las2peer.communication.MessageException;
import i5.las2peer.logging.NodeObserver.Event;
import i5.las2peer.p2p.Node;
import i5.las2peer.persistency.MalformedXMLException;
import i5.las2peer.persistency.XmlAble;
import i5.las2peer.tools.CryptoException;
import i5.las2peer.tools.CryptoTools;
import i5.las2peer.tools.SerializationException;
import i5.las2peer.tools.SerializeTools;
import i5.simpleXML.Element;
import i5.simpleXML.Parser;
import i5.simpleXML.XMLSyntaxException;

import java.security.KeyPair;
import java.security.PrivateKey;
import java.security.PublicKey;

import javax.crypto.SecretKey;

import org.apache.commons.codec.binary.Base64;

/**
 * An Agent is the basic acting entity in the LAS2peer network.
 * At the moment, an agent can represent a simple user, a group, a service or a monitoring agent.
 *
 * @author Holger Jan&szlig;en
 */
public abstract class Agent implements XmlAble, Cloneable, MessageReceiver {

	/** unique identifier of this agent */
	private long id;

	/** the private key, symmetrically encrypted with a passphrase-derived key */
	private byte[] baEncryptedPrivate;

	/** public key for asymmetric encryption */
	private PublicKey publicKey;

	/** private key for asymmetric encryption; null while the agent is locked */
	private PrivateKey privateKey = null;

	/** the node this agent is currently registered to, or null if unregistered */
	private Node runningAt = null;

	/**
	 * Creates a new agent from a fresh key pair; the private key is immediately
	 * encrypted with the given symmetric key and the agent is left locked.
	 *
	 * @param id agent id
	 * @param pair asymmetric key pair of this agent
	 * @param key symmetric key used to encrypt the private key
	 * @throws L2pSecurityException if the private key cannot be encrypted
	 */
	protected Agent(long id, KeyPair pair, SecretKey key) throws L2pSecurityException {
		this.publicKey = pair.getPublic();
		this.privateKey = pair.getPrivate();
		this.id = id;

		encryptPrivateKey(key);
		lockPrivateKey();
	}

	/**
	 * Creates a new agent from an already encrypted private key.
	 *
	 * @param id agent id
	 * @param publicKey public key of this agent
	 * @param encodedPrivate encrypted serialization of the private key
	 */
	protected Agent(long id, PublicKey publicKey, byte[] encodedPrivate) {
		this.id = id;
		this.publicKey = publicKey;
		// defensive copy: the caller keeps no handle into our key material
		this.baEncryptedPrivate = encodedPrivate.clone();
		this.privateKey = null;
	}

	/**
	 * (Re-)Lock the private key.
	 */
	public void lockPrivateKey() {
		privateKey = null;
	}

	/**
	 * Unlocks the private key using the given symmetric key.
	 *
	 * @param key symmetric key the private key was encrypted with
	 * @throws L2pSecurityException if decryption or deserialization fails
	 */
	public void unlockPrivateKey(SecretKey key) throws L2pSecurityException {
		try {
			privateKey = (PrivateKey) SerializeTools.deserialize(CryptoTools.decryptSymmetric(baEncryptedPrivate, key));
		} catch (SerializationException e) {
			throw new L2pSecurityException("unable to deserialize key", e);
		} catch (CryptoException e) {
			throw new L2pSecurityException("unable to decrypt key", e);
		}
	}

	/**
	 * Encrypts the currently unlocked private key into a byte array with strong
	 * symmetric encryption. The same key is needed later to unlock the agent.
	 *
	 * @param key symmetric key to encrypt the private key with
	 * @throws L2pSecurityException if the agent is locked or encryption fails
	 */
	public void encryptPrivateKey(SecretKey key) throws L2pSecurityException {
		if (isLocked()) {
			throw new L2pSecurityException("You have to unlock the key first!");
		}

		try {
			baEncryptedPrivate = CryptoTools.encryptSymmetric(privateKey, key);
		} catch (CryptoException e) {
			throw new L2pSecurityException("Unable to encrypt private key", e);
		} catch (SerializationException e) {
			throw new L2pSecurityException("unable to serialize private key", e);
		}
	}

	/**
	 * @return true, if the private key of this agent is still locked
	 */
	public boolean isLocked() {
		return privateKey == null;
	}

	/**
	 * Returns the id of this agent.
	 *
	 * @return id of the agent
	 */
	public long getId() {
		return id;
	}

	/**
	 * Returns the id of this agent.
	 * <i>This method is only implemented, since an Agent is
	 * also a {@link MessageReceiver}, thus has to implement this method.
	 * It was written for the {@link Mediator} class.</i>
	 *
	 * @return id of the agent
	 */
	@Override
	public long getResponsibleForAgentId() {
		return getId();
	}

	/**
	 * @return the cryptographic public key of this agent
	 */
	public PublicKey getPublicKey() {
		return publicKey;
	}

	/**
	 * @return the cryptographic private key of this agent
	 * @throws L2pSecurityException the private key has not been unlocked yet
	 */
	public PrivateKey getPrivateKey() throws L2pSecurityException {
		if (privateKey == null) {
			throw new L2pSecurityException("You have to unlock the key using a passphrase first!");
		}

		return privateKey;
	}

	/**
	 * Gets the private key encrypted and encoded in base64.
	 *
	 * mainly for <code>toXmlString()</code> methods of subclasses
	 *
	 * @return encoded version or the private key
	 */
	protected String getEncodedPrivate() {
		return Base64.encodeBase64String(baEncryptedPrivate);
	}

	/**
	 * Hook to be called by the node where this agent is registered to, when the
	 * node receives a message destined to this agent.
	 *
	 * @param message the incoming message
	 * @param c execution context for handling the message
	 * @throws MessageException
	 */
	public abstract void receiveMessage(Message message, Context c) throws MessageException;

	/**
	 * Gets a locked copy of this agent.
	 *
	 * @return a locked clone of this agent
	 * @throws CloneNotSupportedException
	 */
	public final Agent cloneLocked() throws CloneNotSupportedException {
		Agent result = (Agent) clone();
		result.lockPrivateKey();
		return result;
	}

	/**
	 * Notifies this agent of unregistering from a node.
	 */
	public void notifyUnregister() {
		// guard against a double unregister: runningAt may already be null,
		// which previously caused a NullPointerException for service agents
		if (this instanceof ServiceAgent && runningAt != null) {
			runningAt.observerNotice(Event.SERVICE_SHUTDOWN, runningAt.getNodeId(), this,
					String.valueOf(((ServiceAgent) this).getServiceClassName()));
		}

		runningAt = null;
	}

	/**
	 * Notifies this agent that it has been registered at a node.
	 * May be overridden in implementing classes.
	 *
	 * <i>Make sure, overriding methods do a call of this method!</i>
	 *
	 * @param n the node this agent has been registered to
	 * @throws AgentException
	 */
	public void notifyRegistrationTo(Node n) throws AgentException {
		if (this instanceof ServiceAgent) {
			n.observerNotice(Event.SERVICE_STARTUP, n.getNodeId(), this,
					String.valueOf(((ServiceAgent) this).getServiceClassName()));
		}

		runningAt = n;
	}

	/**
	 * Gets the node, this agent is running at.
	 *
	 * @return the node, this agent is running at
	 */
	public Node getRunningAtNode() {
		return runningAt;
	}

	/**
	 * Factory: Create an agent from its XML string representation.
	 *
	 * Depending on the type attribute of the root node, the type will be
	 * a {@link UserAgent}, {@link GroupAgent}, {@link ServiceAgent} or
	 * {@link MonitoringAgent}.
	 *
	 * @param xml XML representation of the agent
	 * @return an agent
	 * @throws MalformedXMLException if the XML cannot be parsed or has an unknown type
	 */
	public static Agent createFromXml(String xml) throws MalformedXMLException {
		try {
			Element root = Parser.parse(xml, false);

			if (!root.getName().equals("agent")) {
				throw new MalformedXMLException("this is not an agent but a " + root.getName());
			}

			String type = root.getAttribute("type");

			// dispatch to the concrete agent type's own XML factory
			if ("user".equals(type)) {
				return UserAgent.createFromXml(root);
			} else if ("group".equals(type)) {
				return GroupAgent.createFromXml(root);
			} else if ("service".equals(type)) {
				return ServiceAgent.createFromXml(root);
			} else if ("monitoring".equals(type)) {
				return MonitoringAgent.createFromXml(root);
			} else {
				throw new MalformedXMLException("Unknown agent type: " + type);
			}
		} catch (XMLSyntaxException e) {
			throw new MalformedXMLException("Error parsing xml string", e);
		}
	}
}
/*
 * This file is part of SpongeAPI, licensed under the MIT License (MIT).
 *
 * Copyright (c) SpongePowered <https://www.spongepowered.org>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package me.lucko.helper.random;

import com.flowpowered.math.GenericMath;

import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

import javax.annotation.Nonnull;

/**
 * Represents a value which may vary randomly.
 */
@FunctionalInterface
public interface VariableAmount {

    /**
     * Creates a new 'fixed' variable amount, calls to {@link #getAmount} will
     * always return the fixed value.
     *
     * @param value The fixed value
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount fixed(double value) {
        return new Fixed(value);
    }

    /**
     * Creates a new variable amount which return values between the given min
     * (inclusive) and max (exclusive).
     *
     * @param min The minimum of the range (inclusive)
     * @param max The maximum of the range (exclusive)
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount range(double min, double max) {
        // min + uniform[0, max - min) == uniform[min, max)
        return new BaseAndAddition(min, fixed(max - min));
    }

    /**
     * Creates a new variable about which has a base and variance. The final
     * amount will be the base amount plus or minus a random amount between zero
     * (inclusive) and the variance (exclusive).
     *
     * @param base The base value
     * @param variance The variance
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount baseWithVariance(double base, double variance) {
        return new BaseAndVariance(base, fixed(variance));
    }

    /**
     * Creates a new variable about which has a base and variance. The final
     * amount will be the base amount plus or minus a random amount between zero
     * (inclusive) and the variance (exclusive).
     *
     * @param base The base value
     * @param variance The variance
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount baseWithVariance(double base, @Nonnull VariableAmount variance) {
        return new BaseAndVariance(base, variance);
    }

    /**
     * Creates a new variable amount which has a base and an additional amount.
     * The final amount will be the base amount plus a random amount between
     * zero (inclusive) and the additional amount (exclusive).
     *
     * @param base The base value
     * @param addition The additional amount
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount baseWithRandomAddition(double base, double addition) {
        return new BaseAndAddition(base, fixed(addition));
    }

    /**
     * Creates a new variable amount which has a base and an additional amount.
     * The final amount will be the base amount plus a random amount between
     * zero (inclusive) and the additional amount (exclusive).
     *
     * @param base The base value
     * @param addition The additional amount
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount baseWithRandomAddition(double base, @Nonnull VariableAmount addition) {
        return new BaseAndAddition(base, addition);
    }

    /**
     * Creates a new variable about which has a base and a chance to apply a
     * random variance. The chance should be between zero and one with a chance
     * of one signifying that the variance will always be applied. If the chance
     * succeeds then the final amount will be the base amount plus or minus a
     * random amount between zero (inclusive) and the variance (exclusive). If
     * the chance fails then the final amount will just be the base value.
     *
     * @param base The base value
     * @param variance The variance
     * @param chance The chance to apply the variance
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount baseWithOptionalVariance(double base, double variance, double chance) {
        return new OptionalAmount(base, chance, baseWithVariance(base, variance));
    }

    /**
     * Creates a new variable about which has a base and a chance to apply a
     * random variance. The chance should be between zero and one with a chance
     * of one signifying that the variance will always be applied. If the chance
     * succeeds then the final amount will be the base amount plus or minus a
     * random amount between zero (inclusive) and the variance (exclusive). If
     * the chance fails then the final amount will just be the base value.
     *
     * @param base The base value
     * @param variance The variance
     * @param chance The chance to apply the variance
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount baseWithOptionalVariance(double base, @Nonnull VariableAmount variance, double chance) {
        return new OptionalAmount(base, chance, baseWithVariance(base, variance));
    }

    /**
     * Creates a new variable about which has a base and a chance to apply a
     * random additional amount. The chance should be between zero and one with
     * a chance of one signifying that the additional amount will always be
     * applied. If the chance succeeds then the final amount will be the base
     * amount plus a random amount between zero (inclusive) and the additional
     * amount (exclusive). If the chance fails then the final amount will just
     * be the base value.
     *
     * @param base The base value
     * @param addition The additional amount
     * @param chance The chance to apply the additional amount
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount baseWithOptionalAddition(double base, double addition, double chance) {
        return new OptionalAmount(base, chance, baseWithRandomAddition(base, addition));
    }

    /**
     * Creates a new variable about which has a base and a chance to apply a
     * random additional amount. The chance should be between zero and one with
     * a chance of one signifying that the additional amount will always be
     * applied. If the chance succeeds then the final amount will be the base
     * amount plus a random amount between zero (inclusive) and the additional
     * amount (exclusive). If the chance fails then the final amount will just
     * be the base value.
     *
     * @param base The base value
     * @param addition The additional amount
     * @param chance The chance to apply the additional amount
     * @return A variable amount representation
     */
    @Nonnull
    static VariableAmount baseWithOptionalAddition(double base, @Nonnull VariableAmount addition, double chance) {
        return new OptionalAmount(base, chance, baseWithRandomAddition(base, addition));
    }

    /**
     * Gets an instance of the variable amount depending on the given random
     * object.
     *
     * @param random The random object
     * @return The amount
     */
    double getAmount(@Nonnull Random random);

    /**
     * Gets an instance of the variable amount using the thread's
     * {@link ThreadLocalRandom} instance.
     *
     * @return The amount
     */
    default double getAmount() {
        return getAmount(ThreadLocalRandom.current());
    }

    /**
     * Gets the amount as if from {@link #getAmount(Random)} but floored to the
     * nearest integer equivalent.
     *
     * @param random The random object
     * @return The floored amount
     */
    default int getFlooredAmount(@Nonnull Random random) {
        return GenericMath.floor(getAmount(random));
    }

    /**
     * Gets the amount as if from {@link #getAmount()} but floored to the
     * nearest integer equivalent.
     *
     * @return The floored amount
     */
    default int getFlooredAmount() {
        return GenericMath.floor(getAmount());
    }

    /**
     * Represents a fixed amount, calls to {@link #getAmount} will always return
     * the same fixed value.
     */
    final class Fixed implements VariableAmount {

        private final double amount;

        private Fixed(double amount) {
            this.amount = amount;
        }

        @Override
        public double getAmount(@Nonnull Random random) {
            // the Random is intentionally unused: the amount never varies
            return this.amount;
        }

        @Override
        public String toString() {
            return "VariableAmount.Fixed(amount=" + this.getAmount() + ")";
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) return true;
            if (!(o instanceof Fixed)) return false;
            final Fixed other = (Fixed) o;
            return Double.compare(this.amount, other.amount) == 0;
        }

        @Override
        public int hashCode() {
            final int PRIME = 59;
            int result = 1;
            // fold the 64 bits of the double into 32, same mixing as Double.hashCode
            result = result * PRIME + (int) (Double.doubleToLongBits(this.amount) >>> 32 ^ Double.doubleToLongBits(this.amount));
            return result;
        }
    }

    /**
     * Represents a base amount with a variance, the final amount will be the
     * base amount plus or minus a random amount between zero (inclusive) and
     * the variance (exclusive).
     */
    final class BaseAndVariance implements VariableAmount {

        private final double base;
        private final VariableAmount variance;

        private BaseAndVariance(double base, @Nonnull VariableAmount variance) {
            this.base = base;
            this.variance = variance;
        }

        @Override
        public double getAmount(@Nonnull Random random) {
            double var = this.variance.getAmount(random);
            // uniform in [base - var, base + var): shift a [0, 2*var) sample down by var
            return this.base + random.nextDouble() * var * 2 - var;
        }

        @Override
        public String toString() {
            return "VariableAmount.BaseAndVariance(base=" + this.base + ", variance=" + this.variance + ")";
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) return true;
            if (!(o instanceof BaseAndVariance)) return false;
            final BaseAndVariance other = (BaseAndVariance) o;
            return Double.compare(this.base, other.base) == 0 && this.variance.equals(other.variance);
        }

        @Override
        public int hashCode() {
            final int PRIME = 59;
            int result = 1;
            result = result * PRIME + (int) (Double.doubleToLongBits(this.base) >>> 32 ^ Double.doubleToLongBits(this.base));
            result = result * PRIME + this.variance.hashCode();
            return result;
        }
    }

    /**
     * Represents a base amount with a random addition, the final amount will be
     * the base amount plus a random amount between zero (inclusive) and the
     * addition (exclusive).
     */
    final class BaseAndAddition implements VariableAmount {

        private final double base;
        private final VariableAmount addition;

        private BaseAndAddition(double base, VariableAmount addition) {
            this.base = base;
            this.addition = addition;
        }

        @Override
        public double getAmount(@Nonnull Random random) {
            // uniform in [base, base + addition)
            return this.base + (random.nextDouble() * this.addition.getAmount(random));
        }

        @Override
        public String toString() {
            return "VariableAmount.BaseAndAddition(base=" + this.base + ", addition=" + this.addition + ")";
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) return true;
            if (!(o instanceof BaseAndAddition)) return false;
            final BaseAndAddition other = (BaseAndAddition) o;
            return Double.compare(this.base, other.base) == 0 && this.addition.equals(other.addition);
        }

        @Override
        public int hashCode() {
            final int PRIME = 59;
            int result = 1;
            result = result * PRIME + (int) (Double.doubleToLongBits(this.base) >>> 32 ^ Double.doubleToLongBits(this.base));
            result = result * PRIME + this.addition.hashCode();
            return result;
        }
    }

    /**
     * Represents a variable amount which has a base and a chance of varying.
     * This wraps another {@link VariableAmount} which it refers to if the
     * chance succeeds.
     */
    final class OptionalAmount implements VariableAmount {

        private final double base;
        private final double chance;
        private final VariableAmount inner;

        OptionalAmount(double base, double chance, VariableAmount inner) {
            this.base = base;
            this.chance = chance;
            this.inner = inner;
        }

        @Override
        public double getAmount(@Nonnull Random random) {
            // with probability `chance`, delegate to the wrapped amount;
            // otherwise fall back to the plain base value
            if (random.nextDouble() < this.chance) {
                return this.inner.getAmount(random);
            }
            return this.base;
        }

        @Override
        public String toString() {
            return "VariableAmount.OptionalAmount(base=" + this.base + ", chance=" + this.chance + ", inner=" + this.inner + ")";
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) return true;
            if (!(o instanceof OptionalAmount)) return false;
            final OptionalAmount other = (OptionalAmount) o;
            return Double.compare(this.base, other.base) == 0 && Double.compare(this.chance, other.chance) == 0 && this.inner.equals(other.inner);
        }

        @Override
        public int hashCode() {
            final int PRIME = 59;
            int result = 1;
            result = result * PRIME + (int) (Double.doubleToLongBits(this.base) >>> 32 ^ Double.doubleToLongBits(this.base));
            result = result * PRIME + (int) (Double.doubleToLongBits(this.chance) >>> 32 ^ Double.doubleToLongBits(this.chance));
            result = result * PRIME + this.inner.hashCode();
            return result;
        }
    }
}
package org.apache.cordova.facebook;

import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Iterator;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;

import com.facebook.android.DialogError;
import com.facebook.android.Facebook;
import com.facebook.android.Facebook.DialogListener;
import com.facebook.android.FacebookError;

import org.apache.cordova.api.Plugin;
import org.apache.cordova.api.PluginResult;

/**
 * Cordova plugin bridging the JavaScript Facebook Connect API to the native
 * Android Facebook SDK. Supported actions: init, login, logout,
 * getLoginStatus and showDialog. The session token/expiry is persisted in
 * default SharedPreferences under the keys "access_token"/"access_expires".
 */
public class ConnectPlugin extends Plugin {

    public static final String SINGLE_SIGN_ON_DISABLED = "service_disabled";

    private final String TAG = "ConnectPlugin";

    private Facebook facebook; // used for dialog auth
    private String[] permissions = new String[] {};
    private String callbackId;
    private Bundle paramBundle;
    private String method;

    /** Logs a status/message pair at debug level. */
    private void logStatus(PluginResult.Status status, String message) {
        Log.d(TAG, PluginResult.StatusMessages[status.ordinal()] + ": " + message);
    }

    /** Logs the JSON payload and wraps it in a PluginResult. */
    public PluginResult logResult(PluginResult.Status status, final JSONObject json) {
        logStatus(status, json.toString());
        return new PluginResult(status, json);
    }

    /** Logs a message plus JSON payload and wraps the JSON in a PluginResult. */
    public PluginResult logResult(PluginResult.Status status, final String msg, final JSONObject json) {
        logStatus(status, msg + ": " + json.toString());
        return new PluginResult(status, json);
    }

    /** Logs a plain message and wraps it in a PluginResult. */
    public PluginResult logResult(PluginResult.Status status, final String msg) {
        logStatus(status, msg); // msg is already a String; no toString() needed
        return new PluginResult(status, msg);
    }

    /** Logs a message and returns a NO_RESULT PluginResult. */
    public PluginResult logNoResult(final String msg) {
        logStatus(PluginResult.Status.NO_RESULT, msg);
        return new PluginResult(PluginResult.Status.NO_RESULT);
    }

    /**
     * Dispatches a JS action. "login" and "showDialog" are asynchronous and
     * report through the stored callbackId; the initial NO_RESULT result keeps
     * that callback alive.
     */
    @Override
    public PluginResult execute(String action, JSONArray args, final String callbackId) {
        PluginResult pr = new PluginResult(PluginResult.Status.NO_RESULT);
        pr.setKeepCallback(true);

        if (action.equals("init")) {
            try {
                String appId = args.getString(0);
                facebook = new Facebook(appId);
                Log.d(TAG, "init: Initializing plugin.");
                // Restore any session persisted by a previous run.
                SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this.ctx.getContext());
                String accessToken = prefs.getString("access_token", null);
                long expires = prefs.getLong("access_expires", -1); // primitive long: avoids pointless boxing
                if (accessToken != null && expires != -1) {
                    this.facebook.setAccessToken(accessToken);
                    this.facebook.setAccessExpires(expires);
                }
                if (!facebook.isSessionValid()) {
                    return logNoResult("session invalid");
                }
                return logResult(PluginResult.Status.OK, "init", this.getResponse());
            } catch (JSONException e) {
                e.printStackTrace();
                return logResult(PluginResult.Status.ERROR, "Invalid JSON args used. expected a string as the first arg.");
            }
        } else if (action.equals("login")) {
            if (facebook != null) {
                final ConnectPlugin me = this;
                String[] requestedPermissions = new String[args.length()];
                try {
                    for (int i = 0; i < args.length(); i++) {
                        requestedPermissions[i] = args.getString(i);
                    }
                } catch (JSONException e1) {
                    e1.printStackTrace();
                    return logResult(PluginResult.Status.ERROR, "Invalid JSON args used. Expected a string array of permissions.");
                }
                this.ctx.setActivityResultCallback(this);
                this.permissions = requestedPermissions;
                this.callbackId = callbackId;
                Log.d(TAG, "authorizing");
                Runnable runnable = new Runnable() {
                    public void run() {
                        // SDK requires the hosting Activity for the auth flow.
                        me.facebook.authorize((Activity) me.ctx, me.permissions, new AuthorizeListener(me));
                    }
                };
                this.ctx.runOnUiThread(runnable);
            } else {
                pr = logResult(PluginResult.Status.ERROR, "Must call init before login.");
            }
        } else if (action.equals("logout")) {
            if (facebook != null) {
                try {
                    facebook.logout(this.ctx.getContext());
                    // One editor, one commit — instead of two separate round trips.
                    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this.ctx.getContext());
                    prefs.edit()
                            .putLong("access_expires", -1)
                            .putString("access_token", null)
                            .commit();
                    // FIX: OK must only be reported on success. Previously this line
                    // ran after the catch blocks and overwrote the error results.
                    pr = logResult(PluginResult.Status.OK, "logout", getResponse());
                } catch (MalformedURLException e) {
                    e.printStackTrace();
                    pr = logResult(PluginResult.Status.MALFORMED_URL_EXCEPTION, "Error logging out.");
                } catch (IOException e) {
                    e.printStackTrace();
                    pr = logResult(PluginResult.Status.IO_EXCEPTION, "Error logging out.");
                }
            } else {
                pr = logResult(PluginResult.Status.ERROR, "Must call init before logout.");
            }
        } else if (action.equals("getLoginStatus")) {
            if (facebook != null) {
                pr = logResult(PluginResult.Status.OK, "getLoginStatus", getResponse());
            } else {
                pr = logResult(PluginResult.Status.ERROR, "Must call init before getLoginStatus.");
            }
        } else if (action.equals("showDialog")) {
            if (facebook != null) {
                Bundle collect = new Bundle();
                JSONObject params;
                try {
                    params = args.getJSONObject(0);
                } catch (JSONException e) {
                    params = new JSONObject();
                }
                final ConnectPlugin me = this;
                Iterator<?> iter = params.keys();
                while (iter.hasNext()) {
                    String key = (String) iter.next();
                    if (key.equals("method")) {
                        try {
                            this.method = params.getString(key);
                        } catch (JSONException e) {
                            Log.w(TAG, "Nonstring method parameter provided to dialog");
                        }
                    } else {
                        try {
                            collect.putString(key, params.getString(key));
                        } catch (JSONException e) {
                            // Need to handle JSON parameters
                            Log.w(TAG, "Nonstring parameter provided to dialog discarded");
                        }
                    }
                }
                this.paramBundle = new Bundle(collect);
                this.callbackId = callbackId;
                Runnable runnable = new Runnable() {
                    public void run() {
                        me.facebook.dialog(me.ctx.getContext(), me.method, me.paramBundle, new UIDialogListener(me));
                    }
                };
                this.ctx.runOnUiThread(runnable);
            } else {
                pr = logResult(PluginResult.Status.ERROR, "Must call init before showDialog.");
            }
        }
        return pr;
    }

    /** Forwards the SSO activity result back to the Facebook SDK. */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        facebook.authorizeCallback(requestCode, resultCode, data);
    }

    /**
     * Builds the JS-facing session object. Uses JSONObject instead of string
     * concatenation so that tokens containing quotes/backslashes are escaped
     * correctly (the previous hand-built string produced invalid JSON in that
     * case). Field names and value formats are unchanged.
     */
    public JSONObject getResponse() {
        try {
            JSONObject authResponse = new JSONObject();
            authResponse.put("accessToken", facebook.getAccessToken());
            // expiresIn was emitted as a quoted string before; keep it a string.
            authResponse.put("expiresIn", String.valueOf(facebook.getAccessExpires()));
            authResponse.put("session_key", true);
            authResponse.put("sig", "...");
            JSONObject response = new JSONObject();
            response.put("status", facebook.isSessionValid() ? "connected" : "unknown");
            response.put("authResponse", authResponse);
            return response;
        } catch (JSONException e) {
            e.printStackTrace();
            return new JSONObject();
        }
    }

    /** Relays Facebook dialog outcomes to the stored Cordova callback. */
    class UIDialogListener implements DialogListener {
        final ConnectPlugin fba;

        public UIDialogListener(ConnectPlugin fba) {
            super();
            this.fba = fba;
        }

        public void onComplete(Bundle values) {
            this.fba.success(logResult(PluginResult.Status.OK, values.toString()), this.fba.callbackId);
        }

        public void onFacebookError(FacebookError e) {
            Log.d(TAG, "facebook error");
            this.fba.error("Facebook error: " + e.getMessage(), callbackId);
        }

        public void onError(DialogError e) {
            Log.d(TAG, "other error");
            this.fba.error("Dialog error: " + e.getMessage(), this.fba.callbackId);
        }

        public void onCancel() {
            Log.d(TAG, "cancel");
            this.fba.error("Cancelled", this.fba.callbackId);
        }
    }

    /** Persists the session on successful auth and reports back to JS. */
    class AuthorizeListener implements DialogListener {
        final ConnectPlugin fba;

        public AuthorizeListener(ConnectPlugin fba) {
            super();
            this.fba = fba;
        }

        public void onComplete(Bundle values) {
            // Handle a successful login: persist token + expiry in one commit.
            String token = this.fba.facebook.getAccessToken();
            long tokenExpires = this.fba.facebook.getAccessExpires();
            SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this.fba.ctx.getContext());
            prefs.edit()
                    .putLong("access_expires", tokenExpires)
                    .putString("access_token", token)
                    .commit();
            Log.d(TAG, "authorized");
            Log.d(TAG, values.toString());
            this.fba.success(getResponse(), this.fba.callbackId);
        }

        public void onFacebookError(FacebookError e) {
            Log.d(TAG, "facebook error");
            this.fba.error("Facebook error: " + e.getMessage(), callbackId);
        }

        public void onError(DialogError e) {
            Log.d(TAG, "other error");
            this.fba.error("Dialog error: " + e.getMessage(), this.fba.callbackId);
        }

        public void onCancel() {
            Log.d(TAG, "cancel");
            this.fba.error("Cancelled", this.fba.callbackId);
        }
    }
}