gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright 2014 Feedzai * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.feedzai.commons.sql.abstraction.engine.impl.abs; import com.feedzai.commons.sql.abstraction.ddl.DbEntity; import com.feedzai.commons.sql.abstraction.engine.DatabaseEngine; import com.feedzai.commons.sql.abstraction.engine.DatabaseEngineDriver; import com.feedzai.commons.sql.abstraction.engine.DatabaseEngineException; import com.feedzai.commons.sql.abstraction.engine.DatabaseFactory; import com.feedzai.commons.sql.abstraction.engine.DatabaseFactoryException; import com.feedzai.commons.sql.abstraction.engine.configuration.PdbProperties; import com.feedzai.commons.sql.abstraction.engine.testconfig.DatabaseConfiguration; import com.feedzai.commons.sql.abstraction.engine.testconfig.DatabaseTestUtil; import com.feedzai.commons.sql.abstraction.entry.EntityEntry; import org.assertj.core.api.ThrowableAssert; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.sql.Connection; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Properties; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.Phaser; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.StampedLock; import static com.feedzai.commons.sql.abstraction.ddl.DbColumnType.INT; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.all; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.column; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.count; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.dbEntity; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.entry; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.eq; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.k; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.select; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.table; import static com.feedzai.commons.sql.abstraction.dml.dialect.SqlBuilder.update; import static com.feedzai.commons.sql.abstraction.engine.configuration.PdbProperties.ENGINE; import static com.feedzai.commons.sql.abstraction.engine.configuration.PdbProperties.ISOLATION_LEVEL; import static com.feedzai.commons.sql.abstraction.engine.configuration.PdbProperties.JDBC; import static com.feedzai.commons.sql.abstraction.engine.configuration.PdbProperties.PASSWORD; import static com.feedzai.commons.sql.abstraction.engine.configuration.PdbProperties.SCHEMA_POLICY; import static com.feedzai.commons.sql.abstraction.engine.configuration.PdbProperties.USERNAME; import static com.feedzai.commons.sql.abstraction.util.Constants.RETRYABLE_EXCEPTIONS; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.catchThrowable; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import 
static org.junit.Assert.fail; /** * @author Rui Vilao (rui.vilao@feedzai.com) * @since 2.0.0 */ @RunWith(Parameterized.class) public class EngineIsolationTest { @Rule public Timeout timeout = Timeout.seconds(120); protected Properties properties; @Parameterized.Parameters public static Collection<DatabaseConfiguration> data() throws Exception { return DatabaseTestUtil.loadConfigurations(); } @Parameterized.Parameter public DatabaseConfiguration config; /** * The {@link DatabaseEngineDriver} corresponding to the current {@link #config test config}. */ private DatabaseEngineDriver engineDriver; /** * A list of actions to perform when a test finishes. */ private List<ThrowableAssert.ThrowingCallable> closeActions = new ArrayList<>(); @Before public void init() { this.properties = new Properties() { { setProperty(JDBC, config.jdbc); setProperty(USERNAME, config.username); setProperty(PASSWORD, config.password); setProperty(ENGINE, config.engine); setProperty(SCHEMA_POLICY, "create"); } }; final PdbProperties pdbProps = new PdbProperties(this.properties, true); this.engineDriver = DatabaseEngineDriver.fromEngine(pdbProps.getEngine()); } @After public void cleanup() { this.closeActions.forEach(ThrowableAssert::catchThrowable); } @Test public void readCommittedTest() throws DatabaseFactoryException { properties.setProperty(ISOLATION_LEVEL, "read_committed"); DatabaseFactory.getConnection(properties); } @Test public void readUncommittedTest() throws DatabaseFactoryException { this.properties.setProperty(ISOLATION_LEVEL, "read_uncommitted"); DatabaseFactory.getConnection(properties); } @Test public void repeatableReadTest() throws DatabaseFactoryException { this.properties.setProperty(ISOLATION_LEVEL, "repeatable_read"); DatabaseFactory.getConnection(properties); } @Test public void serializableTest() throws DatabaseFactoryException { this.properties.setProperty(ISOLATION_LEVEL, "serializable"); DatabaseFactory.getConnection(properties); } /** * Tests whether the current DB 
engine in the default isolation level (usually "read committed") will cause * deadlocks when there are concurrent transactions acquiring DB locks (writes) and Java locks in different order. * <p> * Besides testing current DB engines with default isolation level, this test will allow verification of new DB * engines, and with small modifications, different isolation levels. * <p> * The following table describes the sequence of events of both transactions used in the test. A deadlock may occur * at T4 if the "SELECT *" query in Transaction 2 gets blocked by the previous persist operation in Transaction 1: * Transaction 2 gets blocked until Transaction 1 commits or rolls back, but Transaction 1 can only advance when it * is able to acquire the Java lock (which is only released after Transaction2 advances). * <table> * <tr><th>time</th><th>transaction 1</th><th>transaction 2</th></tr> * <tr><td>T0</td><td>begin</td></tr> * <tr><td>T1</td><td>persist (DB write lock)</td></tr> * <tr><td>T2</td><td/><td>begin</td></tr> * <tr><td>T3</td><td/><td>acquire Java lock</td></tr> * <tr><td>T4</td><td>try acquire Java lock</td><td>query select *</td></tr> * <tr><td>T5</td><td></td><td>commit</td></tr> * <tr><td>T6</td><td></td><td>release Java lock</td></tr> * <tr><td>T7</td><td>acquire Java lock</td><td></td></tr> * <tr><td>T8</td><td>commit</td></tr> * <tr><td>T9</td><td>release Java lock</td></tr> * </table> * * @throws Exception if something goes wrong in the test. 
* @since 2.4.7 */ @Test public void deadlockTransactionTest() throws Exception { final StampedLock lock = new StampedLock(); final Phaser phaser = new Phaser(1); final ExecutorService executorService = Executors.newFixedThreadPool(2); this.closeActions.add(executorService::shutdownNow); final DbEntity entity = dbEntity().name("TEST") .addColumn("COL1", INT, true) .addColumn("COL2", INT) .pkFields("COL1") .build(); final DatabaseEngine engine = DatabaseFactory.getConnection(properties); engine.dropEntity(entity); engine.updateEntity(entity); final DatabaseEngine engine2 = engine.duplicate(null, true); this.closeActions.add(engine::close); this.closeActions.add(engine2::close); // persist an initial entry engine.persist("TEST", entry().set("COL2", 1).build()); // start Transaction 1 final Future<?> tx1 = executorService.submit(() -> { engine.beginTransaction(); try { final Long persist = engine.persist("TEST", entry().set("COL2", 2).build()); assertNotNull("Persist in Transaction 1 should be successful", persist); // arrive phase 0 - begin phase 1 (start Transaction 2) phaser.arrive(); // wait for phase 1 to complete - Transaction 2 should have began and acquired the Java lock phaser.awaitAdvance(1); lock.writeLockInterruptibly(); engine.commit(); } catch (final Exception ex) { throw new RuntimeException("Error occurred on Transaction 1", ex); } finally { if (engine.isTransactionActive()) { engine.rollback(); } lock.tryUnlockWrite(); // arrive phase 2 - begin phase 3 (proceed to end of test) phaser.arrive(); } }); // wait for phase 0 to complete - Transaction 1 should have began and persisted an entry to DB (acquiring a DB write lock) phaser.awaitAdvance(0); final AtomicInteger countResult = new AtomicInteger(); // start Transaction 2 final Future<?> tx2 = executorService.submit(() -> { try { engine2.beginTransaction(); long tstamp = lock.readLockInterruptibly(); if (isRealSerializableLevel(engine2)) { lock.tryConvertToOptimisticRead(tstamp); } // arrive phase 1 - 
begin phase 2 (transaction 1 can now try to acquire the lock, // only being able to do so after it is unlocked below) phaser.arrive(); // possible deadlock occurs on the next line if the engine blocks reads after a write in another transaction final Integer result = engine2.query(select(count(all()).alias("testcount")) .from(table("TEST"))) .get(0) .get("testcount") .toInt(); countResult.set(result); engine2.commit(); } catch (final Exception ex) { throw new RuntimeException("Error occurred on Transaction 2", ex); } finally { if (engine2.isTransactionActive()) { engine2.rollback(); } // at this point, if using only optimistic read, we would be validating the timestamp; // if invalid (i.e. a write occurred in the meantime) then we would retry the code inside the lock lock.tryUnlockRead(); } }); // wait for phase 1 to complete (no issues are expected up to this point; // only on the next test phase a deadlock might prevent a phase advance and timeout on the next wait point below) phaser.awaitAdvance(1); final int finalPhase; try { finalPhase = phaser.awaitAdvanceInterruptibly(2, 5, TimeUnit.SECONDS); } catch (final Exception ex) { fail("The transaction threads are deadlocked"); throw ex; } assertEquals( "Both transactions should have completed successfully, causing the main test thread to arrive at phase 3 of the test", 3, finalPhase ); assertThatCode(() -> tx1.get(1, TimeUnit.SECONDS)) .as("Code for Transaction 1 shouldn't have thrown any exceptions") .doesNotThrowAnyException(); assertThatCode(() -> tx2.get(1, TimeUnit.SECONDS)) .as("Code for Transaction 2 shouldn't have thrown any exceptions") .doesNotThrowAnyException(); /* check that the SELECT query returned a correct result (and indirectly, if the write completed) - if using a "real" SERIALIZABLE isolation level, the SELECT should wait for the write operation on the DB to complete (when using Java optimistic read lock), thus the result should reflect 2 rows - if not using a "real" SERIALIZABLE isolation level, 
the SELECT should complete before the write in the other transaction, thus seeing only the initial persisted row */ assertThat(countResult) .as("The SELECT query should return correct result (2 for \"real\" SERIALIZABLE isolation level, 1 otherwise)") .hasValue(isRealSerializableLevel(engine) ? 2 : 1); } /** * Returns whether the current engine is using a "real" SERIALIZABLE isolation level (the supported DB engines * oconfigured to be SERIALIZABLE are in fact normally using snapshot isolation). * * @param engine the {@link DatabaseEngine} to get isolation level information from. * @return whether the current engine is using a "real" SERIALIZABLE isolation level. * @throws Exception if something goes wrong in the verification. * @since 2.4.7 */ private boolean isRealSerializableLevel(final DatabaseEngine engine) throws Exception { if (engine.getConnection().getTransactionIsolation() == Connection.TRANSACTION_SERIALIZABLE) { return engineDriver == DatabaseEngineDriver.SQLSERVER || engineDriver == DatabaseEngineDriver.COCKROACHDB; } return false; } /** * This test causes deadlocks using concurrent transactions and verifies that those deadlocks are detected by the * database, PDB correctly signals such errors as retryable, and finally that when the failed transactions are * retried they are successful. * <p> * The actions to perform can be selects, updates or persists (each verification uses a pair of actions in which at * least one must perform a write on the DB). * Each transaction is ran on its own DB engine. * Additionally, the test can run with first transaction (transaction 0) finishing either before or after the second * action on the second transaction runs. 
* <p> * The following table describes the sequence of events of both transactions used in the test: * <table> * <tr><th>time</th><th>transaction 0</th><th>transaction 1</th></tr> * <tr><td>T0</td><td>BEGIN</td><td>BEGIN</td></tr> * <tr><td>T1</td><td>action1 on TEST0</td><td>action1 on TEST1</td></tr> * <tr><td>T2</td><td>action2 on TEST1</td></tr> * <tr>** if transaction 0 waits for transaction 1</tr> * <tr><td>T3</td><td/><td>action2 on TEST0</td></tr> * <tr><td>T4</td><td>COMMIT</td></tr> * <tr><td>T5</td><td></td><td>COMMIT (fails?)</td></tr> * <tr>** if transaction 0 commits before action2 on transaction 1</tr> * <tr><td>T3</td><td>COMMIT</td></tr> * <tr><td>T4</td><td/><td>action2 on TEST0 (fails?)</td></tr> * </table> * The COMMIT/action2 may fail or not at the indicated timestamps, depending on the database. * Some database may block on some of the actions until the other interfering transaction either finishes (commit) * or fails (e.g. a deadlock is detected by the database) - hence the use of asynchronous actions. * <p> * Finally, the test will check that the failed actions return an exception that is "retryable". * After that, the failed transaction is rolled back and ran again from the beginning. * In the end, the test checks if the tables have the expected data. * * @throws Exception if a problem occurs in the test. 
* @since 2.5.1 */ @Test public void deadlockRecoveryTest() throws Exception { final DbEngineAction selectAction = (engine, tableIdx) -> engine.query(select(all()).from(table("TEST" + tableIdx))); final DbEngineAction updateAction = (engine, tableIdx) -> engine.executeUpdate( update(table("TEST" + tableIdx)) .set(eq(column("COL2"), k(tableIdx + 1))) .where(eq(column("COL1"), k(1))) ); final DbEngineAction persistAction = (engine, tableIdx) -> engine.persist( "TEST" + tableIdx, entry().set("COL2", tableIdx + 1).build() ); Integer[][] expected = new Integer[][]{{1, 1}, {2, 2}}; runDeadlockExceptionTest(false, updateAction, persistAction, expected); runDeadlockExceptionTest(true, updateAction, persistAction, expected); expected = new Integer[][]{{0, 1}, {0, 2}}; runDeadlockExceptionTest(false, selectAction, persistAction, expected); runDeadlockExceptionTest(true, selectAction, persistAction, expected); runDeadlockExceptionTest(false, persistAction, selectAction, expected); runDeadlockExceptionTest(true, persistAction, selectAction, expected); } /** * This method runs the actions for {@link #deadlockRecoveryTest()}. * * @param waitAction2FromEngine2 Whether engine/transaction 0 waits for action 2 to execute on engine/transaction 1 * before proceeding to commit. * @param action1 The first action to run after beginning transaction. * @param action2 The second action to run after beginning transaction. * @param expected The expected values (first dimension corresponds to the table index, second * dimension contains the several expected values for that particular table). * @throws Exception if a problem occurs in the test. 
* @since 2.5.1 */ private void runDeadlockExceptionTest(final boolean waitAction2FromEngine2, final DbEngineAction action1, final DbEngineAction action2, final Integer[][] expected) throws Exception { final DatabaseEngine[] engines = prepareEngineForDeadlockRecoveryTest(); final ExecutorService executorService = Executors.newCachedThreadPool(); this.closeActions.add(executorService::shutdownNow); for (int i = 0; i < engines.length; i++) { engines[i].beginTransaction(); action1.runFor(engines[i], i); } final CompletableFuture<Boolean> success0Future = CompletableFuture.supplyAsync( () -> runAction(engines[0], () -> action2.runFor(engines[0], 1)), executorService ); final AtomicReference<CompletableFuture<Boolean>> success1FutureRef = new AtomicReference<>(); if (waitAction2FromEngine2) { success1FutureRef.set(CompletableFuture.supplyAsync( () -> runAction(engines[1], () -> action2.runFor(engines[1], 0)), executorService )); catchThrowable(() -> success1FutureRef.get().get(5, TimeUnit.SECONDS)); } final CompletableFuture<Boolean> success0Future1 = success0Future.thenApplyAsync( success0 -> success0 && runAction(engines[0], engines[0]::commit), executorService ); catchThrowable(() -> success0Future1.get(5, TimeUnit.SECONDS)); boolean success1; if (waitAction2FromEngine2) { success1 = success1FutureRef.get().get(5, TimeUnit.SECONDS); } else { success1 = runAction(engines[1], () -> action2.runFor(engines[1], 0)); } if (success1) { success1 = runAction(engines[1], engines[1]::commit); } if (!success1) { repeatTransaction(engines, 1, action1, action2); } else if (!Boolean.TRUE.equals(success0Future1.get(5, TimeUnit.SECONDS))) { repeatTransaction(engines, 0, action1, action2); } assertEntityValues(engines[0], expected); for (final DatabaseEngine engine : engines) { catchThrowable(engine::close); } } /** * This test causes deadlocks using concurrent transactions and verifies that those deadlocks are detected by the * database, PDB correctly signals such errors as retryable, 
and finally that when the failed transactions are * retried they are successful. * * This test is similar to {@link #deadlockRecoveryTest()}, but both actions used in this test perform writes (they * are both UPDATEs) and always act on the same row (identified by COL1=1). * Each transaction is ran on its own DB engine. * By using only writes and directly interfering on row level, this test may cause slightly different outcomes * depending on the database (on PostgreSQL this results in a 40P01 error instead of 40001; on Oracle it results in * ORA-08177 instead of ORA-00060). * <p> * The following table describes the sequence of events of both transactions used in the test: * <table> * <tr><th>time</th><th>transaction 0</th><th>transaction 1</th></tr> * <tr><td>T0</td><td>BEGIN</td><td>BEGIN</td></tr> * <tr><td>T1</td><td>update on TEST0</td><td>update on TEST1</td></tr> * <tr><td>T2</td><td>update on TEST1</td></tr> * <tr><td>T3</td><td>COMMIT</td></tr> * <tr><td>T4</td><td/><td>update on TEST0</td></tr> * <tr><td>T5</td><td></td><td>COMMIT</td></tr> * </table> * The actions on T2 to T5 are ran asynchronously and may occur in a different order than what's shown in the table. * Different databases may block and/or fail at different points. * Like in {@link #deadlockRecoveryTest()}, the test will check that the failed actions return an exception that * is "retryable" and will rollback and retry those. * In the end, the test checks if the tables have the expected data. * * @throws Exception if a problem occurs in the test. 
* @since 2.5.1 */ @Test public void directDeadlockRecoveryTest() throws Exception { final DbEngineAction updateAction = (engine, tableIdx) -> engine.executeUpdate( update(table("TEST" + tableIdx)) .set(eq(column("COL2"), k(tableIdx + 1))) .where(eq(column("COL1"), k(1))) ); final DatabaseEngine[] engines = prepareEngineForDeadlockRecoveryTest(); final ExecutorService executorService = Executors.newCachedThreadPool(); this.closeActions.add(executorService::shutdownNow); final CompletableFuture<Boolean>[] successFutures = new CompletableFuture[2]; for (int i = 0; i < engines.length; i++) { engines[i].beginTransaction(); final int idx = i; successFutures[i] = CompletableFuture.supplyAsync( () -> runAction(engines[idx], () -> updateAction.runFor(engines[idx], idx)), executorService ); } successFutures[0] = successFutures[0] .thenApplyAsync( success0 -> success0 && runAction(engines[0], () -> updateAction.runFor(engines[0], 1)), executorService ) .thenApplyAsync( success0 -> success0 && runAction(engines[0], engines[0]::commit), executorService ); catchThrowable(() -> successFutures[0].get(5, TimeUnit.SECONDS)); successFutures[1] = successFutures[1] .thenApplyAsync( success0 -> success0 && runAction(engines[1], () -> updateAction.runFor(engines[1], 0)), executorService ) .thenApplyAsync( success0 -> success0 && runAction(engines[1], engines[1]::commit), executorService ); // retry failed transactions for (int i = 0; i < successFutures.length; i++) { if (!successFutures[i].get(10, TimeUnit.SECONDS)) { repeatTransaction(engines, i, updateAction, updateAction); } } assertEntityValues(engines[0], new Integer[][]{{1}, {2}}); } /** * Prepares 2 {@link DatabaseEngine}s for deadlock recovery tests. * <p> * This method sets the session isolation level to SERIALIZABLE so that the expected deadlocks can occur and creates * 2 tables (TEST0 and TEST1) with an initial entry persisted. * * @return an array of {@link DatabaseEngine}. 
* @throws Exception if a problem occurs in the preparation. * @since 2.5.1 */ protected DatabaseEngine[] prepareEngineForDeadlockRecoveryTest() throws Exception { this.properties.setProperty(ISOLATION_LEVEL, "serializable"); final DbEntity.Builder entityBuilder = dbEntity() .addColumn("COL1", INT, true) .addColumn("COL2", INT) .pkFields("COL1"); final DatabaseEngine[] engines = new DatabaseEngine[2]; engines[0] = DatabaseFactory.getConnection(properties); final EntityEntry entry = entry().set("COL1", 1).set("COL2", 0).build(); for (int i = 0; i < 2; i++) { final DbEntity entity0 = entityBuilder.name("TEST" + i).build(); engines[0].dropEntity(entity0); engines[0].updateEntity(entity0); engines[0].persist(entity0.getName(), entry); } engines[1] = engines[0].duplicate(null, true); for (final DatabaseEngine engine : engines) { this.closeActions.add(engine::close); if (this.engineDriver.equals(DatabaseEngineDriver.MYSQL)) { // MySQL is too slow detecting deadlocks by default (default is 50 seconds) // for the purpose of the test, it can be reduced engine.executeUpdate("SET SESSION innodb_lock_wait_timeout = 1"); } } return engines; } /** * Runs an action on a database (for use in deadlock recovery tests). * <p> * If the action fails, this method asserts that the resulting exception is retryable, and if so confirms that the * transaction is still active and rolls it back. * * @param engine The DB engine to use to run the action. * @param action The action to perform. * @return {@code true} if the action succeeded, {@code false} otherwise. 
* @since 2.5.1 */ private boolean runAction(final DatabaseEngine engine, final ThrowableAssert.ThrowingCallable action) { final Throwable throwable = catchThrowable(action); if (throwable == null) { return true; } assertThat(throwable) .as("If a DB action fails due to deadlock, it should indicate it is retryable") .isInstanceOfAny(RETRYABLE_EXCEPTIONS.toArray(new Class[0])); assertTrue("A transaction failed due to deadlock should still be active and needs to be rolled back", engine.isTransactionActive()); engine.rollback(); return false; } /** * Repeats a transaction that failed due to deadlock in a deadlock recovery test. * * @param engines The engines setup by {@link #prepareEngineForDeadlockRecoveryTest()}. * @param engineIdx The index of the engine to be used to repeat the transaction. * @param action1 The first action to run after beginning transaction. * @param action2 The second action to run after beginning transaction. * @throws Exception if a problem occurs in the test. * @since 2.5.1 */ private void repeatTransaction(final DatabaseEngine[] engines, final int engineIdx, final DbEngineAction action1, final DbEngineAction action2) throws Exception { final DatabaseEngine engine = engines[engineIdx]; while(true) { try { engine.beginTransaction(); action1.runFor(engine, engineIdx); action2.runFor(engine, 1 - engineIdx); engine.commit(); break; } catch (final Exception ex) { if (!RETRYABLE_EXCEPTIONS.contains(ex.getClass())) { throw ex; } } } } /** * Performs an assertion on the values present in the tables setup by {@link #prepareEngineForDeadlockRecoveryTest()}. * * @param dbEngine The DB engine to use to get the values in the tables. * @param expected The expected values (first dimension corresponds to the table index, second dimension contains * the several expected values for that particular table). * @throws DatabaseEngineException if a problem occurs running the query to get data for the assertion. 
* @since 2.5.1 */ private static void assertEntityValues(final DatabaseEngine dbEngine, final Integer[][] expected) throws DatabaseEngineException { for (int i = 0; i < 1; i++) { assertThat(dbEngine.query(select(column("COL2")).from(table("TEST" + i)))) .as("COL2 in 'TEST%d' should have the expected value", i) .extracting(res -> res.get("COL2").toInt()) .containsExactly(expected[i]); } } /** * Represents an action to be performed on the database that can throw any exception. * This is meant to be used in the deadlock recovery tests, with the DB engines as setup by * {@link #prepareEngineForDeadlockRecoveryTest()}. * * @since 2.5.1 */ @FunctionalInterface private interface DbEngineAction { /** * Executes the action using the specified engine in the table referred to by the provided table index. */ void runFor(DatabaseEngine engine, int tableIdx) throws Exception; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.cluster.coordination.http.endpoints; import java.net.URI; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.regex.Pattern; import org.apache.nifi.cluster.coordination.http.EndpointResponseMerger; import org.apache.nifi.cluster.manager.NodeResponse; import org.apache.nifi.cluster.protocol.NodeIdentifier; import org.apache.nifi.controller.status.history.ConnectionStatusDescriptor; import org.apache.nifi.controller.status.history.MetricDescriptor; import org.apache.nifi.controller.status.history.ProcessGroupStatusDescriptor; import org.apache.nifi.controller.status.history.ProcessorStatusDescriptor; import org.apache.nifi.controller.status.history.RemoteProcessGroupStatusDescriptor; import org.apache.nifi.controller.status.history.StandardStatusSnapshot; import org.apache.nifi.controller.status.history.StatusHistoryUtil; import org.apache.nifi.controller.status.history.StatusSnapshot; import org.apache.nifi.web.api.dto.status.NodeStatusSnapshotsDTO; import org.apache.nifi.web.api.dto.status.StatusHistoryDTO; 
import org.apache.nifi.web.api.dto.status.StatusSnapshotDTO; import org.apache.nifi.web.api.entity.StatusHistoryEntity; public class StatusHistoryEndpointMerger implements EndpointResponseMerger { public static final Pattern PROCESSOR_STATUS_HISTORY_URI_PATTERN = Pattern.compile("/nifi-api/flow/processors/[a-f0-9\\-]{36}/status/history"); public static final Pattern PROCESS_GROUP_STATUS_HISTORY_URI_PATTERN = Pattern.compile("/nifi-api/flow/process-groups/(?:(?:root)|(?:[a-f0-9\\-]{36}))/status/history"); public static final Pattern REMOTE_PROCESS_GROUP_STATUS_HISTORY_URI_PATTERN = Pattern.compile("/nifi-api/flow/remote-process-groups/[a-f0-9\\-]{36}/status/history"); public static final Pattern CONNECTION_STATUS_HISTORY_URI_PATTERN = Pattern.compile("/nifi-api/flow/connections/[a-f0-9\\-]{36}/status/history"); private final long componentStatusSnapshotMillis; public StatusHistoryEndpointMerger(final long componentStatusSnapshotMillis) { this.componentStatusSnapshotMillis = componentStatusSnapshotMillis; } private Map<String, MetricDescriptor<?>> getMetricDescriptors(final URI uri) { final String path = uri.getPath(); final Map<String, MetricDescriptor<?>> metricDescriptors = new HashMap<>(); if (PROCESSOR_STATUS_HISTORY_URI_PATTERN.matcher(path).matches()) { for (final ProcessorStatusDescriptor descriptor : ProcessorStatusDescriptor.values()) { metricDescriptors.put(descriptor.getField(), descriptor.getDescriptor()); } } else if (PROCESS_GROUP_STATUS_HISTORY_URI_PATTERN.matcher(path).matches()) { for (final ProcessGroupStatusDescriptor descriptor : ProcessGroupStatusDescriptor.values()) { metricDescriptors.put(descriptor.getField(), descriptor.getDescriptor()); } } else if (REMOTE_PROCESS_GROUP_STATUS_HISTORY_URI_PATTERN.matcher(path).matches()) { for (final RemoteProcessGroupStatusDescriptor descriptor : RemoteProcessGroupStatusDescriptor.values()) { metricDescriptors.put(descriptor.getField(), descriptor.getDescriptor()); } } else if 
(CONNECTION_STATUS_HISTORY_URI_PATTERN.matcher(path).matches()) { for (final ConnectionStatusDescriptor descriptor : ConnectionStatusDescriptor.values()) { metricDescriptors.put(descriptor.getField(), descriptor.getDescriptor()); } } return metricDescriptors; } @Override public boolean canHandle(URI uri, String method) { if (!"GET".equalsIgnoreCase(method)) { return false; } final Map<String, MetricDescriptor<?>> descriptors = getMetricDescriptors(uri); return descriptors != null && !descriptors.isEmpty(); } @Override public NodeResponse merge(URI uri, String method, Set<NodeResponse> successfulResponses, Set<NodeResponse> problematicResponses, NodeResponse clientResponse) { final Map<String, MetricDescriptor<?>> metricDescriptors = getMetricDescriptors(uri); final StatusHistoryEntity responseEntity = clientResponse.getClientResponse().getEntity(StatusHistoryEntity.class); StatusHistoryDTO lastStatusHistory = null; final List<NodeStatusSnapshotsDTO> nodeStatusSnapshots = new ArrayList<>(successfulResponses.size()); LinkedHashMap<String, String> noReadPermissionsComponentDetails = null; for (final NodeResponse nodeResponse : successfulResponses) { final StatusHistoryEntity nodeResponseEntity = nodeResponse == clientResponse ? 
responseEntity : nodeResponse.getClientResponse().getEntity(StatusHistoryEntity.class); final StatusHistoryDTO nodeStatus = nodeResponseEntity.getStatusHistory(); lastStatusHistory = nodeStatus; if (noReadPermissionsComponentDetails == null && !nodeResponseEntity.getCanRead()) { // If component details from a history with no read permissions is encountered for the first time, hold on to them to be used in the merged response noReadPermissionsComponentDetails = nodeStatus.getComponentDetails(); } final NodeIdentifier nodeId = nodeResponse.getNodeId(); final NodeStatusSnapshotsDTO nodeStatusSnapshot = new NodeStatusSnapshotsDTO(); nodeStatusSnapshot.setNodeId(nodeId.getId()); nodeStatusSnapshot.setAddress(nodeId.getApiAddress()); nodeStatusSnapshot.setApiPort(nodeId.getApiPort()); nodeStatusSnapshot.setStatusSnapshots(nodeStatus.getAggregateSnapshots()); nodeStatusSnapshots.add(nodeStatusSnapshot); } final StatusHistoryDTO clusterStatusHistory = new StatusHistoryDTO(); clusterStatusHistory.setAggregateSnapshots(mergeStatusHistories(nodeStatusSnapshots, metricDescriptors)); clusterStatusHistory.setGenerated(new Date()); clusterStatusHistory.setNodeSnapshots(nodeStatusSnapshots); if (lastStatusHistory != null) { clusterStatusHistory.setComponentDetails(noReadPermissionsComponentDetails == null ? 
lastStatusHistory.getComponentDetails() : noReadPermissionsComponentDetails); clusterStatusHistory.setFieldDescriptors(lastStatusHistory.getFieldDescriptors()); } final StatusHistoryEntity clusterEntity = new StatusHistoryEntity(); clusterEntity.setStatusHistory(clusterStatusHistory); clusterEntity.setCanRead(noReadPermissionsComponentDetails == null); return new NodeResponse(clientResponse, clusterEntity); } private List<StatusSnapshotDTO> mergeStatusHistories(final List<NodeStatusSnapshotsDTO> nodeStatusSnapshots, final Map<String, MetricDescriptor<?>> metricDescriptors) { // We want a Map<Date, List<StatusSnapshot>>, which is a Map of "normalized Date" (i.e., a time range, essentially) // to all Snapshots for that time. The list will contain one snapshot for each node. However, we can have the case // where the NCM has a different value for the componentStatusSnapshotMillis than the nodes have. In this case, // we end up with multiple entries in the List<StatusSnapshot> for the same node/timestamp, which skews our aggregate // results. In order to avoid this, we will use only the latest snapshot for a node that falls into the the time range // of interest. // To accomplish this, we have an intermediate data structure, which is a Map of "normalized Date" to an inner Map // of Node Identifier to StatusSnapshot. We then will flatten this Map and aggregate the results. 
final Map<Date, Map<String, StatusSnapshot>> dateToNodeSnapshots = new TreeMap<>(); // group status snapshot's for each node by date for (final NodeStatusSnapshotsDTO nodeStatusSnapshot : nodeStatusSnapshots) { for (final StatusSnapshotDTO snapshotDto : nodeStatusSnapshot.getStatusSnapshots()) { final StatusSnapshot snapshot = createSnapshot(snapshotDto, metricDescriptors); final Date normalizedDate = normalizeStatusSnapshotDate(snapshot.getTimestamp(), componentStatusSnapshotMillis); Map<String, StatusSnapshot> nodeToSnapshotMap = dateToNodeSnapshots.get(normalizedDate); if (nodeToSnapshotMap == null) { nodeToSnapshotMap = new HashMap<>(); dateToNodeSnapshots.put(normalizedDate, nodeToSnapshotMap); } nodeToSnapshotMap.put(nodeStatusSnapshot.getNodeId(), snapshot); } } // aggregate the snapshots by (normalized) timestamp final Map<Date, List<StatusSnapshot>> snapshotsToAggregate = new TreeMap<>(); for (final Map.Entry<Date, Map<String, StatusSnapshot>> entry : dateToNodeSnapshots.entrySet()) { final Date normalizedDate = entry.getKey(); final Map<String, StatusSnapshot> nodeToSnapshot = entry.getValue(); final List<StatusSnapshot> snapshotsForTimestamp = new ArrayList<>(nodeToSnapshot.values()); snapshotsToAggregate.put(normalizedDate, snapshotsForTimestamp); } final List<StatusSnapshotDTO> aggregatedSnapshots = aggregate(snapshotsToAggregate); return aggregatedSnapshots; } private StatusSnapshot createSnapshot(final StatusSnapshotDTO snapshotDto, final Map<String, MetricDescriptor<?>> metricDescriptors) { final StandardStatusSnapshot snapshot = new StandardStatusSnapshot(); snapshot.setTimestamp(snapshotDto.getTimestamp()); final Map<String, Long> metrics = snapshotDto.getStatusMetrics(); for (final Map.Entry<String, Long> entry : metrics.entrySet()) { final String metricId = entry.getKey(); final Long value = entry.getValue(); final MetricDescriptor<?> descriptor = metricDescriptors.get(metricId); if (descriptor != null) { snapshot.addStatusMetric(descriptor, 
value); } } return snapshot; } private List<StatusSnapshotDTO> aggregate(Map<Date, List<StatusSnapshot>> snapshotsToAggregate) { // Aggregate the snapshots final List<StatusSnapshotDTO> aggregatedSnapshotDtos = new ArrayList<>(); for (final Map.Entry<Date, List<StatusSnapshot>> entry : snapshotsToAggregate.entrySet()) { final List<StatusSnapshot> snapshots = entry.getValue(); final StatusSnapshot reducedSnapshot = snapshots.get(0).getValueReducer().reduce(snapshots); final StatusSnapshotDTO dto = new StatusSnapshotDTO(); dto.setTimestamp(reducedSnapshot.getTimestamp()); dto.setStatusMetrics(StatusHistoryUtil.createStatusSnapshotDto(reducedSnapshot).getStatusMetrics()); aggregatedSnapshotDtos.add(dto); } return aggregatedSnapshotDtos; } public static Date normalizeStatusSnapshotDate(final Date toNormalize, final long numMillis) { final long time = toNormalize.getTime(); return new Date(time - time % numMillis); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence; import java.io.File; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.CacheRebalanceMode; import org.apache.ignite.cache.CacheWriteSynchronizationMode; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.ConnectorConfiguration; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.failure.FailureContext; import org.apache.ignite.failure.FailureHandler; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.pagemem.PageIdUtils; import org.apache.ignite.internal.pagemem.wal.StorageException; import org.apache.ignite.internal.processors.cache.IgniteInternalCache; import 
org.apache.ignite.internal.processors.cache.persistence.metastorage.MetaStorage;
import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryEx;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.PagePartitionMetaIO;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

import static org.apache.ignite.IgniteSystemProperties.IGNITE_PDS_SKIP_CRC;
import static org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager.DFLT_STORE_DIR;
import static org.apache.ignite.internal.processors.cache.persistence.metastorage.MetaStorage.METASTORAGE_CACHE_ID;

/**
 * Tests that a node is invalidated through the configured {@link FailureHandler} when its
 * persistent store (partition files, WAL, meta storage or cache meta pages) is corrupted.
 */
public class IgnitePdsCorruptedStoreTest extends GridCommonAbstractTest {
    /** */
    private static final String CACHE_NAME1 = "cache1";

    /** */
    private static final String CACHE_NAME2 = "cache2";

    /** Failure handler. */
    private DummyFailureHandler failureHnd;

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        cleanPersistenceDir();

        super.beforeTest();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        super.afterTest();

        stopAllGrids();

        cleanPersistenceDir();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setConnectorConfiguration(new ConnectorConfiguration());

        cfg.setConsistentId(igniteInstanceName);

        DataStorageConfiguration memCfg = new DataStorageConfiguration()
            .setDefaultDataRegionConfiguration(
                new DataRegionConfiguration()
                    .setMaxSize(100 * 1024 * 1024)
                    .setPersistenceEnabled(true)
            );

        cfg.setDataStorageConfiguration(memCfg);

        cfg.setCacheConfiguration(cacheConfiguration(CACHE_NAME1), cacheConfiguration(CACHE_NAME2));

        // Each test instance installs a fresh handler so waitFailure() observes only this node's failures.
        failureHnd = new DummyFailureHandler();

        cfg.setFailureHandler(failureHnd);

        return cfg;
    }

    /**
     * @param file File name.
     * @return File or folder in work directory.
     * @throws IgniteCheckedException If failed to resolve file name.
     */
    private File file(String file) throws IgniteCheckedException {
        return U.resolveWorkDirectory(U.defaultWorkDirectory(), file, false);
    }

    /**
     * Create cache configuration.
     *
     * @param name Cache name.
     */
    private CacheConfiguration cacheConfiguration(String name) {
        CacheConfiguration ccfg = new CacheConfiguration();

        ccfg.setName(name);
        ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
        ccfg.setRebalanceMode(CacheRebalanceMode.SYNC);
        ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        ccfg.setAffinity(new RendezvousAffinityFunction(false, 32));
        ccfg.setBackups(2);

        return ccfg;
    }

    /**
     * Test node invalidation when a partition file has been deleted after a checkpoint.
     *
     * @throws Exception If test failed.
     */
    public void testNodeInvalidatedWhenPersistenceIsCorrupted() throws Exception {
        Ignite ignite = startGrid(0);

        startGrid(1);

        ignite.cluster().active(true);

        awaitPartitionMapExchange();

        IgniteCache<Integer, String> cache1 = ignite.cache(CACHE_NAME1);

        for (int i = 0; i < 100; ++i)
            cache1.put(i, String.valueOf(i));

        forceCheckpoint();

        cache1.put(2, "test");

        String nodeName = ignite.name().replaceAll("\\.", "_");

        stopAllGrids();

        U.delete(file(String.format("db/%s/cache-%s/part-2.bin", nodeName, CACHE_NAME1)));

        startGrid(1);

        try {
            startGrid(0);
        }
        catch (Exception e) {
            // No-op: startup is expected to fail because of the deleted partition file.
            // The actual verification is done through the failure handler below,
            // consistently with the other corruption tests in this class.
        }

        waitFailure(StorageException.class);
    }

    /**
     * Test node invalidation when page CRC is wrong and page not found in wal.
     *
     * @throws Exception In case of fail
     */
    public void testWrongPageCRC() throws Exception {
        // Write pages without CRC first, so that reading them with CRC checks enabled fails.
        System.setProperty(IGNITE_PDS_SKIP_CRC, "true");

        IgniteEx ignite = startGrid(0);

        ignite.cluster().active(true);

        ignite.cluster().active(false);

        stopGrid(0);

        System.setProperty(IGNITE_PDS_SKIP_CRC, "false");

        // Remove the WAL so the corrupted page cannot be restored from it.
        File dbDir = U.resolveWorkDirectory(U.defaultWorkDirectory(), DFLT_STORE_DIR, false);
        File walDir = new File(dbDir, "wal");

        U.delete(walDir);

        try {
            startGrid(0);

            ignite.cluster().active(true);
        }
        catch (Exception e) {
            // No-op.
        }

        waitFailure(StorageException.class);
    }

    /**
     * Test node invalidation when meta storage is corrupted.
     */
    public void testMetaStorageCorruption() throws Exception {
        IgniteEx ignite = startGrid(0);

        ignite.cluster().active(true);

        MetaStorage metaStorage = ignite.context().cache().context().database().metaStorage();

        corruptTreeRoot(ignite, (PageMemoryEx)metaStorage.pageMemory(), METASTORAGE_CACHE_ID, 0);

        stopGrid(0);

        try {
            startGrid(0);

            ignite.cluster().active(true);
        }
        catch (Exception e) {
            // No-op.
        }

        waitFailure(StorageException.class);
    }

    /**
     * Test node invalidation when cache meta is corrupted.
     */
    public void testCacheMetaCorruption() throws Exception {
        IgniteEx ignite = startGrid(0);

        ignite.cluster().active(true);

        IgniteInternalCache cache = ignite.cachex(CACHE_NAME1);

        cache.put(1, 1);

        int partId = cache.affinity().partition(1);

        int grpId = cache.context().group().groupId();

        corruptTreeRoot(ignite, (PageMemoryEx)cache.context().dataRegion().pageMemory(), grpId, partId);

        ignite.cluster().active(false);

        stopGrid(0);

        try {
            startGrid(0);

            ignite.cluster().active(true);

            cache.put(1, 1);
        }
        catch (Exception e) {
            // No-op.
        }

        waitFailure(StorageException.class);
    }

    /**
     * Overwrites the partition-meta tree root pointer with an invalid page id, under checkpoint read lock.
     *
     * @param ignite Ignite.
     * @param pageMem Page memory.
     * @param grpId Group id.
     * @param partId Partition id.
     */
    private void corruptTreeRoot(IgniteEx ignite, PageMemoryEx pageMem, int grpId, int partId)
        throws IgniteCheckedException {
        ignite.context().cache().context().database().checkpointReadLock();

        try {
            long partMetaId = pageMem.partitionMetaPageId(grpId, partId);
            long partMetaPage = pageMem.acquirePage(grpId, partMetaId);

            try {
                long pageAddr = pageMem.writeLock(grpId, partMetaId, partMetaPage);

                try {
                    PagePartitionMetaIO io = PageIO.getPageIO(pageAddr);

                    // Corrupt tree root
                    io.setTreeRoot(pageAddr, PageIdUtils.pageId(0, (byte)0, 0));
                }
                catch (Exception e) {
                    fail("Failed to change page: " + e.getMessage());
                }
                finally {
                    pageMem.writeUnlock(grpId, partMetaId, partMetaPage, null, true);
                }
            }
            finally {
                pageMem.releasePage(grpId, partMetaId, partMetaPage);
            }
        }
        finally {
            ignite.context().cache().context().database().checkpointReadUnlock();
        }
    }

    /**
     * Waits until the failure handler is triggered and checks the recorded error type.
     *
     * @param expError Expected error.
     */
    private void waitFailure(Class<? extends Throwable> expError) throws IgniteInterruptedCheckedException {
        assertTrue(GridTestUtils.waitForCondition(() -> failureHnd.failure(), 5_000L));

        assertTrue(expError.isInstance(failureHnd.error()));
    }

    /**
     * Dummy failure handler that only records the first failure instead of acting on it.
     */
    public static class DummyFailureHandler implements FailureHandler {
        /** Failure. */
        private volatile boolean failure = false;

        /** Error. */
        private volatile Throwable error = null;

        /**
         * @return failure.
         */
        public boolean failure() {
            return failure;
        }

        /**
         * @return Error.
         */
        public Throwable error() {
            return error;
        }

        /** {@inheritDoc} */
        @Override public boolean onFailure(Ignite ignite, FailureContext failureCtx) {
            failure = true;

            error = failureCtx.error();

            return true;
        }
    }
}
/****************************************************************************** * * * Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. * * * * COPYRIGHT: * * This software is the property of Wimba S.A. * * This software is redistributed under the Xiph.org variant of * * the BSD license. * * Redistribution and use in source and binary forms, with or without * * modification, are permitted provided that the following conditions * * are met: * * - Redistributions of source code must retain the above copyright * * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * * notice, this list of conditions and the following disclaimer in the * * documentation and/or other materials provided with the distribution. * * - Neither the name of Wimba, the Xiph.org Foundation nor the names of * * its contributors may be used to endorse or promote products derived * * from this software without specific prior written permission. * * * * WARRANTIES: * * This software is made available by the authors in the hope * * that it will be useful, but without any warranty. * * Wimba S.A. is not liable for any consequence related to the * * use of the provided software. * * * * Class: SbEncoder.java * * * * Author: Marc GIMPEL * * Based on code by: Jean-Marc VALIN * * * * Date: 9th April 2003 * * * ******************************************************************************/ /* $Id: SbEncoder.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */ /* Copyright (C) 2002 Jean-Marc Valin Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Xiph.org Foundation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.xiph.speex; /** * Wideband Speex Encoder * * @author Marc Gimpel, Wimba S.A. 
(mgimpel@horizonwimba.com)
 * @version $Revision: 1.2 $
 */
public class SbEncoder extends SbCodec implements Encoder {
  /** The Narrowband Quality map indicates which narrowband submode to use for the given wideband/ultra-wideband quality setting */
  public static final int[] NB_QUALITY_MAP = {1, 8, 2, 3, 4, 5, 5, 6, 6, 7, 7};
  /** The Wideband Quality map indicates which sideband submode to use for the given wideband/ultra-wideband quality setting */
  public static final int[] WB_QUALITY_MAP = {1, 1, 1, 1, 1, 1, 2, 2, 3, 3, 4};
  /** The Ultra-wideband Quality map indicates which sideband submode to use for the given ultra-wideband quality setting */
  public static final int[] UWB_QUALITY_MAP = {0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};

  /** The encoder for the lower half of the Spectrum. */
  protected Encoder lowenc;

  private float[] x1d;        // high-band half of the QMF-decomposed input
  private float[] h0_mem;     // QMF decomposition filter memory
  private float[] buf;        // windowed high-band analysis buffer
  private float[] swBuf;      /** Weighted signal buffer */
  private float[] res;        // zero-response (ringing) buffer
  private float[] target;     // target signal for innovation quantization
  private float[] window;     // analysis window
  private float[] lagWindow;  // lag window applied to the auto-correlation
  private float[] rc;         /** Reflection coefficients */
  private float[] autocorr;   /** auto-correlation */
  private float[] lsp;        /** LSPs for current frame */
  private float[] old_lsp;    /** LSPs for previous frame */
  private float[] interp_lsp; /** Interpolated LSPs */
  private float[] interp_lpc; /** Interpolated LPCs */
  private float[] bw_lpc1;    /** LPCs after bandwidth expansion by gamma1 for perceptual weighting*/
  private float[] bw_lpc2;    /** LPCs after bandwidth expansion by gamma2 for perceptual weighting*/
  private float[] mem_sp2;    // filter memory used when computing the "real excitation"
  private float[] mem_sw;     /** Filter memory for perceptually-weighted signal */

  /** Number of submodes available for the current band (wideband vs ultra-wideband). */
  protected int nb_modes;

  private boolean uwb;              // true when configured as ultra-wideband

  protected int complexity;         /** Complexity setting (0-10 from least complex to most complex) */
  protected int vbr_enabled;        /** 1 for enabling VBR, 0 otherwise */
  protected int vad_enabled;        /** 1 for enabling VAD, 0 otherwise */
  protected int abr_enabled;        /** ABR setting (in bps), 0 if off */
  protected float vbr_quality;      /** Quality setting for VBR encoding */
  protected float relative_quality; /** Relative quality that will be needed by VBR */
  protected float abr_drift;        // long-term deviation from the ABR target bitrate
  protected float abr_drift2;       // short-term (smoothed) deviation from the ABR target bitrate
  protected float abr_count;        // number of frames encoded since ABR was (re)configured
  protected int sampling_rate;      // sampling rate in Hz (16000 for wideband, 32000 for ultra-wideband)
  protected int submodeSelect;      /** Mode chosen by the user (may differ from submodeID if VAD is on) */

  /**
   * Wideband initialisation
   */
  public void wbinit() {
    lowenc = new NbEncoder();
    ((NbEncoder)lowenc).nbinit();
    // Initialize SubModes
    super.wbinit();
    // Initialize variables
    init(160, 40, 8, 640, .9f);
    uwb = false;
    nb_modes = 5;
    sampling_rate = 16000;
  }

  /**
   * Ultra-wideband initialisation
   */
  public void uwbinit() {
    // The low band of an ultra-wideband encoder is itself a wideband encoder.
    lowenc = new SbEncoder();
    ((SbEncoder)lowenc).wbinit();
    // Initialize SubModes
    super.uwbinit();
    // Initialize variables
    init(320, 80, 8, 1280, .7f);
    uwb = true;
    nb_modes = 2;
    sampling_rate = 32000;
  }

  /**
   * Initialisation
   * @param frameSize    number of samples in the (sub-band) frame.
   * @param subframeSize number of samples per subframe.
   * @param lpcSize      LPC analysis order.
   * @param bufSize      size of the excitation buffer.
   * @param foldingGain  gain used for spectral folding.
   */
  public void init(final int frameSize, final int subframeSize, final int lpcSize, final int bufSize, final float foldingGain) {
    super.init(frameSize, subframeSize, lpcSize, bufSize, foldingGain);
    complexity = 3; // in C it's 2 here, but set to 3 automatically by the encoder
    vbr_enabled = 0; // disabled by default
    vad_enabled = 0; // disabled by default
    abr_enabled = 0; // disabled by default
    vbr_quality = 8;
    submodeSelect = submodeID;

    x1d = new float[frameSize];
    h0_mem = new float[QMF_ORDER];
    buf = new float[windowSize];
    swBuf = new float[frameSize];
    res = new float[frameSize];
    target = new float[subframeSize];

    window = Misc.window(windowSize, subframeSize);
    lagWindow = Misc.lagWindow(lpcSize, lag_factor);

    rc = new float[lpcSize];
    autocorr = new float[lpcSize+1];
    lsp = new float[lpcSize];
    old_lsp = new float[lpcSize];
    interp_lsp = new float[lpcSize];
    interp_lpc = new float[lpcSize+1];
    bw_lpc1 = new float[lpcSize+1];
    bw_lpc2 = new float[lpcSize+1];

    mem_sp2 = new float[lpcSize];
    mem_sw = new float[lpcSize];

    abr_count = 0;
  }

  /**
   * Encode the given input signal.
   * Splits the input into low and high sub-bands via QMF decomposition, delegates the
   * low band to {@link #lowenc}, then encodes the high band with LPC analysis-by-synthesis.
   * Note: 'in' is overwritten at the end with the locally reconstructed signal.
   * @param bits - Speex bits buffer.
   * @param in - the raw mono audio frame to encode.
   * @return 1 if successful.
   */
  public int encode(final Bits bits, final float[] in) {
    int i;
    float[] mem, innov, syn_resp;
    float[] low_pi_gain, low_exc, low_innov;
    int dtx;

    /* Compute the two sub-bands by filtering with h0 and h1*/
    Filters.qmf_decomp(in, h0, x0d, x1d, fullFrameSize, QMF_ORDER, h0_mem);
    /* Encode the narrowband part*/
    lowenc.encode(bits, x0d);

    /* High-band buffering / sync with low band */
    for (i=0;i<windowSize-frameSize;i++)
      high[i] = high[frameSize+i];
    for (i=0;i<frameSize;i++)
      high[windowSize-frameSize+i]=x1d[i];

    System.arraycopy(excBuf, frameSize, excBuf, 0, bufSize-frameSize);

    low_pi_gain = lowenc.getPiGain();
    low_exc = lowenc.getExc();
    low_innov = lowenc.getInnov();

    int low_mode = lowenc.getMode();
    // Low-band mode 0 means the narrowband encoder decided not to transmit (DTX).
    if (low_mode==0)
      dtx=1;
    else
      dtx=0;

    /* Start encoding the high-band */
    for (i=0; i<windowSize; i++)
      buf[i] = high[i] * window[i];

    /* Compute auto-correlation */
    Lpc.autocorr(buf, autocorr, lpcSize+1, windowSize);

    autocorr[0] += 1;         /* prevents NANs */
    autocorr[0] *= lpc_floor; /* Noise floor in auto-correlation domain */
    /* Lag windowing: equivalent to filtering in the power-spectrum domain */
    for (i=0; i<lpcSize+1; i++)
      autocorr[i] *= lagWindow[i];

    /* Levinson-Durbin */
    Lpc.wld(lpc, autocorr, rc, lpcSize); // tmperr
    System.arraycopy(lpc, 0, lpc, 1, lpcSize);
    lpc[0]=1;

    /* LPC to LSPs (x-domain) transform */
    int roots = Lsp.lpc2lsp(lpc, lpcSize, lsp, 15, 0.2f);
    if (roots != lpcSize) {
      // Retry with a finer search before giving up.
      roots = Lsp.lpc2lsp(lpc, lpcSize, lsp, 11, 0.02f);
      if (roots != lpcSize) {
        /*If we can't find all LSP's, do some damage control and use a flat filter*/
        for (i=0; i<lpcSize; i++) {
          lsp[i]=(float)Math.cos(Math.PI*((float)(i+1))/(lpcSize+1));
        }
      }
    }

    /* x-domain to angle domain*/
    for (i=0; i<lpcSize; i++)
      lsp[i] = (float) Math.acos(lsp[i]);

    // Spectral distance to the previous frame's LSPs (computed but not read afterwards
    // in the visible code — NOTE(review): confirm whether lsp_dist is still needed).
    float lsp_dist=0;
    for (i=0;i<lpcSize;i++)
      lsp_dist += (old_lsp[i] - lsp[i])*(old_lsp[i] - lsp[i]);

    /*VBR stuff*/
    if ((vbr_enabled != 0 || vad_enabled != 0) && dtx == 0) {
      float e_low=0, e_high=0;
      float ratio;
      if (abr_enabled != 0) {
        float qual_change=0;
        if (abr_drift2 * abr_drift > 0) {
          /* Only adapt if long-term and short-term drift are the same sign */
          qual_change = -.00001f*abr_drift/(1+abr_count);
          if (qual_change>.1f)
            qual_change=.1f;
          if (qual_change<-.1f)
            qual_change=-.1f;
        }
        vbr_quality += qual_change;
        if (vbr_quality>10)
          vbr_quality=10;
        if (vbr_quality<0)
          vbr_quality=0;
      }

      // Energy ratio between the two sub-bands drives the high-band mode decision.
      for (i=0;i<frameSize;i++) {
        e_low  += x0d[i]* x0d[i];
        e_high += high[i]* high[i];
      }
      ratio = (float) Math.log((1+e_high)/(1+e_low));
      relative_quality = lowenc.getRelativeQuality();

      if (ratio<-4)
        ratio=-4;
      if (ratio>2)
        ratio=2;
      /*if (ratio>-2)*/
      if (vbr_enabled != 0) {
        // Pick the highest submode whose quality threshold is met.
        int modeid;
        modeid = nb_modes-1;
        relative_quality+=1.0*(ratio+2);
        if (relative_quality<-1) {
          relative_quality=-1;
        }
        while (modeid != 0) {
          int v1;
          float thresh;
          v1=(int)Math.floor(vbr_quality);
          if (v1==10)
            thresh = Vbr.hb_thresh[modeid][v1];
          else
            thresh = (vbr_quality-v1) * Vbr.hb_thresh[modeid][v1+1] + (1+v1-vbr_quality) * Vbr.hb_thresh[modeid][v1];
          if (relative_quality >= thresh)
            break;
          modeid--;
        }
        setMode(modeid);
        if (abr_enabled != 0) {
          // Track how far the produced bitrate drifts from the ABR target.
          int bitrate;
          bitrate = getBitRate();
          abr_drift+=(bitrate-abr_enabled);
          abr_drift2 = .95f*abr_drift2 + .05f*(bitrate-abr_enabled);
          abr_count += 1.0;
        }
      }
      else {
        /* VAD only */
        int modeid;
        if (relative_quality<2.0)
          modeid=1;
        else
          modeid=submodeSelect;
        /*speex_encoder_ctl(state, SPEEX_SET_MODE, &mode);*/
        submodeID=modeid;
      }
      /*fprintf (stderr, "%f %f\n", ratio, low_qual);*/
    }

    // Wideband layer marker bit, then the chosen submode.
    bits.pack(1, 1);
    if (dtx != 0)
      bits.pack(0, SB_SUBMODE_BITS);
    else
      bits.pack(submodeID, SB_SUBMODE_BITS);

    /* If null mode (no transmission), just set a couple things to zero*/
    if (dtx != 0 || submodes[submodeID] == null) {
      for (i=0; i<frameSize; i++)
        excBuf[excIdx+i]=swBuf[i]=VERY_SMALL;

      for (i=0; i<lpcSize; i++)
        mem_sw[i]=0;
      first=1;

      /* Final signal synthesis from excitation */
      Filters.iir_mem2(excBuf, excIdx, interp_qlpc, high, 0, subframeSize, lpcSize, mem_sp);

      /* Reconstruct the original */
      filters.fir_mem_up(x0d, h0, y0, fullFrameSize, QMF_ORDER, g0_mem);
      filters.fir_mem_up(high, h1, y1, fullFrameSize, QMF_ORDER, g1_mem);
      for (i=0; i<fullFrameSize; i++)
        in[i]=2*(y0[i]-y1[i]);

      if (dtx != 0)
        return 0;
      else
        return 1;
    }

    /* LSP quantization */
    submodes[submodeID].lsqQuant.quant(lsp, qlsp, lpcSize, bits);

    // On the very first frame there is no history yet: seed it with the current frame.
    if (first != 0) {
      for (i=0; i<lpcSize; i++)
        old_lsp[i] = lsp[i];
      for (i=0; i<lpcSize; i++)
        old_qlsp[i] = qlsp[i];
    }

    mem = new float[lpcSize];
    syn_resp = new float[subframeSize];
    innov = new float[subframeSize];

    // Per-subframe analysis-by-synthesis loop.
    for (int sub=0; sub<nbSubframes; sub++) {
      float tmp, filter_ratio;
      int exc, sp, sw, resp;
      int offset;
      float rl, rh, eh=0, el=0;
      int fold;

      /* Offsets */
      offset = subframeSize*sub;
      sp=offset;
      exc=excIdx+offset;
      resp=offset;
      sw=offset;

      /* LSP interpolation (quantized and unquantized) */
      tmp = (1.0f + sub)/nbSubframes;
      for (i=0; i<lpcSize; i++)
        interp_lsp[i] = (1-tmp)*old_lsp[i] + tmp*lsp[i];
      for (i=0; i<lpcSize; i++)
        interp_qlsp[i] = (1-tmp)*old_qlsp[i] + tmp*qlsp[i];

      Lsp.enforce_margin(interp_lsp, lpcSize, .05f);
      Lsp.enforce_margin(interp_qlsp, lpcSize, .05f);

      /* Compute interpolated LPCs (quantized and unquantized) */
      for (i=0; i<lpcSize; i++)
        interp_lsp[i] = (float) Math.cos(interp_lsp[i]);
      for (i=0; i<lpcSize; i++)
        interp_qlsp[i] = (float) Math.cos(interp_qlsp[i]);
      m_lsp.lsp2lpc(interp_lsp, interp_lpc, lpcSize);
      m_lsp.lsp2lpc(interp_qlsp, interp_qlpc, lpcSize);

      Filters.bw_lpc(gamma1, interp_lpc, bw_lpc1, lpcSize);
      Filters.bw_lpc(gamma2, interp_lpc, bw_lpc2, lpcSize);

      /* Compute mid-band (4000 Hz for wideband) response of low-band and high-band filters */
      rl=rh=0;
      tmp=1;
      pi_gain[sub]=0;
      for (i=0; i<=lpcSize; i++) {
        rh += tmp*interp_qlpc[i];
        tmp = -tmp;
        pi_gain[sub]+=interp_qlpc[i];
      }
      rl = low_pi_gain[sub];
      rl=1/(Math.abs(rl)+.01f);
      rh=1/(Math.abs(rh)+.01f);

      /* Compute ratio, will help predict the gain */
      filter_ratio=Math.abs(.01f+rh)/(.01f+Math.abs(rl));
      fold = filter_ratio<5 ? 1 : 0;
      /*printf ("filter_ratio %f\n", filter_ratio);*/
      // NOTE(review): 'fold' is computed above but immediately forced to 0 and never
      // read afterwards in the visible code — confirm whether this is intentional.
      fold=0;

      /* Compute "real excitation" */
      Filters.fir_mem2(high, sp, interp_qlpc, excBuf, exc, subframeSize, lpcSize, mem_sp2);
      /* Compute energy of low-band and high-band excitation */
      for (i=0; i<subframeSize; i++)
        eh+=excBuf[exc+i]*excBuf[exc+i];

      if (submodes[submodeID].innovation == null) {/* 1 for spectral folding excitation, 0 for stochastic */
        // Spectral-folding branch: only a single gain is transmitted; the high-band
        // excitation is derived from the low-band innovation on the decoder side.
        float g;
        /*speex_bits_pack(bits, 1, 1);*/
        for (i=0; i<subframeSize; i++)
          el+=low_innov[offset+i]*low_innov[offset+i];

        /* Gain to use if we want to use the low-band excitation for high-band */
        g=eh/(.01f+el);
        g=(float) Math.sqrt(g);
        g *= filter_ratio;
        /*print_vec(&g, 1, "gain factor");*/

        /* Gain quantization */
        {
          int quant = (int) Math.floor(.5 + 10 + 8.0 * Math.log((g+.0001)));
          /*speex_warning_int("tata", quant);*/
          if (quant<0)
            quant=0;
          if (quant>31)
            quant=31;
          bits.pack(quant, 5);
          g=(float)(.1*Math.exp(quant/9.4));
        }
        /*printf ("folding gain: %f\n", g);*/
        g /= filter_ratio;
      }
      else {
        // Stochastic (innovation codebook) branch.
        float gc, scale, scale_1;

        for (i=0; i<subframeSize; i++)
          el+=low_exc[offset+i]*low_exc[offset+i];
        /*speex_bits_pack(bits, 0, 1);*/

        // Quantize the overall gain (4 bits).
        gc = (float) (Math.sqrt(1+eh)*filter_ratio/Math.sqrt((1+el)*subframeSize));
        {
          int qgc = (int)Math.floor(.5+3.7*(Math.log(gc)+2));
          if (qgc<0)
            qgc=0;
          if (qgc>15)
            qgc=15;
          bits.pack(qgc, 4);
          gc = (float) Math.exp((1/3.7)*qgc-2);
        }

        scale = gc*(float)Math.sqrt(1+el)/filter_ratio;
        scale_1 = 1/scale;

        // Compute the synthesis-filter impulse response (unit impulse through the filter).
        for (i=0; i<subframeSize; i++)
          excBuf[exc+i]=0;
        excBuf[exc]=1;
        Filters.syn_percep_zero(excBuf, exc, interp_qlpc, bw_lpc1, bw_lpc2, syn_resp, subframeSize, lpcSize);

        /* Reset excitation */
        for (i=0; i<subframeSize; i++)
          excBuf[exc+i]=0;

        /* Compute zero response (ringing) of A(z/g1) / ( A(z/g2) * Aq(z) ) */
        for (i=0; i<lpcSize; i++)
          mem[i]=mem_sp[i];
        Filters.iir_mem2(excBuf, exc, interp_qlpc, excBuf, exc, subframeSize, lpcSize, mem);
        for (i=0; i<lpcSize; i++)
          mem[i]=mem_sw[i];
        Filters.filter_mem2(excBuf, exc, bw_lpc1, bw_lpc2, res, resp, subframeSize, lpcSize, mem, 0);

        /* Compute weighted signal */
        for (i=0; i<lpcSize; i++)
          mem[i]=mem_sw[i];
        Filters.filter_mem2(high, sp, bw_lpc1, bw_lpc2, swBuf, sw, subframeSize, lpcSize, mem, 0);

        /* Compute target signal */
        for (i=0; i<subframeSize; i++)
          target[i]=swBuf[sw+i]-res[resp+i];

        for (i=0; i<subframeSize; i++)
          excBuf[exc+i]=0;

        for (i=0; i<subframeSize; i++)
          target[i]*=scale_1;

        /* Reset excitation */
        for (i=0; i<subframeSize; i++)
          innov[i]=0;

        /*print_vec(target, st->subframeSize, "\ntarget");*/
        submodes[submodeID].innovation.quant(target, interp_qlpc, bw_lpc1, bw_lpc2, lpcSize,
                                             subframeSize, innov, 0, syn_resp, bits, (complexity+1)>>1);
        /*print_vec(target, st->subframeSize, "after");*/

        for (i=0; i<subframeSize; i++)
          excBuf[exc+i] += innov[i]*scale;

        if (submodes[submodeID].double_codebook != 0) {
          // Second-stage codebook: quantize the residual of the first stage at a finer scale.
          float[] innov2 = new float[subframeSize];
          for (i=0; i<subframeSize; i++)
            innov2[i]=0;
          for (i=0; i<subframeSize; i++)
            target[i]*=2.5;
          submodes[submodeID].innovation.quant(target, interp_qlpc, bw_lpc1, bw_lpc2, lpcSize,
                                               subframeSize, innov2, 0, syn_resp, bits, (complexity+1)>>1);
          for (i=0; i<subframeSize; i++)
            innov2[i]*=scale*(1/2.5);
          for (i=0; i<subframeSize; i++)
            excBuf[exc+i] += innov2[i];
        }
      }

      /*Keep the previous memory*/
      for (i=0; i<lpcSize; i++)
        mem[i]=mem_sp[i];
      /* Final signal synthesis from excitation */
      Filters.iir_mem2(excBuf, exc, interp_qlpc, high, sp, subframeSize, lpcSize, mem_sp);

      /* Compute weighted signal again, from synthesized speech (not sure it's the right thing) */
      Filters.filter_mem2(high, sp, bw_lpc1, bw_lpc2, swBuf, sw, subframeSize, lpcSize, mem_sw, 0);
    }

    //#ifndef RELEASE
    /* Reconstruct the original */
    filters.fir_mem_up(x0d, h0, y0, fullFrameSize, QMF_ORDER, g0_mem);
    filters.fir_mem_up(high, h1, y1, fullFrameSize, QMF_ORDER, g1_mem);
    for (i=0; i<fullFrameSize; i++)
      in[i]=2*(y0[i]-y1[i]);
    //#endif

    // Save this frame's LSPs for interpolation in the next frame.
    for (i=0; i<lpcSize; i++)
      old_lsp[i] = lsp[i];
    for (i=0; i<lpcSize; i++)
      old_qlsp[i] = qlsp[i];
    first=0;
    return 1;
  }

  /**
   * Returns the size in bits of an audio frame encoded with the current mode.
   * Includes the bits consumed by the low-band encoder.
   * @return the size in bits of an audio frame encoded with the current mode.
   */
  public int getEncodedFrameSize() {
    int size = SB_FRAME_SIZE[submodeID];
    size += lowenc.getEncodedFrameSize();
    return size;
  }

  //---------------------------------------------------------------------------
  // Speex Control Functions
  //---------------------------------------------------------------------------

  /**
   * Sets the Quality.
   * Clamps the value to [0, 10] and maps it to submodes for both this encoder
   * and the low-band encoder via the quality maps.
   * @param quality the desired quality (0-10).
   */
  public void setQuality(int quality) {
    if (quality < 0) {
      quality = 0;
    }
    if (quality > 10) {
      quality = 10;
    }
    if (uwb) {
      lowenc.setQuality(quality);
      this.setMode(UWB_QUALITY_MAP[quality]);
    }
    else {
      lowenc.setMode(NB_QUALITY_MAP[quality]);
      this.setMode(WB_QUALITY_MAP[quality]);
    }
  }

  /**
   * Sets the Varible Bit Rate Quality.
   * The low band gets a slightly higher quality target than the high band.
   * @param quality the desired VBR quality (0-10).
   */
  public void setVbrQuality(float quality) {
    vbr_quality = quality;
    float qual = quality + 0.6f;
    if (qual>10)
      qual=10;
    lowenc.setVbrQuality(qual);
    int q = (int)Math.floor(.5+quality);
    if (q>10)
      q=10;
    setQuality(q);
  }

  /**
   * Sets whether or not to use Variable Bit Rate encoding.
   * @param vbr true to enable VBR in both this encoder and the low-band encoder.
   */
  public void setVbr(final boolean vbr) {
    // super.setVbr(vbr);
    vbr_enabled = vbr ? 1 : 0;
    lowenc.setVbr(vbr);
  }

  /**
   * Sets the Average Bit Rate.
   * Searches for the highest quality setting whose bitrate does not exceed the
   * target, then enables VBR around that quality and resets the drift trackers.
   * @param abr the target average bitrate in bits per second (0 disables ABR).
   */
  public void setAbr(final int abr) {
    lowenc.setVbr(true);
    // super.setAbr(abr);
    abr_enabled = (abr!=0) ? 1 : 0;
    vbr_enabled = 1;
    {
      int i=10, rate, target;
      float vbr_qual;
      target = abr;
      while (i>=0) {
        setQuality(i);
        rate = getBitRate();
        if (rate <= target)
          break;
        i--;
      }
      vbr_qual=i;
      if (vbr_qual<0)
        vbr_qual=0;
      setVbrQuality(vbr_qual);
      abr_count=0;
      abr_drift=0;
      abr_drift2=0;
    }
  }

  /**
   * Returns the bitrate.
   * @return the bitrate.
*/ public int getBitRate() { if (submodes[submodeID] != null) return lowenc.getBitRate() + sampling_rate*submodes[submodeID].bits_per_frame/frameSize; else return lowenc.getBitRate() + sampling_rate*(SB_SUBMODE_BITS+1)/frameSize; } /** * Sets the sampling rate. * @param rate */ public void setSamplingRate(final int rate) { // super.setSamplingRate(rate); sampling_rate = rate; lowenc.setSamplingRate(rate); } /** * Return LookAhead. * @return LookAhead. */ public int getLookAhead() { return 2*lowenc.getLookAhead() + QMF_ORDER - 1; } /** * */ // public void resetState() // { // } //--------------------------------------------------------------------------- //--------------------------------------------------------------------------- /** * Sets the encoding submode. * @param mode */ public void setMode(int mode) { if (mode < 0) { mode = 0; } submodeID = submodeSelect = mode; } /** * Returns the encoding submode currently in use. * @return the encoding submode currently in use. */ public int getMode() { return submodeID; } /** * Sets the bitrate. * @param bitrate */ public void setBitRate(final int bitrate) { for (int i=10; i>=0; i--) { setQuality(i); if (getBitRate() <= bitrate) return; } } /** * Returns whether or not we are using Variable Bit Rate encoding. * @return whether or not we are using Variable Bit Rate encoding. */ public boolean getVbr() { return vbr_enabled != 0; } /** * Sets whether or not to use Voice Activity Detection encoding. * @param vad */ public void setVad(final boolean vad) { vad_enabled = vad ? 1 : 0; } /** * Returns whether or not we are using Voice Activity Detection encoding. * @return whether or not we are using Voice Activity Detection encoding. */ public boolean getVad() { return vad_enabled != 0; } /** * Sets whether or not to use Discontinuous Transmission encoding. * @param dtx */ public void setDtx(final boolean dtx) { dtx_enabled = dtx ? 1 : 0; } /** * Returns the Average Bit Rate used (0 if ABR is not turned on). 
* @return the Average Bit Rate used (0 if ABR is not turned on). */ public int getAbr() { return abr_enabled; } /** * Returns the Varible Bit Rate Quality. * @return the Varible Bit Rate Quality. */ public float getVbrQuality() { return vbr_quality; } /** * Sets the algorthmic complexity. * @param complexity */ public void setComplexity(int complexity) { if (complexity < 0) complexity = 0; if (complexity > 10) complexity = 10; this.complexity = complexity; } /** * Returns the algorthmic complexity. * @return the algorthmic complexity. */ public int getComplexity() { return complexity; } /** * Returns the sampling rate. * @return the sampling rate. */ public int getSamplingRate() { return sampling_rate; } /** * Returns the relative quality. * @return the relative quality. */ public float getRelativeQuality() { return relative_quality; } }
/*----------------------------------------------------------------------------*/ /* Copyright (c) FIRST 2008-2012. All Rights Reserved. */ /* Open Source Software - may be modified and shared by FRC teams. The code */ /* must be accompanied by the FIRST BSD license file in the root directory of */ /* the project. */ /*----------------------------------------------------------------------------*/ package edu.wpi.first.wpilibj; import java.nio.ByteOrder; import java.nio.ByteBuffer; import edu.wpi.first.wpilibj.communication.FRCNetworkCommunicationsLibrary.tInstances; import edu.wpi.first.wpilibj.communication.FRCNetworkCommunicationsLibrary.tResourceType; import edu.wpi.first.wpilibj.communication.UsageReporting; import edu.wpi.first.wpilibj.hal.EncoderJNI; import edu.wpi.first.wpilibj.hal.HALUtil; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.livewindow.LiveWindowSendable; import edu.wpi.first.wpilibj.tables.ITable; import edu.wpi.first.wpilibj.util.BoundaryException; /** * Class to read quad encoders. Quadrature encoders are devices that count shaft * rotation and can sense direction. The output of the QuadEncoder class is an * integer that can count either up or down, and can go negative for reverse * direction counting. When creating QuadEncoders, a direction is supplied that * changes the sense of the output to make code more readable if the encoder is * mounted such that forward movement generates negative values. Quadrature * encoders have two digital outputs, an A Channel and a B Channel that are out * of phase with each other to allow the FPGA to do direction sensing. * * All encoders will immediately start counting - reset() them if you need them * to be zeroed before use. 
public class Encoder extends SensorBase implements CounterBase, PIDSource,
        LiveWindowSendable {

    /** How the optional index (reset) channel triggers an automatic count reset. */
    public enum IndexingType {
        kResetWhileHigh, kResetWhileLow, kResetOnFallingEdge, kResetOnRisingEdge
    }

    /**
     * The a source
     */
    protected DigitalSource m_aSource; // the A phase of the quad encoder
    /**
     * The b source
     */
    protected DigitalSource m_bSource; // the B phase of the quad encoder
    /**
     * The index source
     */
    protected DigitalSource m_indexSource = null; // Index on some encoders
    private ByteBuffer m_encoder; // opaque handle returned by EncoderJNI (k4X only)
    private int m_index; // FPGA index of the underlying encoder/counter
    private double m_distancePerPulse; // distance of travel for each encoder
                                       // tick
    private Counter m_counter; // Counter object for 1x and 2x encoding
    private EncodingType m_encodingType = EncodingType.k4X;
    private int m_encodingScale; // 1x, 2x, or 4x, per the encodingType
    private boolean m_allocatedA; // true when this class created m_aSource and must free() it
    private boolean m_allocatedB; // true when this class created m_bSource and must free() it
    private boolean m_allocatedI; // true when this class created m_indexSource and must free() it
    private PIDSourceParameter m_pidSource; // which quantity pidGet() reports

    /**
     * Common initialization code for Encoders. This code allocates resources
     * for Encoders and is common to all constructors.
     *
     * The encoder will start counting immediately.
     *
     * @param reverseDirection
     *            If true, counts down instead of up (this is all relative)
     * @param encodingType
     *            either k1X, k2X, or k4X to indicate 1X, 2X or 4X decoding. If
     *            4X is selected, then an encoder FPGA object is used and the
     *            returned counts will be 4x the encoder spec'd value since all
     *            rising and falling edges are counted. If 1X or 2X are selected
     *            then a counter object will be used and the returned value will
     *            either exactly match the spec'd count or be double (2x) the
     *            spec'd count.
 */
private void initEncoder(boolean reverseDirection) {
    switch (m_encodingType.value) {
    case EncodingType.k4X_val:
        // 4x decoding uses a dedicated FPGA encoder resource via JNI.
        m_encodingScale = 4;
        ByteBuffer status = ByteBuffer.allocateDirect(4);
        // set the byte order (JNI buffers are little-endian)
        status.order(ByteOrder.LITTLE_ENDIAN);
        ByteBuffer index = ByteBuffer.allocateDirect(4);
        // set the byte order
        index.order(ByteOrder.LITTLE_ENDIAN);
        m_encoder = EncoderJNI.initializeEncoder(
                (byte) m_aSource.getModuleForRouting(),
                m_aSource.getChannelForRouting(),
                (byte) (m_aSource.getAnalogTriggerForRouting() ? 1 : 0),
                (byte) m_bSource.getModuleForRouting(),
                m_bSource.getChannelForRouting(),
                (byte) (m_bSource.getAnalogTriggerForRouting() ? 1 : 0),
                (byte) (reverseDirection ? 1 : 0), index.asIntBuffer(),
                status.asIntBuffer());
        HALUtil.checkStatus(status.asIntBuffer());
        // The FPGA index is written back through the 'index' out-parameter.
        m_index = index.asIntBuffer().get(0);
        m_counter = null;
        setMaxPeriod(.5);
        break;
    case EncodingType.k2X_val:
    case EncodingType.k1X_val:
        // 1x/2x decoding is implemented on top of a Counter rather than a
        // dedicated FPGA encoder.
        m_encodingScale = m_encodingType == EncodingType.k1X ? 1 : 2;
        m_counter = new Counter(m_encodingType, m_aSource, m_bSource,
                reverseDirection);
        m_index = m_counter.getFPGAIndex();
        break;
    }
    m_distancePerPulse = 1.0;
    m_pidSource = PIDSourceParameter.kDistance;

    UsageReporting.report(tResourceType.kResourceType_Encoder, m_index,
            m_encodingType.value);
    LiveWindow.addSensor("Encoder", m_aSource.getChannelForRouting(), this);
}

/**
 * Encoder constructor. Construct a Encoder given a and b channels.
 *
 * The encoder will start counting immediately.
 *
 * @param aChannel
 *            The a channel DIO channel. 0-9 are on-board, 10-25 are on the MXP port
 * @param bChannel
 *            The b channel DIO channel. 0-9 are on-board, 10-25 are on the MXP port
 * @param reverseDirection
 *            represents the orientation of the encoder and inverts the
 *            output values if necessary so forward represents positive
 *            values.
*/ public Encoder(final int aChannel, final int bChannel, boolean reverseDirection) { m_allocatedA = true; m_allocatedB = true; m_allocatedI = false; m_aSource = new DigitalInput(aChannel); m_bSource = new DigitalInput(bChannel); initEncoder(reverseDirection); } /** * Encoder constructor. Construct a Encoder given a and b channels. * * The encoder will start counting immediately. * * @param aChannel * The a channel digital input channel. * @param bChannel * The b channel digital input channel. */ public Encoder(final int aChannel, final int bChannel) { this(aChannel, bChannel, false); } /** * Encoder constructor. Construct a Encoder given a and b channels. * * The encoder will start counting immediately. * * @param aChannel * The a channel digital input channel. * @param bChannel * The b channel digital input channel. * @param reverseDirection * represents the orientation of the encoder and inverts the * output values if necessary so forward represents positive * values. * @param encodingType * either k1X, k2X, or k4X to indicate 1X, 2X or 4X decoding. If * 4X is selected, then an encoder FPGA object is used and the * returned counts will be 4x the encoder spec'd value since all * rising and falling edges are counted. If 1X or 2X are selected * then a counter object will be used and the returned value will * either exactly match the spec'd count or be double (2x) the * spec'd count. */ public Encoder(final int aChannel, final int bChannel, boolean reverseDirection, final EncodingType encodingType) { m_allocatedA = true; m_allocatedB = true; m_allocatedI = false; if (encodingType == null) throw new NullPointerException("Given encoding type was null"); m_encodingType = encodingType; m_aSource = new DigitalInput(aChannel); m_bSource = new DigitalInput(bChannel); initEncoder(reverseDirection); } /** * Encoder constructor. Construct a Encoder given a and b channels. * Using an index pulse forces 4x encoding * * The encoder will start counting immediately. 
* * @param aChannel * The a channel digital input channel. * @param bChannel * The b channel digital input channel. * @param indexChannel * The index channel digital input channel. * @param reverseDirection * represents the orientation of the encoder and inverts the * output values if necessary so forward represents positive * values. */ public Encoder(final int aChannel, final int bChannel, final int indexChannel, boolean reverseDirection) { m_allocatedA = true; m_allocatedB = true; m_allocatedI = true; m_aSource = new DigitalInput(aChannel); m_bSource = new DigitalInput(bChannel); m_indexSource = new DigitalInput(indexChannel); initEncoder(reverseDirection); setIndexSource(indexChannel); } /** * Encoder constructor. Construct a Encoder given a and b channels. * Using an index pulse forces 4x encoding * * The encoder will start counting immediately. * * @param aChannel * The a channel digital input channel. * @param bChannel * The b channel digital input channel. * @param indexChannel * The index channel digital input channel. */ public Encoder(final int aChannel, final int bChannel, final int indexChannel) { this(aChannel, bChannel, indexChannel, false); } /** * Encoder constructor. Construct a Encoder given a and b channels as * digital inputs. This is used in the case where the digital inputs are * shared. The Encoder class will not allocate the digital inputs and assume * that they already are counted. * * The encoder will start counting immediately. * * @param aSource * The source that should be used for the a channel. * @param bSource * the source that should be used for the b channel. * @param reverseDirection * represents the orientation of the encoder and inverts the * output values if necessary so forward represents positive * values. 
*/ public Encoder(DigitalSource aSource, DigitalSource bSource, boolean reverseDirection) { m_allocatedA = false; m_allocatedB = false; m_allocatedI = false; if (aSource == null) throw new NullPointerException("Digital Source A was null"); m_aSource = aSource; if (bSource == null) throw new NullPointerException("Digital Source B was null"); m_bSource = bSource; initEncoder(reverseDirection); } /** * Encoder constructor. Construct a Encoder given a and b channels as * digital inputs. This is used in the case where the digital inputs are * shared. The Encoder class will not allocate the digital inputs and assume * that they already are counted. * * The encoder will start counting immediately. * * @param aSource * The source that should be used for the a channel. * @param bSource * the source that should be used for the b channel. */ public Encoder(DigitalSource aSource, DigitalSource bSource) { this(aSource, bSource, false); } /** * Encoder constructor. Construct a Encoder given a and b channels as * digital inputs. This is used in the case where the digital inputs are * shared. The Encoder class will not allocate the digital inputs and assume * that they already are counted. * * The encoder will start counting immediately. * * @param aSource * The source that should be used for the a channel. * @param bSource * the source that should be used for the b channel. * @param reverseDirection * represents the orientation of the encoder and inverts the * output values if necessary so forward represents positive * values. * @param encodingType * either k1X, k2X, or k4X to indicate 1X, 2X or 4X decoding. If * 4X is selected, then an encoder FPGA object is used and the * returned counts will be 4x the encoder spec'd value since all * rising and falling edges are counted. If 1X or 2X are selected * then a counter object will be used and the returned value will * either exactly match the spec'd count or be double (2x) the * spec'd count. 
*/ public Encoder(DigitalSource aSource, DigitalSource bSource, boolean reverseDirection, final EncodingType encodingType) { m_allocatedA = false; m_allocatedB = false; m_allocatedI = false; if (encodingType == null) throw new NullPointerException("Given encoding type was null"); m_encodingType = encodingType; if (aSource == null) throw new NullPointerException("Digital Source A was null"); m_aSource = aSource; if (bSource == null) throw new NullPointerException("Digital Source B was null"); m_aSource = aSource; m_bSource = bSource; initEncoder(reverseDirection); } /** * Encoder constructor. Construct a Encoder given a, b and index channels as * digital inputs. This is used in the case where the digital inputs are * shared. The Encoder class will not allocate the digital inputs and assume * that they already are counted. * * The encoder will start counting immediately. * * @param aSource * The source that should be used for the a channel. * @param bSource * the source that should be used for the b channel. * @param indexSource * the source that should be used for the index channel. * @param reverseDirection * represents the orientation of the encoder and inverts the * output values if necessary so forward represents positive * values. */ public Encoder(DigitalSource aSource, DigitalSource bSource, DigitalSource indexSource, boolean reverseDirection) { m_allocatedA = false; m_allocatedB = false; m_allocatedI = false; if (aSource == null) throw new NullPointerException("Digital Source A was null"); m_aSource = aSource; if (bSource == null) throw new NullPointerException("Digital Source B was null"); m_aSource = aSource; m_bSource = bSource; m_indexSource = indexSource; initEncoder(reverseDirection); setIndexSource(indexSource); } /** * Encoder constructor. Construct a Encoder given a, b and index channels as * digital inputs. This is used in the case where the digital inputs are * shared. 
The Encoder class will not allocate the digital inputs and assume * that they already are counted. * * The encoder will start counting immediately. * * @param aSource * The source that should be used for the a channel. * @param bSource * the source that should be used for the b channel. * @param indexSource * the source that should be used for the index channel. */ public Encoder(DigitalSource aSource, DigitalSource bSource, DigitalSource indexSource) { this(aSource, bSource, indexSource, false); } /** * @return the Encoder's FPGA index */ public int getFPGAIndex() { return m_index; } /** * @return the encoding scale factor 1x, 2x, or 4x, per the requested * encodingType. Used to divide raw edge counts down to spec'd counts. */ public int getEncodingScale() { return m_encodingScale; } public void free() { if (m_aSource != null && m_allocatedA) { m_aSource.free(); m_allocatedA = false; } if (m_bSource != null && m_allocatedB) { m_bSource.free(); m_allocatedB = false; } if (m_indexSource != null && m_allocatedI) { m_indexSource.free(); m_allocatedI = false; } m_aSource = null; m_bSource = null; m_indexSource = null; if (m_counter != null) { m_counter.free(); m_counter = null; } else { ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); EncoderJNI.freeEncoder(m_encoder, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); } } /** * Gets the raw value from the encoder. The raw value is the actual count * unscaled by the 1x, 2x, or 4x scale factor. * * @return Current raw count from the encoder */ public int getRaw() { int value; if (m_counter != null) { value = m_counter.get(); } else { ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); value = EncoderJNI.getEncoder(m_encoder, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); } return value; } /** * Gets the current count. Returns the current count on the Encoder. 
This * method compensates for the decoding type. * * @return Current count from the Encoder adjusted for the 1x, 2x, or 4x * scale factor. */ public int get() { return (int) (getRaw() * decodingScaleFactor()); } /** * Reset the Encoder distance to zero. Resets the current count to zero on * the encoder. */ public void reset() { if (m_counter != null) { m_counter.reset(); } else { ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); EncoderJNI.resetEncoder(m_encoder, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); } } /** * Returns the period of the most recent pulse. Returns the period of the * most recent Encoder pulse in seconds. This method compensates for the * decoding type. * * @deprecated Use getRate() in favor of this method. This returns unscaled * periods and getRate() scales using value from * setDistancePerPulse(). * * @return Period in seconds of the most recent pulse. */ public double getPeriod() { double measuredPeriod; if (m_counter != null) { measuredPeriod = m_counter.getPeriod() / decodingScaleFactor(); } else { ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); measuredPeriod = EncoderJNI.getEncoderPeriod(m_encoder, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); } return measuredPeriod; } /** * Sets the maximum period for stopped detection. Sets the value that * represents the maximum period of the Encoder before it will assume that * the attached device is stopped. This timeout allows users to determine if * the wheels or other shaft has stopped rotating. This method compensates * for the decoding type. * * * @param maxPeriod * The maximum time between rising and falling edges before the * FPGA will report the device stopped. This is expressed in * seconds. 
*/ public void setMaxPeriod(double maxPeriod) { if (m_counter != null) { m_counter.setMaxPeriod(maxPeriod * decodingScaleFactor()); } else { ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); EncoderJNI.setEncoderMaxPeriod(m_encoder, maxPeriod, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); } } /** * Determine if the encoder is stopped. Using the MaxPeriod value, a boolean * is returned that is true if the encoder is considered stopped and false * if it is still moving. A stopped encoder is one where the most recent * pulse width exceeds the MaxPeriod. * * @return True if the encoder is considered stopped. */ public boolean getStopped() { if (m_counter != null) { return m_counter.getStopped(); } else { ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); boolean value = EncoderJNI.getEncoderStopped(m_encoder, status.asIntBuffer()) != 0; HALUtil.checkStatus(status.asIntBuffer()); return value; } } /** * The last direction the encoder value changed. * * @return The last direction the encoder value changed. */ public boolean getDirection() { if (m_counter != null) { return m_counter.getDirection(); } else { ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); boolean value = EncoderJNI.getEncoderDirection(m_encoder, status.asIntBuffer()) != 0; HALUtil.checkStatus(status.asIntBuffer()); return value; } } /** * The scale needed to convert a raw counter value into a number of encoder * pulses. */ private double decodingScaleFactor() { switch (m_encodingType.value) { case EncodingType.k1X_val: return 1.0; case EncodingType.k2X_val: return 0.5; case EncodingType.k4X_val: return 0.25; default: // This is never reached, EncodingType enum limits values return 0.0; } } /** * Get the distance the robot has driven since the last reset. 
 *
 * @return The distance driven since the last reset as scaled by the value
 *         from setDistancePerPulse().
 */
public double getDistance() {
    return getRaw() * decodingScaleFactor() * m_distancePerPulse;
}

/**
 * Get the current rate of the encoder. Units are distance per second as
 * scaled by the value from setDistancePerPulse().
 *
 * @return The current rate of the encoder.
 */
public double getRate() {
    // NOTE(review): if getPeriod() returns 0 this yields +/-Infinity —
    // confirm callers tolerate that.
    return m_distancePerPulse / getPeriod();
}

/**
 * Set the minimum rate of the device before the hardware reports it
 * stopped.
 *
 * @param minRate
 *            The minimum rate. The units are in distance per second as
 *            scaled by the value from setDistancePerPulse().
 */
public void setMinRate(double minRate) {
    setMaxPeriod(m_distancePerPulse / minRate);
}

/**
 * Set the distance per pulse for this encoder. This sets the multiplier
 * used to determine the distance driven based on the count value from the
 * encoder. Do not include the decoding type in this scale. The library
 * already compensates for the decoding type. Set this value based on the
 * encoder's rated Pulses per Revolution and factor in gearing reductions
 * following the encoder shaft. This distance can be in any units you like,
 * linear or angular.
 *
 * @param distancePerPulse
 *            The scale factor that will be used to convert pulses to useful
 *            units.
 */
public void setDistancePerPulse(double distancePerPulse) {
    m_distancePerPulse = distancePerPulse;
}

/**
 * Set the direction sensing for this encoder. This sets the direction
 * sensing on the encoder so that it could count in the correct software
 * direction regardless of the mounting.
 *
 * @param reverseDirection
 *            true if the encoder direction should be reversed
 */
public void setReverseDirection(boolean reverseDirection) {
    if (m_counter != null) {
        m_counter.setReverseDirection(reverseDirection);
    } else {
        // NOTE(review): intentionally empty? For 4X (FPGA) encoders the
        // direction was fixed at construction and this call is silently
        // ignored — confirm this is the intended contract.
    }
}

/**
 * Set the Samples to Average which specifies the number of samples of the
 * timer to average when calculating the period.
Perform averaging to * account for mechanical imperfections or as oversampling to increase * resolution. * * TODO: Should this throw a checked exception, so that the user has to deal * with giving an incorrect value? * * @param samplesToAverage * The number of samples to average from 1 to 127. */ public void setSamplesToAverage(int samplesToAverage) { switch (m_encodingType.value) { case EncodingType.k4X_val: ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); EncoderJNI.setEncoderSamplesToAverage(m_encoder, samplesToAverage, status.asIntBuffer()); if (status.duplicate().get() == HALUtil.PARAMETER_OUT_OF_RANGE) { throw new BoundaryException(BoundaryException.getMessage( samplesToAverage, 1, 127)); } HALUtil.checkStatus(status.asIntBuffer()); break; case EncodingType.k1X_val: case EncodingType.k2X_val: m_counter.setSamplesToAverage(samplesToAverage); } } /** * Get the Samples to Average which specifies the number of samples of the * timer to average when calculating the period. Perform averaging to * account for mechanical imperfections or as oversampling to increase * resolution. * * @return SamplesToAverage The number of samples being averaged (from 1 to * 127) */ public int getSamplesToAverage() { switch (m_encodingType.value) { case EncodingType.k4X_val: ByteBuffer status = ByteBuffer.allocateDirect(4); // set the byte order status.order(ByteOrder.LITTLE_ENDIAN); int value = EncoderJNI.getEncoderSamplesToAverage(m_encoder, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); return value; case EncodingType.k1X_val: case EncodingType.k2X_val: return m_counter.getSamplesToAverage(); } return 1; } /** * Set which parameter of the encoder you are using as a process control * variable. The encoder class supports the rate and distance parameters. * * @param pidSource * An enum to select the parameter. 
*/ public void setPIDSourceParameter(PIDSourceParameter pidSource) { BoundaryException.assertWithinBounds(pidSource.value, 0, 1); m_pidSource = pidSource; } /** * Implement the PIDSource interface. * * @return The current value of the selected source parameter. */ public double pidGet() { switch (m_pidSource.value) { case PIDSourceParameter.kDistance_val: return getDistance(); case PIDSourceParameter.kRate_val: return getRate(); default: return 0.0; } } /** * Set the index source for the encoder. When this source rises, the encoder count automatically resets. * * @param channel A DIO channel to set as the encoder index * @param type The state that will cause the encoder to reset */ public void setIndexSource(int channel, IndexingType type) { ByteBuffer status = ByteBuffer.allocateDirect(4); status.order(ByteOrder.LITTLE_ENDIAN); boolean activeHigh = (type == IndexingType.kResetWhileHigh) || (type == IndexingType.kResetOnRisingEdge); boolean edgeSensitive = (type == IndexingType.kResetOnFallingEdge) || (type == IndexingType.kResetOnRisingEdge); EncoderJNI.setEncoderIndexSource(m_encoder, channel, false, activeHigh, edgeSensitive, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); } /** * Set the index source for the encoder. When this source is activated, the encoder count automatically resets. * * @param channel A DIO channel to set as the encoder index */ public void setIndexSource(int channel) { this.setIndexSource(channel, IndexingType.kResetOnRisingEdge); } /** * Set the index source for the encoder. When this source rises, the encoder count automatically resets. 
* * @param source A digital source to set as the encoder index * @param type The state that will cause the encoder to reset */ public void setIndexSource(DigitalSource source, IndexingType type) { ByteBuffer status = ByteBuffer.allocateDirect(4); status.order(ByteOrder.LITTLE_ENDIAN); boolean activeHigh = (type == IndexingType.kResetWhileHigh) || (type == IndexingType.kResetOnRisingEdge); boolean edgeSensitive = (type == IndexingType.kResetOnFallingEdge) || (type == IndexingType.kResetOnRisingEdge); EncoderJNI.setEncoderIndexSource(m_encoder, source.getChannelForRouting(), source.getAnalogTriggerForRouting(), activeHigh, edgeSensitive, status.asIntBuffer()); HALUtil.checkStatus(status.asIntBuffer()); } /** * Set the index source for the encoder. When this source is activated, the encoder count automatically resets. * * @param source A digital source to set as the encoder index */ public void setIndexSource(DigitalSource source) { this.setIndexSource(source, IndexingType.kResetOnRisingEdge); } /* * Live Window code, only does anything if live window is activated. */ public String getSmartDashboardType() { switch (m_encodingType.value) { case EncodingType.k4X_val: return "Quadrature Encoder"; default: return "Encoder"; } } private ITable m_table; /** * {@inheritDoc} */ public void initTable(ITable subtable) { m_table = subtable; updateTable(); } /** * {@inheritDoc} */ public ITable getTable() { return m_table; } /** * {@inheritDoc} */ public void updateTable() { if (m_table != null) { m_table.putNumber("Speed", getRate()); m_table.putNumber("Distance", getDistance()); m_table.putNumber("Distance per Tick", m_distancePerPulse); } } /** * {@inheritDoc} */ public void startLiveWindowMode() { } /** * {@inheritDoc} */ public void stopLiveWindowMode() { } }
package hyn.com.datastorage.disk;

import android.graphics.Bitmap;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.Serializable;

import hyn.com.datastorage.exception.BusyInUsingException;

/**
 * Created by Administrator on 2014/8/20.
 * This class is unlimited, which means there is no size limited and age limited.
 *
 * All operations are delegated to an underlying {@link IStreamStorage}. If the backing
 * storage could not be opened, every operation degrades gracefully (returns
 * {@code null} / {@code false} / {@code -1}) instead of throwing a NullPointerException.
 */
class BasicDiskStorage implements DiskStorage {

    // On-disk format version handed to the underlying storage when opening it.
    protected final static int VERSION = 1;

    // Backing storage; null when opening the root directory failed (see File ctor).
    private final IStreamStorage mStreamStorage;

    BasicDiskStorage(IStreamStorage streamStorage) {
        mStreamStorage = streamStorage;
    }

    BasicDiskStorage(File rootDirectory) {
        if (null == rootDirectory)
            throw new NullPointerException("Input root directory for BasicDiskStorage is empty.");
        if (!rootDirectory.isDirectory())
            throw new IllegalArgumentException("BasicStorage need directory!");
        IStreamStorage streamStorage = null;
        try {
            streamStorage = FlexibleDiskStorageImpl.open(rootDirectory, VERSION);
        } catch (IOException e) {
            // Deliberate best-effort: a storage that fails to open behaves as an
            // always-empty, read-only store (see the null guards on every method).
            streamStorage = null;
        } finally {
            mStreamStorage = streamStorage;
        }
    }

    @Override
    public File getDirectory() {
        if (null == mStreamStorage) return null;
        return mStreamStorage.getRootFile();
    }

    public void setOverFlowRemoveListener(DiskStorage.OverFlowRemoveListener overFlowRemoveListener) {
        // Fix: guard against a storage that failed to open (was an unconditional NPE).
        if (null == mStreamStorage) return;
        mStreamStorage.setOverFlowRemoveListener(overFlowRemoveListener);
    }

    @Override
    public long getCurrentSize() {
        if (null == mStreamStorage) return -1;
        return mStreamStorage.getCurrentSize();
    }

    @Override
    public File get(String key) {
        throw new UnsupportedOperationException("Unsupported get operation.");
    }

    /**
     * User must close it.
     *
     * @param key key
     * @return a stream over the stored value, or {@code null} if absent or unreadable
     */
    @Override
    public InputStream getInputStream(String key) {
        // Fix: avoid NPE when the storage is unavailable.
        if (null == mStreamStorage) return null;
        try {
            IStreamStorage.Snapshot snapShot = mStreamStorage.get(key);
            if (null == snapShot) return null;
            // The wrapper ties the snapshot's lifetime to the returned stream.
            return new InputStreamWrapper(snapShot, snapShot.getInputStream());
        } catch (IOException e) {
            return null;
        }
    }

    /**
     * User must close it.
     *
     * @param key key
     * @return a stream writing a new value for {@code key}, or {@code null} on failure
     */
    @Override
    public OutputStream getOutputStream(String key) {
        // Fix: avoid NPE when the storage is unavailable.
        if (null == mStreamStorage) return null;
        try {
            IStreamStorage.Editor editor = mStreamStorage.edit(key);
            if (null == editor) return null;
            OutputStream outputStream = editor.newOutputStream();
            if (null == outputStream) {
                editor.abort();
                editor.close();
                return null;
            }
            // The wrapper finishes the edit when the caller closes the stream.
            return new OutputStreamWrapper(editor, outputStream);
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        } catch (BusyInUsingException busyInUsingError) {
            busyInUsingError.printStackTrace();
            return null;
        }
    }

    @Override
    public boolean save(String key, InputStream inputStream) throws IOException {
        return save(key, inputStream, DEFAULT_COPIER, REST_STREAM_SIZE, VALID_FOREVER);
    }

    /**
     * Copies {@code inputStream} into the storage under {@code key}.
     *
     * @return {@code true} iff the value was written and committed
     * @throws IOException if the copy itself fails; the pending edit is aborted first
     */
    @Override
    public boolean save(String key, InputStream inputStream, Copier copier, long size,
                        long expireTime) throws IOException {
        // Fix: give the NPE a meaningful message (was an empty string).
        if (null == copier) throw new NullPointerException("Copier must not be null.");
        if (null == inputStream) return false;
        // Fix: avoid NPE when the storage is unavailable.
        if (null == mStreamStorage) return false;
        IStreamStorage.Editor editor = null;
        try {
            editor = mStreamStorage.edit(key);
        } catch (BusyInUsingException busyInUsingError) {
            busyInUsingError.printStackTrace();
            // editor is always null here (edit() threw before assigning); nothing to abort.
            return false;
        }
        if (null == editor) return false;
        OutputStream outputStream = editor.newOutputStream();
        if (null == outputStream) {
            editor.abort();
            editor.close();
            return false;
        }
        try {
            copier.copy(inputStream, outputStream, size);
        } catch (IOException copyFailure) {
            // Fix: abort the edit on copy failure so the entry is not left locked.
            editor.abort();
            editor.close();
            throw copyFailure;
        }
        outputStream.close();
        editor.setExpireTime(expireTime);
        editor.commit();
        return true;
    }

    @Override
    public boolean save(String key, InputStream inputStream, long expireTime) throws IOException {
        return save(key, inputStream, DEFAULT_COPIER, REST_STREAM_SIZE, expireTime);
    }

    @Override
    public boolean save(String key, InputStream inputStream, Copier copier, long expireTime)
            throws IOException {
        return save(key, inputStream, copier, REST_STREAM_SIZE, expireTime);
    }

    @Override
    public boolean save(String key, Bitmap bitmap, Bitmap.CompressFormat format, int quality)
            throws IOException {
        // NOTE(review): uses Long.MAX_VALUE directly; presumably equal to VALID_FOREVER — confirm.
        return save(key, bitmap, format, quality, Long.MAX_VALUE);
    }

    /**
     * Compresses {@code bitmap} into the storage under {@code key}.
     *
     * @return {@code true} iff both the bitmap compression and the commit succeeded
     */
    @Override
    public boolean save(String key, Bitmap bitmap, Bitmap.CompressFormat format, int quality,
                        long expireTime) throws IOException {
        if (null == bitmap || format == null) return false;
        // Fix: avoid NPE when the storage is unavailable.
        if (null == mStreamStorage) return false;
        IStreamStorage.Editor editor = null;
        try {
            editor = mStreamStorage.edit(key);
        } catch (BusyInUsingException busyInUsingError) {
            busyInUsingError.printStackTrace();
            return false;
        }
        if (null == editor) return false;
        OutputStream outputStream = editor.newOutputStream();
        if (null == outputStream) {
            // Fix: abort before closing, consistent with the other save() paths.
            editor.abort();
            editor.close();
            return false;
        }
        boolean savedSuccessfully = bitmap.compress(format, quality, outputStream);
        outputStream.close();
        editor.setExpireTime(expireTime);
        boolean commitSuccessfully = editor.commit();
        // Fix: short-circuit && instead of bitwise & (same result here, idiomatic).
        return savedSuccessfully && commitSuccessfully;
    }

    @Override
    public <T extends Serializable> boolean saveObject(String key, T serializable)
            throws IOException {
        return saveObject(key, serializable, VALID_FOREVER);
    }

    /**
     * Java-serializes {@code serializable} into the storage under {@code key}.
     *
     * @return {@code true} iff the object was written and committed
     */
    @Override
    public <T extends Serializable> boolean saveObject(String key, T serializable, long expireTime)
            throws IOException {
        // Fix: avoid NPE when the storage is unavailable.
        if (null == mStreamStorage) return false;
        IStreamStorage.Editor editor = null;
        try {
            editor = mStreamStorage.edit(key);
            if (null == editor) return false;
            OutputStream outputStream = editor.newOutputStream();
            if (null == outputStream) {
                return false;
            }
            ObjectOutputStream out = new ObjectOutputStream(outputStream);
            out.writeObject(serializable);
            out.close();
            outputStream.close();
            editor.setExpireTime(expireTime);
            return editor.commit();
        } catch (BusyInUsingException busyInUsingError) {
            busyInUsingError.printStackTrace();
            // Fix: edit() may throw before editor is assigned; the old code NPE'd here.
            if (null != editor) editor.abort();
            return false;
        } finally {
            if (null != editor) editor.close();
        }
    }

    /**
     * Reads back an object stored with {@link #saveObject}.
     *
     * @return the deserialized object, or {@code null} if absent or unreadable
     */
    @Override
    public <T extends Serializable> T getObject(String key, Class<T> clazz) throws IOException {
        InputStream inputStream = null;
        try {
            inputStream = getInputStream(key);
            if (null == inputStream) return null;
            ObjectInputStream in = new ObjectInputStream(inputStream);
            @SuppressWarnings("unchecked") // caller promises the stored object is a T
            T content = (T) in.readObject();
            in.close();
            inputStream.close();
            inputStream = null; // mark closed so the finally block does not close twice
            return content;
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return null;
        } finally {
            if (null != inputStream) inputStream.close();
        }
    }

    @Override
    public boolean remove(String key) {
        // Fix: avoid NPE when the storage is unavailable.
        if (null == mStreamStorage) return false;
        try {
            return mStreamStorage.remove(key);
        } catch (IOException e) {
            return false;
        }
    }

    @Override
    public boolean contains(String key) {
        // Fix: avoid NPE when the storage is unavailable.
        if (null == mStreamStorage) return false;
        return mStreamStorage.contain(key);
    }

    @Override
    public void close() {
        // Fix: avoid NPE when the storage is unavailable.
        if (null == mStreamStorage) return;
        try {
            mStreamStorage.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void clear() throws IOException {
        // TODO: not implemented yet.
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.stream.table; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.api.Table; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.planner.utils.JavaStreamTableTestUtil; import org.apache.flink.table.planner.utils.TableTestBase; import org.apache.flink.table.types.DataType; import org.apache.flink.types.Row; import org.junit.Test; import java.time.LocalTime; import java.util.Arrays; import java.util.List; import static java.util.Collections.singletonList; import static org.apache.flink.table.api.Expressions.lit; import static org.apache.flink.table.api.Expressions.map; import static org.apache.flink.table.api.Expressions.nullOf; import static org.apache.flink.table.api.Expressions.pi; import static org.apache.flink.table.api.Expressions.row; /** * Tests for {@link org.apache.flink.table.api.TableEnvironment#fromValues}. 
*/
public class ValuesTest extends TableTestBase {

	// All rows share exactly (INT, BIGINT, STRING); no type widening is needed.
	@Test
	public void testValuesAllEqualTypes() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			row(1, 2L, "JKL"),
			row(2, 3L, "GHI"),
			row(3, 4L, "DEF"),
			row(4, 5L, "ABC")
		);
		util.verifyPlan(t);
	}

	// Bare (non-row) literals of mixed numeric types plus an untyped null.
	@Test
	public void testValuesFromLiterals() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			1,
			3.1f,
			99L,
			null
		);
		util.verifyPlan(t);
	}

	// Rows built from Table API expressions, including nested maps and a typed null map.
	@Test
	public void testValuesFromRowExpression() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			row(lit(1).plus(3), "ABC", map("a", 3d)),
			row(lit(-1).abs().plus(2), "ABC", map("a", lit(-5).abs().plus(-5))),
			row(pi(), "ABC", map("abc", 3f)),
			row(3.1f, "DEF", map("abcd", 3L)),
			row(99L, "DEFG", map("a", 1)),
			row(0d, "D", lit(null, DataTypes.MAP(DataTypes.CHAR(1), DataTypes.INT())))
		);
		util.verifyPlan(t);
	}

	// Plain org.apache.flink.types.Row objects instead of row() expressions.
	@Test
	public void testValuesFromRowObject() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			Row.of(1, "ABC", null),
			Row.of(Math.PI, "ABC", 1),
			Row.of(3.1f, "DEF", 2),
			Row.of(99L, "DEFG", 3),
			Row.of(0d, "D", 4)
		);
		util.verifyPlan(t);
	}

	// row() expressions and Row objects may be mixed in one fromValues call.
	@Test
	public void testValuesFromMixedObjectsAndExpressions() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			row(1, "ABC", null),
			Row.of(Math.PI, "ABC", 1),
			Row.of(3.1f, "DEF", 2),
			row(99L, "DEFG", nullOf(DataTypes.INT())),
			Row.of(0d, "D", 4)
		);
		util.verifyPlan(t);
	}

	// Rows supplied via a Collection, with an explicit row type overriding inference.
	@Test
	public void testValuesFromRowObjectInCollection() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		List<Object> data = Arrays.asList(
			row(1, lit("ABC")),
			row(Math.PI, "ABC"),
			row(3.1f, "DEF"),
			row(99L, lit("DEFG")),
			row(0d, "D")
		);
		DataType rowType = DataTypes.ROW(
			DataTypes.FIELD("a", DataTypes.DECIMAL(10, 2).notNull()),
			DataTypes.FIELD("b", DataTypes.CHAR(4).notNull()));
		Table t = util.getTableEnv().fromValues(
			rowType,
			data
		);
		util.verifyPlan(t);
	}

	// Nested Row values and list-valued columns must be inferred correctly.
	@Test
	public void testValuesFromNestedRowObject() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			Row.of(1, Row.of("A", 2), singletonList(1)),
			Row.of(Math.PI, Row.of("ABC", 3.0), singletonList(3L))
		);
		util.verifyPlan(t);
	}

	// The explicit schema takes precedence over the types inferred from the rows.
	@Test
	public void testValuesOverrideSchema() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			DataTypes.ROW(
				DataTypes.FIELD("a", DataTypes.BIGINT()),
				DataTypes.FIELD("b", DataTypes.STRING())),
			row(lit(1).plus(2), "ABC"),
			row(2, "ABC")
		);
		util.verifyPlan(t);
	}

	// The explicit schema can tighten nullability on otherwise-nullable columns.
	@Test
	public void testValuesOverrideNullability() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			DataTypes.ROW(
				DataTypes.FIELD("a", DataTypes.BIGINT().notNull()),
				DataTypes.FIELD("b", DataTypes.VARCHAR(4).notNull()),
				DataTypes.FIELD("c", DataTypes.BINARY(4).notNull())
			),
			row(lit(1).plus(2), "ABC", new byte[]{1, 2, 3})
		);
		util.verifyPlan(t);
	}

	// Deeply nested ROW types (row inside row) combined with an ARRAY column.
	@Test
	public void testValuesWithComplexNesting() {
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		Table t = util.getTableEnv().fromValues(
			DataTypes.ROW(
				DataTypes.FIELD("number", DataTypes.DOUBLE()),
				DataTypes.FIELD("row",
					DataTypes.ROW(
						DataTypes.FIELD("string", DataTypes.CHAR(5)),
						DataTypes.FIELD("decimal", DataTypes.DECIMAL(10, 2)),
						DataTypes.FIELD("nestedRow",
							DataTypes.ROW(
								DataTypes.FIELD("time", DataTypes.TIME(4))
							))
					)
				),
				DataTypes.FIELD("array", DataTypes.ARRAY(DataTypes.BIGINT()))
			),
			Row.of(1, Row.of("A", 2, Row.of(LocalTime.of(0, 0, 0))), singletonList(1)),
			Row.of(Math.PI, Row.of("ABC", 3.0, Row.of(100 /* uses integer for a TIME(4)*/)), singletonList(3L))
		);
		util.verifyPlan(t);
	}

	// STRING, TIME and INT in column 1 have no common super type -> validation error.
	@Test
	public void testNoCommonType() {
		thrown().expect(ValidationException.class);
		thrown().expectMessage("Types in fromValues(...) must have a common super type. Could not" +
			" find a common type for all rows at column 1.");
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		util.getTableEnv().fromValues(
			row("ABC", 1L),
			row("ABC", lit(LocalTime.of(0, 0, 0))),
			row("ABC", 2)
		);
	}

	// An INT literal cannot be cast to the requested BINARY(3) column type.
	@Test
	public void testCannotCast() {
		thrown().expect(ValidationException.class);
		thrown().expectMessage("Could not cast the value of the 0 column: [ 4 ] of a row: [ 4 ]" +
			" to the requested type: BINARY(3)");
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		util.getTableEnv().fromValues(
			DataTypes.ROW(
				DataTypes.FIELD("f1", DataTypes.BINARY(3))
			),
			row(4)
		);
	}

	// A one-field row against a two-field explicit schema -> validation error.
	@Test
	public void testWrongRowTypeLength() {
		thrown().expect(ValidationException.class);
		thrown().expectMessage(
			"All rows in a fromValues(...) clause must have the same fields number. Row [4] has a different" +
				" length than the expected size: 2.");
		JavaStreamTableTestUtil util = javaStreamTestUtil();
		util.getTableEnv().fromValues(
			DataTypes.ROW(
				DataTypes.FIELD("f1", DataTypes.BINARY(3)),
				DataTypes.FIELD("f2", DataTypes.STRING())
			),
			row(4)
		);
	}
}
/* * Copyright 2017-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.event.listener; import com.facebook.buck.artifact_cache.HttpArtifactCacheEvent; import com.facebook.buck.core.build.event.BuildRuleEvent; import com.facebook.buck.core.model.BuildId; import com.facebook.buck.distributed.DistBuildMode; import com.facebook.buck.distributed.DistBuildService; import com.facebook.buck.distributed.DistBuildUtil; import com.facebook.buck.distributed.FileMaterializationStatsTracker; import com.facebook.buck.distributed.build_slave.BuildSlaveTimingStatsTracker; import com.facebook.buck.distributed.build_slave.CoordinatorBuildRuleEventsPublisher; import com.facebook.buck.distributed.build_slave.HealthCheckStatsTracker; import com.facebook.buck.distributed.build_slave.MinionBuildProgressTracker; import com.facebook.buck.distributed.thrift.BuildRuleFinishedEvent; import com.facebook.buck.distributed.thrift.BuildRuleStartedEvent; import com.facebook.buck.distributed.thrift.BuildRuleUnlockedEvent; import com.facebook.buck.distributed.thrift.BuildSlaveEvent; import com.facebook.buck.distributed.thrift.BuildSlaveEventType; import com.facebook.buck.distributed.thrift.BuildSlaveFinishedStats; import com.facebook.buck.distributed.thrift.BuildSlaveRunId; import com.facebook.buck.distributed.thrift.BuildSlaveStatus; import com.facebook.buck.distributed.thrift.CoordinatorBuildProgress; import 
com.facebook.buck.distributed.thrift.CoordinatorBuildProgressEvent; import com.facebook.buck.distributed.thrift.StampedeId; import com.facebook.buck.event.BuckEventListener; import com.facebook.buck.event.ConsoleEvent; import com.facebook.buck.log.Logger; import com.facebook.buck.log.TimedLogger; import com.facebook.buck.util.network.hostname.HostnameFetching; import com.facebook.buck.util.timing.Clock; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.eventbus.Subscribe; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Optional; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; import javax.annotation.Nullable; /** * Listener to transmit DistBuildSlave events over to buck frontend. NOTE: We do not promise to * transmit every single update to BuildSlaveStatus, but we do promise to always transmit * BuildSlaveStatus with the latest updates. 
*/
public class DistBuildSlaveEventBusListener
    implements CoordinatorBuildRuleEventsPublisher,
        MinionBuildProgressTracker,
        BuckEventListener,
        Closeable {

  private static final TimedLogger LOG =
      new TimedLogger(Logger.get(DistBuildSlaveEventBusListener.class));

  private static final int DEFAULT_SERVER_UPDATE_PERIOD_MILLIS = 500;
  private static final int SHUTDOWN_TIMEOUT_SECONDS = 10;

  private final StampedeId stampedeId;
  private final BuildSlaveRunId buildSlaveRunId;
  private final Clock clock;
  // Periodic task pushing status/events to the frontend; cancelled in close().
  private final ScheduledFuture<?> scheduledServerUpdates;
  private final String hostname;

  // Serializes whole rounds of sendServerUpdates() so they cannot interleave.
  private final Object sendServerUpdatesLock = new Object();

  // Events queued for upload; always accessed under synchronized (pendingSlaveEvents).
  private final List<BuildSlaveEvent> pendingSlaveEvents = new LinkedList<>();

  private final CacheRateStatsKeeper cacheRateStatsKeeper = new CacheRateStatsKeeper();

  // NOTE(review): nothing in this file increments this counter, yet
  // sendAllRulesFinishedEvent() keys off it being non-zero — confirm it is
  // updated elsewhere (or that the "was not the coordinator" guard is dead).
  private final AtomicInteger totalBuildRuleFinishedEventsSent = new AtomicInteger(0);
  private final AtomicInteger buildRulesTotalCount = new AtomicInteger(0);
  private final AtomicInteger buildRulesFinishedCount = new AtomicInteger(0);
  private final AtomicInteger buildRulesBuildingCount = new AtomicInteger(0);
  private final AtomicInteger buildRulesFailureCount = new AtomicInteger(0);

  private final RemoteCacheUploadStats remoteCacheUploadStats = new RemoteCacheUploadStats();
  private final FileMaterializationStatsTracker fileMaterializationStatsTracker;
  private final HealthCheckStatsTracker healthCheckStatsTracker;
  private final BuildSlaveTimingStatsTracker slaveStatsTracker;
  private final DistBuildMode distBuildMode;

  // Latest coordinator progress snapshot; written by one thread, read by the updater.
  private volatile @Nullable CoordinatorBuildProgress coordinatorBuildProgress = null;

  // May be set after construction; all senders tolerate it still being null.
  private volatile @Nullable DistBuildService distBuildService;

  private volatile Optional<Integer> exitCode = Optional.empty();
  private volatile boolean sentFinishedStatsToServer;

  public DistBuildSlaveEventBusListener(
      StampedeId stampedeId,
      BuildSlaveRunId buildSlaveRunId,
      DistBuildMode distBuildMode,
      Clock clock,
      BuildSlaveTimingStatsTracker slaveStatsTracker,
      FileMaterializationStatsTracker fileMaterializationStatsTracker,
      HealthCheckStatsTracker healthCheckStatsTracker,
      ScheduledExecutorService networkScheduler) {
    this(
        stampedeId,
        buildSlaveRunId,
        distBuildMode,
        clock,
        slaveStatsTracker,
        fileMaterializationStatsTracker,
        healthCheckStatsTracker,
        networkScheduler,
        DEFAULT_SERVER_UPDATE_PERIOD_MILLIS);
  }

  public DistBuildSlaveEventBusListener(
      StampedeId stampedeId,
      BuildSlaveRunId runId,
      DistBuildMode distBuildMode,
      Clock clock,
      BuildSlaveTimingStatsTracker slaveStatsTracker,
      FileMaterializationStatsTracker fileMaterializationStatsTracker,
      HealthCheckStatsTracker healthCheckStatsTracker,
      ScheduledExecutorService networkScheduler,
      long serverUpdatePeriodMillis) {
    this.stampedeId = stampedeId;
    this.buildSlaveRunId = runId;
    this.clock = clock;
    this.slaveStatsTracker = slaveStatsTracker;
    this.fileMaterializationStatsTracker = fileMaterializationStatsTracker;
    this.healthCheckStatsTracker = healthCheckStatsTracker;
    this.distBuildMode = distBuildMode;

    // Starts pushing updates immediately; sends are no-ops until a
    // DistBuildService is installed via setDistBuildService().
    scheduledServerUpdates =
        networkScheduler.scheduleAtFixedRate(
            this::sendServerUpdates, 0, serverUpdatePeriodMillis, TimeUnit.MILLISECONDS);

    String hostname;
    try {
      hostname = HostnameFetching.getHostname();
    } catch (IOException e) {
      // Hostname is informational only; fall back rather than fail construction.
      hostname = "unknown";
    }
    this.hostname = hostname;
  }

  public void setDistBuildService(DistBuildService service) {
    this.distBuildService = service;
  }

  @Override
  public void outputTrace(BuildId buildId) {}

  @Override
  public void close() throws IOException {
    stopScheduledUpdates();
  }

  // Snapshot of all rule/cache/upload counters for one status upload.
  private BuildSlaveStatus createBuildSlaveStatus() {
    return new BuildSlaveStatus()
        .setStampedeId(stampedeId)
        .setBuildSlaveRunId(buildSlaveRunId)
        .setTotalRulesCount(buildRulesTotalCount.get())
        .setRulesFinishedCount(buildRulesFinishedCount.get())
        .setRulesBuildingCount(buildRulesBuildingCount.get())
        .setRulesFailureCount(buildRulesFailureCount.get())
        .setCacheRateStats(cacheRateStatsKeeper.getSerializableStats())
        .setHttpArtifactTotalBytesUploaded(remoteCacheUploadStats.getBytesUploaded())
        .setHttpArtifactUploadsScheduledCount(remoteCacheUploadStats.getScheduledCount())
        .setHttpArtifactUploadsOngoingCount(remoteCacheUploadStats.getOngoingCount())
        .setHttpArtifactUploadsSuccessCount(remoteCacheUploadStats.getSuccessCount())
        .setHttpArtifactUploadsFailureCount(remoteCacheUploadStats.getFailureCount())
        .setFilesMaterializedCount(
            fileMaterializationStatsTracker.getTotalFilesMaterializedCount());
  }

  // Final per-slave stats; only valid once exitCode has been set (checked below).
  private BuildSlaveFinishedStats createBuildSlaveFinishedStats() {
    BuildSlaveFinishedStats finishedStats =
        new BuildSlaveFinishedStats()
            .setHostname(hostname)
            .setDistBuildMode(distBuildMode.name())
            .setBuildSlaveStatus(createBuildSlaveStatus())
            .setFileMaterializationStats(
                fileMaterializationStatsTracker.getFileMaterializationStats())
            .setHealthCheckStats(healthCheckStatsTracker.getHealthCheckStats())
            .setBuildSlavePerStageTimingStats(slaveStatsTracker.generateStats());
    Preconditions.checkState(
        exitCode.isPresent(),
        "BuildSlaveFinishedStats can only be generated after we are finished building.");
    finishedStats.setExitCode(exitCode.get());
    return finishedStats;
  }

  // Idempotent: sentFinishedStatsToServer guards against double upload.
  private synchronized void sendFinishedStatsToFrontend(BuildSlaveFinishedStats finishedStats) {
    if (distBuildService == null || sentFinishedStatsToServer) {
      return;
    }

    try {
      distBuildService.storeBuildSlaveFinishedStats(stampedeId, buildSlaveRunId, finishedStats);
      sentFinishedStatsToServer = true;
    } catch (IOException e) {
      LOG.error(e, "Could not update slave status to frontend.");
    }
  }

  private synchronized void sendAllRulesFinishedEvent() {
    if (totalBuildRuleFinishedEventsSent.get() == 0) {
      return; // This was not the coordinator.
    }
    try {
      if (distBuildService != null) {
        distBuildService.sendAllBuildRulesPublishedEvent(
            stampedeId, buildSlaveRunId, clock.currentTimeMillis());
      }

    } catch (RuntimeException | IOException e) {
      LOG.error(e, "Failed to send slave final server updates.");
    }
  }

  private void sendStatusToFrontend() {
    if (distBuildService == null) {
      return;
    }

    try {
      distBuildService.updateBuildSlaveStatus(
          stampedeId, buildSlaveRunId, createBuildSlaveStatus());
    } catch (IOException e) {
      LOG.error(e, "Could not update slave status to frontend.");
    }
  }

  // Empty when no coordinator progress has been reported yet.
  private Optional<BuildSlaveEvent> createCoordinatorBuildProgressEvent() {
    if (coordinatorBuildProgress == null) {
      return Optional.empty();
    }

    CoordinatorBuildProgressEvent progressEvent =
        new CoordinatorBuildProgressEvent().setBuildProgress(coordinatorBuildProgress);
    BuildSlaveEvent buildSlaveEvent =
        DistBuildUtil.createBuildSlaveEvent(
            BuildSlaveEventType.COORDINATOR_BUILD_PROGRESS_EVENT, clock.currentTimeMillis());
    buildSlaveEvent.setCoordinatorBuildProgressEvent(progressEvent);
    return Optional.of(buildSlaveEvent);
  }

  // One full update round: status upload, then a copied batch of queued events.
  // Events are removed from the queue only after a successful upload, so a
  // failed upload retries them on the next round.
  private void sendServerUpdates() {
    if (distBuildService == null) {
      return;
    }

    synchronized (sendServerUpdatesLock) {
      LOG.info("Sending server updates..");
      sendStatusToFrontend();

      List<BuildSlaveEvent> slaveEvents;
      synchronized (pendingSlaveEvents) {
        slaveEvents = new ArrayList<>(pendingSlaveEvents);
      }
      createCoordinatorBuildProgressEvent().ifPresent(slaveEvents::add);
      if (slaveEvents.size() == 0) {
        return;
      }

      try {
        // TODO(alisdair, shivanker): Consider batching if list is too big.
        distBuildService.uploadBuildSlaveEvents(stampedeId, buildSlaveRunId, slaveEvents);
        synchronized (pendingSlaveEvents) {
          pendingSlaveEvents.removeAll(slaveEvents);
        }
      } catch (IOException e) {
        LOG.error(e, "Failed to upload slave events.");
      }
    }
  }

  // Cancels the periodic task, waits for an in-flight round, then flushes once more.
  private void stopScheduledUpdates() {
    if (scheduledServerUpdates.isCancelled()) {
      return; // This method has already been called. Cancelling again will fail.
    }

    boolean cancelled = scheduledServerUpdates.cancel(false);

    if (!cancelled) {
      // Wait for the timer to shut down.
      try {
        scheduledServerUpdates.get(SHUTDOWN_TIMEOUT_SECONDS, TimeUnit.SECONDS);
      } catch (InterruptedException | ExecutionException | TimeoutException e) {
        LOG.error(e);
      } catch (CancellationException e) {
        LOG.error(e, "Failed to call get() on scheduled executor future, as already cancelled.");
      }
    }

    // Send final updates.
    sendServerUpdates();
  }

  /** Publishes events from slave back to client that kicked off build (via frontend) */
  public void sendFinalServerUpdates(int exitCode) {
    // exitCode must be set before createBuildSlaveFinishedStats() (precondition there).
    this.exitCode = Optional.of(exitCode);
    stopScheduledUpdates();
    sendAllRulesFinishedEvent();
    sendFinishedStatsToFrontend(createBuildSlaveFinishedStats());
  }

  /** Record unexpected cache misses in build slaves. */
  @Override
  public void onUnexpectedCacheMiss(int numUnexpectedMisses) {
    cacheRateStatsKeeper.recordUnexpectedCacheMisses(numUnexpectedMisses);
  }

  @Override
  public void updateTotalRuleCount(int totalRuleCount) {
    buildRulesTotalCount.set(totalRuleCount);
  }

  @Override
  public void updateFinishedRuleCount(int finishedRuleCount) {
    buildRulesFinishedCount.set(finishedRuleCount);
  }

  // Only WARNING/SEVERE console output is forwarded to the frontend.
  @Subscribe
  public void logEvent(ConsoleEvent event) {
    if (!event.getLevel().equals(Level.WARNING) && !event.getLevel().equals(Level.SEVERE)) {
      return;
    }
    synchronized (pendingSlaveEvents) {
      BuildSlaveEvent slaveConsoleEvent =
          DistBuildUtil.createBuildSlaveConsoleEvent(event, clock.currentTimeMillis());
      pendingSlaveEvents.add(slaveConsoleEvent);
    }
  }

  @SuppressWarnings("unused")
  @Subscribe
  public void buildRuleStarted(BuildRuleEvent.Started started) {
    buildRulesBuildingCount.incrementAndGet();

    // For calculating the cache rate, total rule count = rules that were processed. So we increment
    // for started and resumed events, and decrement for suspended event. We do not decrement for a
    // finished event.
    cacheRateStatsKeeper.ruleCount.incrementAndGet();
  }

  @Subscribe
  public void buildRuleFinished(BuildRuleEvent.Finished finished) {
    cacheRateStatsKeeper.buildRuleFinished(finished);
    buildRulesBuildingCount.decrementAndGet();

    switch (finished.getStatus()) {
      case SUCCESS:
        break;
      case FAIL:
        buildRulesFailureCount.incrementAndGet();
        break;
      case CANCELED:
        break;
    }
  }

  @SuppressWarnings("unused")
  @Subscribe
  public void buildRuleResumed(BuildRuleEvent.Resumed resumed) {
    buildRulesBuildingCount.incrementAndGet();
    // See comment in buildRuleStarted: resumed rules count toward the cache rate.
    cacheRateStatsKeeper.ruleCount.incrementAndGet();
  }

  @SuppressWarnings("unused")
  @Subscribe
  public void buildRuleSuspended(BuildRuleEvent.Suspended suspended) {
    buildRulesBuildingCount.decrementAndGet();
    cacheRateStatsKeeper.ruleCount.decrementAndGet();
  }

  @Subscribe
  public void onHttpArtifactCacheScheduledEvent(HttpArtifactCacheEvent.Scheduled event) {
    remoteCacheUploadStats.processScheduledEvent(event);
  }

  @Subscribe
  public void onHttpArtifactCacheStartedEvent(HttpArtifactCacheEvent.Started event) {
    remoteCacheUploadStats.processStartedEvent(event);
  }

  @Subscribe
  public void onHttpArtifactCacheFinishedEvent(HttpArtifactCacheEvent.Finished event) {
    remoteCacheUploadStats.processFinishedEvent(event);
  }

  @Override
  public void updateCoordinatorBuildProgress(CoordinatorBuildProgress progress) {
    coordinatorBuildProgress = progress;
  }

  // Queues one BUILD_RULE_STARTED_EVENT per target for the next upload round.
  @Override
  public void createBuildRuleStartedEvents(ImmutableList<String> startedTargets) {
    if (startedTargets.size() == 0) {
      return;
    }

    List<BuildSlaveEvent> ruleStartedEvents = new LinkedList<>();
    for (String target : startedTargets) {
      LOG.info(String.format("Queueing build rule started event for target [%s]", target));
      BuildRuleStartedEvent startedEvent = new BuildRuleStartedEvent();
      startedEvent.setBuildTarget(target);
      BuildSlaveEvent buildSlaveEvent =
          DistBuildUtil.createBuildSlaveEvent(
              BuildSlaveEventType.BUILD_RULE_STARTED_EVENT, clock.currentTimeMillis());
      buildSlaveEvent.setBuildRuleStartedEvent(startedEvent);
      ruleStartedEvents.add(buildSlaveEvent);
    }

    synchronized (pendingSlaveEvents) {
      pendingSlaveEvents.addAll(ruleStartedEvents);
    }
  }

  // Queues one BUILD_RULE_FINISHED_EVENT per target for the next upload round.
  @Override
  public void createBuildRuleCompletionEvents(ImmutableList<String> finishedTargets) {
    if (finishedTargets.size() == 0) {
      return;
    }

    List<BuildSlaveEvent> ruleCompletionEvents = new LinkedList<>();
    for (String target : finishedTargets) {
      LOG.info(String.format("Queueing build rule finished event for target [%s]", target));
      BuildRuleFinishedEvent finishedEvent = new BuildRuleFinishedEvent();
      finishedEvent.setBuildTarget(target);
      BuildSlaveEvent buildSlaveEvent =
          DistBuildUtil.createBuildSlaveEvent(
              BuildSlaveEventType.BUILD_RULE_FINISHED_EVENT, clock.currentTimeMillis());
      buildSlaveEvent.setBuildRuleFinishedEvent(finishedEvent);
      ruleCompletionEvents.add(buildSlaveEvent);
    }

    synchronized (pendingSlaveEvents) {
      pendingSlaveEvents.addAll(ruleCompletionEvents);
    }
  }

  // Queues one BUILD_RULE_UNLOCKED_EVENT per target for the next upload round.
  @Override
  public void createBuildRuleUnlockedEvents(ImmutableList<String> unlockedTargets) {
    if (unlockedTargets.size() == 0) {
      return;
    }

    List<BuildSlaveEvent> ruleUnlockedEvents = new LinkedList<>();
    for (String target : unlockedTargets) {
      LOG.info(String.format("Queueing build rule unlocked event for target [%s]", target));
      BuildRuleUnlockedEvent unlockedEvent = new BuildRuleUnlockedEvent();
      unlockedEvent.setBuildTarget(target);
      BuildSlaveEvent buildSlaveEvent =
          DistBuildUtil.createBuildSlaveEvent(
              BuildSlaveEventType.BUILD_RULE_UNLOCKED_EVENT, clock.currentTimeMillis());
      buildSlaveEvent.setBuildRuleUnlockedEvent(unlockedEvent);
      ruleUnlockedEvents.add(buildSlaveEvent);
    }

    synchronized (pendingSlaveEvents) {
      pendingSlaveEvents.addAll(ruleUnlockedEvents);
    }
  }

  @Override
  public void createMostBuildRulesCompletedEvent() {
    synchronized (pendingSlaveEvents) {
      pendingSlaveEvents.add(
          DistBuildUtil.createBuildSlaveEvent(
              BuildSlaveEventType.MOST_BUILD_RULES_FINISHED_EVENT, clock.currentTimeMillis()));
    }
  }
}
/*
 * Copyright (C) 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gson.functional;

import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

import junit.framework.TestCase;

import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import com.google.gson.TypeAdapter;
import com.google.gson.TypeAdapterFactory;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.internal.Streams;
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;

/**
 * Functional tests for the RuntimeTypeAdapterFactory feature in extras.
 */
public final class RuntimeTypeAdapterFactoryFunctionalTest extends TestCase {

  private final Gson gson = new Gson();

  /**
   * Round-trips a {@link Circle} and a {@link Square} through JSON via the
   * base {@link Shape} type and verifies the concrete subtype is restored.
   *
   * <p>This test also ensures that {@link TypeAdapterFactory} registered through
   * {@link JsonAdapter} work correctly for
   * {@link Gson#getDelegateAdapter(TypeAdapterFactory, TypeToken)}.
   */
  public void testSubclassesAutomaticallySerialized() throws Exception {
    Shape shape = new Circle(25);
    String json = gson.toJson(shape);
    // Deserializing through the base type must yield the registered subtype.
    shape = gson.fromJson(json, Shape.class);
    assertEquals(25, ((Circle)shape).radius);

    shape = new Square(15);
    json = gson.toJson(shape);
    shape = gson.fromJson(json, Shape.class);
    assertEquals(15, ((Square)shape).side);
    assertEquals(ShapeType.SQUARE, shape.type);
  }

  /** Base type; the {@code type} field is the discriminator used during parsing. */
  @JsonAdapter(Shape.JsonAdapterFactory.class)
  static class Shape {
    final ShapeType type;

    Shape(ShapeType type) {
      this.type = type;
    }

    // Factory wired in via @JsonAdapter above: maps each ShapeType label to its
    // concrete subclass so Gson can dispatch on the "type" JSON field.
    private static final class JsonAdapterFactory extends RuntimeTypeAdapterFactory<Shape> {
      public JsonAdapterFactory() {
        super(Shape.class, "type");
        registerSubtype(Circle.class, ShapeType.CIRCLE.toString());
        registerSubtype(Square.class, ShapeType.SQUARE.toString());
      }
    }
  }

  public enum ShapeType {
    SQUARE, CIRCLE
  }

  private static final class Circle extends Shape {
    final int radius;

    Circle(int radius) {
      super(ShapeType.CIRCLE);
      this.radius = radius;
    }
  }

  private static final class Square extends Shape {
    final int side;

    Square(int side) {
      super(ShapeType.SQUARE);
      this.side = side;
    }
  }

  // Copied from the extras package
  /**
   * A {@link TypeAdapterFactory} for polymorphic serialization: on write it
   * injects a discriminator field ({@code typeFieldName}) into the JSON object;
   * on read it inspects that field to select the registered subtype's delegate
   * adapter.
   */
  static class RuntimeTypeAdapterFactory<T> implements TypeAdapterFactory {
    private final Class<?> baseType;
    private final String typeFieldName;
    // Bidirectional label <-> subtype registry; LinkedHashMap keeps
    // registration order stable.
    private final Map<String, Class<?>> labelToSubtype = new LinkedHashMap<String, Class<?>>();
    private final Map<Class<?>, String> subtypeToLabel = new LinkedHashMap<Class<?>, String>();

    protected RuntimeTypeAdapterFactory(Class<?> baseType, String typeFieldName) {
      if (typeFieldName == null || baseType == null) {
        throw new NullPointerException();
      }
      this.baseType = baseType;
      this.typeFieldName = typeFieldName;
    }

    /**
     * Creates a new runtime type adapter using for {@code baseType} using {@code
     * typeFieldName} as the type field name. Type field names are case sensitive.
     */
    public static <T> RuntimeTypeAdapterFactory<T> of(Class<T> baseType, String typeFieldName) {
      return new RuntimeTypeAdapterFactory<T>(baseType, typeFieldName);
    }

    /**
     * Creates a new runtime type adapter for {@code baseType} using {@code "type"} as
     * the type field name.
     */
    public static <T> RuntimeTypeAdapterFactory<T> of(Class<T> baseType) {
      return new RuntimeTypeAdapterFactory<T>(baseType, "type");
    }

    /**
     * Registers {@code type} identified by {@code label}. Labels are case
     * sensitive.
     *
     * @throws IllegalArgumentException if either {@code type} or {@code label}
     *     have already been registered on this type adapter.
     */
    public RuntimeTypeAdapterFactory<T> registerSubtype(Class<? extends T> type, String label) {
      if (type == null || label == null) {
        throw new NullPointerException();
      }
      if (subtypeToLabel.containsKey(type) || labelToSubtype.containsKey(label)) {
        throw new IllegalArgumentException("types and labels must be unique");
      }
      labelToSubtype.put(label, type);
      subtypeToLabel.put(type, label);
      return this;
    }

    /**
     * Registers {@code type} identified by its {@link Class#getSimpleName simple
     * name}. Labels are case sensitive.
     *
     * @throws IllegalArgumentException if either {@code type} or its simple name
     *     have already been registered on this type adapter.
     */
    public RuntimeTypeAdapterFactory<T> registerSubtype(Class<? extends T> type) {
      return registerSubtype(type, type.getSimpleName());
    }

    @Override
    public <R> TypeAdapter<R> create(Gson gson, TypeToken<R> type) {
      // Only handle the exact base type; subtypes are handled by the delegate
      // adapters captured below.
      if (type.getRawType() != baseType) {
        return null;
      }

      // Snapshot one delegate adapter per registered subtype, keyed both by
      // label (for reads) and by class (for writes).
      final Map<String, TypeAdapter<?>> labelToDelegate = new LinkedHashMap<String, TypeAdapter<?>>();
      final Map<Class<?>, TypeAdapter<?>> subtypeToDelegate = new LinkedHashMap<Class<?>, TypeAdapter<?>>();
      for (Map.Entry<String, Class<?>> entry : labelToSubtype.entrySet()) {
        TypeAdapter<?> delegate = gson.getDelegateAdapter(this, TypeToken.get(entry.getValue()));
        labelToDelegate.put(entry.getKey(), delegate);
        subtypeToDelegate.put(entry.getValue(), delegate);
      }

      return new TypeAdapter<R>() {
        @Override
        public R read(JsonReader in) throws IOException {
          // Parse the whole value into a tree first so the discriminator field
          // can be inspected before picking a delegate.
          JsonElement jsonElement = Streams.parse(in);
          JsonElement labelJsonElement = jsonElement.getAsJsonObject().get(typeFieldName);
          if (labelJsonElement == null) {
            throw new JsonParseException("cannot deserialize " + baseType
                + " because it does not define a field named " + typeFieldName);
          }
          String label = labelJsonElement.getAsString();
          @SuppressWarnings("unchecked") // registration requires that subtype extends T
          TypeAdapter<R> delegate = (TypeAdapter<R>) labelToDelegate.get(label);
          if (delegate == null) {
            throw new JsonParseException("cannot deserialize " + baseType + " subtype named "
                + label + "; did you forget to register a subtype?");
          }
          return delegate.fromJsonTree(jsonElement);
        }

        @Override
        public void write(JsonWriter out, R value) throws IOException {
          Class<?> srcType = value.getClass();
          String label = subtypeToLabel.get(srcType);
          @SuppressWarnings("unchecked") // registration requires that subtype extends T
          TypeAdapter<R> delegate = (TypeAdapter<R>) subtypeToDelegate.get(srcType);
          if (delegate == null) {
            throw new JsonParseException("cannot serialize " + srcType.getName()
                + "; did you forget to register a subtype?");
          }
          JsonObject jsonObject = delegate.toJsonTree(value).getAsJsonObject();
          if (!jsonObject.has(typeFieldName)) {
            // Rebuild the object so the discriminator appears first.
            JsonObject clone = new JsonObject();
            clone.add(typeFieldName, new JsonPrimitive(label));
            for (Map.Entry<String, JsonElement> e : jsonObject.entrySet()) {
              clone.add(e.getKey(), e.getValue());
            }
            jsonObject = clone;
          }
          Streams.write(jsonObject, out);
        }
      };
    }
  }
}
/* Copyright 1995-2015 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. For additional information, contact: Environmental Systems Research Institute, Inc. Attn: Contracts Dept 380 New York Street Redlands, California, USA 92373 email: contracts@esri.com */ package com.esri.core.geometry; final class PolygonUtils { enum PiPResult { PiPOutside, PiPInside, PiPBoundary }; // enum_class PiPResult { PiPOutside = 0, PiPInside = 1, PiPBoundary = 2}; /** * Tests if Point is inside the Polygon. Returns PiPOutside if not in * polygon, PiPInside if in the polygon, PiPBoundary is if on the border. It * tests border only if the tolerance is > 0, otherwise PiPBoundary cannot * be returned. Note: If the tolerance is not 0, the test is more expensive * because it calculates closest distance from a point to each segment. * * O(n) complexity, where n is the number of polygon segments. 
*/ public static PiPResult isPointInPolygon2D(Polygon polygon, Point inputPoint, double tolerance) { int res = PointInPolygonHelper.isPointInPolygon(polygon, inputPoint, tolerance); if (res == 0) return PiPResult.PiPOutside; if (res == 1) return PiPResult.PiPInside; return PiPResult.PiPBoundary; } public static PiPResult isPointInPolygon2D(Polygon polygon, Point2D inputPoint, double tolerance) { int res = PointInPolygonHelper.isPointInPolygon(polygon, inputPoint, tolerance); if (res == 0) return PiPResult.PiPOutside; if (res == 1) return PiPResult.PiPInside; return PiPResult.PiPBoundary; } static PiPResult isPointInPolygon2D(Polygon polygon, double inputPointXVal, double inputPointYVal, double tolerance) { int res = PointInPolygonHelper.isPointInPolygon(polygon, inputPointXVal, inputPointYVal, tolerance); if (res == 0) return PiPResult.PiPOutside; if (res == 1) return PiPResult.PiPInside; return PiPResult.PiPBoundary; } /** * Tests if Point is inside the Polygon's ring. Returns PiPOutside if not in * ring, PiPInside if in the ring, PiPBoundary is if on the border. It tests * border only if the tolerance is > 0, otherwise PiPBoundary cannot be * returned. Note: If the tolerance is not 0, the test is more expensive * because it calculates closest distance from a point to each segment. * * O(n) complexity, where n is the number of ring segments. */ public static PiPResult isPointInRing2D(Polygon polygon, int iRing, Point2D inputPoint, double tolerance) { MultiPathImpl polygonImpl = (MultiPathImpl) polygon._getImpl(); int res = PointInPolygonHelper.isPointInRing(polygonImpl, iRing, inputPoint, tolerance, null); if (res == 0) return PiPResult.PiPOutside; if (res == 1) return PiPResult.PiPInside; // return PiPResult.PiPBoundary; return PiPResult.PiPInside; // we do not return PiPBoundary. Overwise, // we would have to do more complex // calculations to differentiat between // internal and external boundaries. 
} /** * Tests if Point is inside of the any outer ring of a Polygon. Returns * PiPOutside if not in any outer ring, PiPInside if in the any outer ring, * or on the boundary. PiPBoundary is never returned. Note: If the tolerance * is not 0, the test is more expensive because it calculates closest * distance from a point to each segment. * * O(n) complexity, where n is the number of polygon segments. */ public static PiPResult isPointInAnyOuterRing(Polygon polygon, Point2D inputPoint, double tolerance) { int res = PointInPolygonHelper.isPointInAnyOuterRing(polygon, inputPoint, tolerance); if (res == 0) return PiPResult.PiPOutside; if (res == 1) return PiPResult.PiPInside; // return PiPResult.PiPBoundary; return PiPResult.PiPInside; // we do not return PiPBoundary. Overwise, // we would have to do more complex // calculations to differentiat between // internal and external boundaries. } // #ifndef DOTNET // /** // *Tests point is inside the Polygon for an array of points. // *Returns PiPOutside if not in polygon, PiPInside if in the polygon, // PiPBoundary is if on the border. // *It tests border only if the tolerance is > 0, otherwise PiPBoundary // cannot be returned. // *Note: If the tolerance is not 0, the test is more expensive. // * // *O(n*m) complexity, where n is the number of polygon segments, m is the // number of input points. // */ // static void TestPointsInPolygon2D(Polygon polygon, const Point2D* // inputPoints, int count, double tolerance, PiPResult testResults) // { // LOCALREFCLASS2(Array<Point2D>, Point2D*, int, inputPointsArr, // const_cast<Point2D*>(inputPoints), count); // LOCALREFCLASS2(Array<PolygonUtils::PiPResult>, PolygonUtils::PiPResult*, // int, testResultsArr, testResults, count); // TestPointsInPolygon2D(polygon, inputPointsArr, count, tolerance, // testResultsArr); // } // #endif /** * Tests point is inside the Polygon for an array of points. 
Returns * PiPOutside if not in polygon, PiPInside if in the polygon, PiPBoundary is * if on the border. It tests border only if the tolerance is > 0, otherwise * PiPBoundary cannot be returned. Note: If the tolerance is not 0, the test * is more expensive. * * O(n*m) complexity, where n is the number of polygon segments, m is the * number of input points. */ public static void testPointsInPolygon2D(Polygon polygon, Point2D[] inputPoints, int count, double tolerance, PiPResult[] testResults) { if (inputPoints.length < count || testResults.length < count) throw new IllegalArgumentException();// GEOMTHROW(invalid_argument); for (int i = 0; i < count; i++) testResults[i] = isPointInPolygon2D(polygon, inputPoints[i], tolerance); } static void testPointsInPolygon2D(Polygon polygon, double[] xyStreamBuffer, int pointCount, double tolerance, PiPResult[] testResults) { if (xyStreamBuffer.length / 2 < pointCount || testResults.length < pointCount) throw new IllegalArgumentException();// GEOMTHROW(invalid_argument); for (int i = 0; i < pointCount; i++) testResults[i] = isPointInPolygon2D(polygon, xyStreamBuffer[i * 2], xyStreamBuffer[i * 2 + 1], tolerance); } // public static void testPointsInPolygon2D(Polygon polygon, Geometry geom, // int count, double tolerance, PiPResult[] testResults) // { // if(geom.getType() == Type.Point) // { // // } // else if(Geometry.isMultiVertex(geom.getType())) // { // // } // // // if (inputPoints.length < count || testResults.length < count) // throw new IllegalArgumentException();//GEOMTHROW(invalid_argument); // // for (int i = 0; i < count; i++) // testResults[i] = isPointInPolygon2D(polygon, inputPoints[i], tolerance); // } /** * Tests point is inside an Area Geometry (Envelope, Polygon) for an array * of points. Returns PiPOutside if not in area, PiPInside if in the area, * PiPBoundary is if on the border. It tests border only if the tolerance is * > 0, otherwise PiPBoundary cannot be returned. 
Note: If the tolerance is * not 0, the test is more expensive. * * O(n*m) complexity, where n is the number of polygon segments, m is the * number of input points. */ public static void testPointsInArea2D(Geometry polygon, Point2D[] inputPoints, int count, double tolerance, PiPResult[] testResults) { if (polygon.getType() == Geometry.Type.Polygon) testPointsInPolygon2D((Polygon) polygon, inputPoints, count, tolerance, testResults); else if (polygon.getType() == Geometry.Type.Envelope) { Envelope2D env2D = new Envelope2D(); ((Envelope) polygon).queryEnvelope2D(env2D); _testPointsInEnvelope2D(env2D, inputPoints, count, tolerance, testResults); } else throw new GeometryException("invalid_call");// GEOMTHROW(invalid_call); } public static void testPointsInArea2D(Geometry polygon, double[] xyStreamBuffer, int count, double tolerance, PiPResult[] testResults) { if (polygon.getType() == Geometry.Type.Polygon) testPointsInPolygon2D((Polygon) polygon, xyStreamBuffer, count, tolerance, testResults); else if (polygon.getType() == Geometry.Type.Envelope) { Envelope2D env2D = new Envelope2D(); ((Envelope) polygon).queryEnvelope2D(env2D); _testPointsInEnvelope2D(env2D, xyStreamBuffer, count, tolerance, testResults); } else throw new GeometryException("invalid_call");// GEOMTHROW(invalid_call); } private static void _testPointsInEnvelope2D(Envelope2D env2D, Point2D[] inputPoints, int count, double tolerance, PiPResult[] testResults) { if (inputPoints.length < count || testResults.length < count) throw new IllegalArgumentException(); if (env2D.isEmpty()) { for (int i = 0; i < count; i++) testResults[i] = PiPResult.PiPOutside; return; } Envelope2D envIn = env2D; // note for java port - assignement by value envIn.inflate(-tolerance * 0.5, -tolerance * 0.5); Envelope2D envOut = env2D;// note for java port - assignement by value envOut.inflate(tolerance * 0.5, tolerance * 0.5); for (int i = 0; i < count; i++) { if (envIn.contains(inputPoints[i])) testResults[i] = PiPResult.PiPInside; 
else if (!envOut.contains(inputPoints[i])) testResults[i] = PiPResult.PiPOutside; else testResults[i] = PiPResult.PiPBoundary; } } private static void _testPointsInEnvelope2D(Envelope2D env2D, double[] xyStreamBuffer, int pointCount, double tolerance, PiPResult[] testResults) { if (xyStreamBuffer.length / 2 < pointCount || testResults.length < pointCount) throw new IllegalArgumentException(); if (env2D.isEmpty()) { for (int i = 0; i < pointCount; i++) testResults[i] = PiPResult.PiPOutside; return; } Envelope2D envIn = env2D; // note for java port - assignement by value envIn.inflate(-tolerance * 0.5, -tolerance * 0.5); Envelope2D envOut = env2D;// note for java port - assignement by value envOut.inflate(tolerance * 0.5, tolerance * 0.5); for (int i = 0; i < pointCount; i++) { if (envIn .contains(xyStreamBuffer[i * 2], xyStreamBuffer[i * 2 + 1])) testResults[i] = PiPResult.PiPInside; else if (!envIn.contains(xyStreamBuffer[i * 2], xyStreamBuffer[i * 2 + 1])) testResults[i] = PiPResult.PiPOutside; else testResults[i] = PiPResult.PiPBoundary; } } static void testPointsOnSegment_(Segment seg, Point2D[] input_points, int count, double tolerance, PolygonUtils.PiPResult[] test_results) { for (int i = 0; i < count; i++) { if (seg.isIntersecting(input_points[i], tolerance)) test_results[i] = PiPResult.PiPBoundary; else test_results[i] = PiPResult.PiPOutside; } } static void testPointsOnPolyline2D_(Polyline poly, Point2D[] input_points, int count, double tolerance, PolygonUtils.PiPResult[] test_results) { MultiPathImpl mp_impl = (MultiPathImpl) poly._getImpl(); GeometryAccelerators accel = mp_impl._getAccelerators(); RasterizedGeometry2D rgeom = null; if (accel != null) { rgeom = accel.getRasterizedGeometry(); } int pointsLeft = count; for (int i = 0; i < count; i++) { test_results[i] = PiPResult.PiPInside;// set to impossible value if (rgeom != null) { Point2D input_point = input_points[i]; RasterizedGeometry2D.HitType hit = rgeom.queryPointInGeometry( input_point.x, 
input_point.y); if (hit == RasterizedGeometry2D.HitType.Outside) { test_results[i] = PiPResult.PiPOutside; pointsLeft--; } } } if (pointsLeft != 0) { SegmentIteratorImpl iter = mp_impl.querySegmentIterator(); while (iter.nextPath() && pointsLeft != 0) { while (iter.hasNextSegment() && pointsLeft != 0) { Segment segment = iter.nextSegment(); for (int i = 0; i < count && pointsLeft != 0; i++) { if (test_results[i] == PiPResult.PiPInside) { if (segment.isIntersecting(input_points[i], tolerance)) { test_results[i] = PiPResult.PiPBoundary; pointsLeft--; } } } } } } for (int i = 0; i < count; i++) { if (test_results[i] == PiPResult.PiPInside) test_results[i] = PiPResult.PiPOutside; } } static void testPointsOnLine2D(Geometry line, Point2D[] input_points, int count, double tolerance, PolygonUtils.PiPResult[] test_results) { Geometry.Type gt = line.getType(); if (gt == Geometry.Type.Polyline) testPointsOnPolyline2D_((Polyline) line, input_points, count, tolerance, test_results); else if (Geometry.isSegment(gt.value())) { testPointsOnSegment_((Segment) line, input_points, count, tolerance, test_results); } else throw new GeometryException("Invalid call."); } }
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.10.09 at 10:10:23 AM CST
//

package com.elong.nb.model.elong;

import java.math.BigDecimal;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import java.util.List;
import com.alibaba.fastjson.annotation.JSONField;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
import java.util.Date;

/**
 * <p>Java class for BaseDrrRule complex type (JAXB-generated bean; also
 * serialized with fastjson via the {@code @JSONField} aliases).
 *
 * <p>The following schema fragment specifies the expected content contained
 * within this class.
 *
 * <pre>
 * &lt;complexType name="BaseDrrRule">
 *   &lt;complexContent>
 *     &lt;extension base="{}BaseRule">
 *       &lt;sequence>
 *         &lt;element name="TypeCode" type="{}EnumDrrRuleCode"/>
 *         &lt;element name="DateType" type="{}EnumDateType"/>
 *         &lt;element name="StartDate" type="{http://www.w3.org/2001/XMLSchema}dateTime"/>
 *         &lt;element name="EndDate" type="{http://www.w3.org/2001/XMLSchema}dateTime"/>
 *         &lt;element name="DayNum" type="{http://www.w3.org/2001/XMLSchema}int"/>
 *         &lt;element name="CheckInNum" type="{http://www.w3.org/2001/XMLSchema}int"/>
 *         &lt;element name="EveryCheckInNum" type="{http://www.w3.org/2001/XMLSchema}int"/>
 *         &lt;element name="LastDayNum" type="{http://www.w3.org/2001/XMLSchema}int"/>
 *         &lt;element name="WhichDayNum" type="{http://www.w3.org/2001/XMLSchema}int"/>
 *         &lt;element name="CashScale" type="{}EnumDrrPreferential"/>
 *         &lt;element name="DeductNum" type="{http://www.w3.org/2001/XMLSchema}decimal"/>
 *         &lt;element name="WeekSet" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="FeeType" type="{}EnumDrrFeeType"/>
 *         &lt;element name="RoomTypeIds" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "BaseDrrRule", propOrder = { "typeCode", "dateType",
        "startDate", "endDate", "dayNum", "checkInNum", "everyCheckInNum",
        "lastDayNum", "whichDayNum", "cashScale", "deductNum", "weekSet",
        "feeType", "roomTypeIds" })
@XmlSeeAlso({ DrrRule.class })
public class BaseDrrRule extends BaseRule {

    // XML element "TypeCode": which DRR rule this is.
    @JSONField(name = "TypeCode")
    @XmlSchemaType(name = "string")
    protected EnumDrrRuleCode typeCode;

    // XML element "DateType".
    @JSONField(name = "DateType")
    @XmlSchemaType(name = "string")
    protected EnumDateType dateType;

    // XML element "StartDate" (xsd:dateTime).
    @JSONField(name = "StartDate")
    @XmlSchemaType(name = "dateTime")
    protected java.util.Date startDate;

    // XML element "EndDate" (xsd:dateTime).
    @JSONField(name = "EndDate")
    @XmlSchemaType(name = "dateTime")
    protected java.util.Date endDate;

    // XML element "DayNum".
    @JSONField(name = "DayNum")
    protected int dayNum;

    // XML element "CheckInNum".
    @JSONField(name = "CheckInNum")
    protected int checkInNum;

    // XML element "EveryCheckInNum".
    @JSONField(name = "EveryCheckInNum")
    protected int everyCheckInNum;

    // XML element "LastDayNum".
    @JSONField(name = "LastDayNum")
    protected int lastDayNum;

    // XML element "WhichDayNum".
    @JSONField(name = "WhichDayNum")
    protected int whichDayNum;

    // XML element "CashScale": cash-or-percentage discount kind.
    @JSONField(name = "CashScale")
    @XmlSchemaType(name = "string")
    protected EnumDrrPreferential cashScale;

    // XML element "DeductNum" (xsd:decimal).
    @JSONField(name = "DeductNum")
    protected BigDecimal deductNum;

    // XML element "WeekSet" (optional).
    @JSONField(name = "WeekSet")
    protected String weekSet;

    // XML element "FeeType".
    @JSONField(name = "FeeType")
    @XmlSchemaType(name = "string")
    protected EnumDrrFeeType feeType;

    // XML element "RoomTypeIds" (optional).
    @JSONField(name = "RoomTypeIds")
    protected String roomTypeIds;

    /** Gets the value of the typeCode property. May be {@code null}. */
    public EnumDrrRuleCode getTypeCode() {
        return typeCode;
    }

    /** Sets the value of the typeCode property. */
    public void setTypeCode(EnumDrrRuleCode value) {
        this.typeCode = value;
    }

    /** Gets the value of the dateType property. May be {@code null}. */
    public EnumDateType getDateType() {
        return dateType;
    }

    /** Sets the value of the dateType property. */
    public void setDateType(EnumDateType value) {
        this.dateType = value;
    }

    /** Gets the value of the startDate property. May be {@code null}. */
    public java.util.Date getStartDate() {
        return startDate;
    }

    /** Sets the value of the startDate property. */
    public void setStartDate(java.util.Date value) {
        this.startDate = value;
    }

    /** Gets the value of the endDate property. May be {@code null}. */
    public java.util.Date getEndDate() {
        return endDate;
    }

    /** Sets the value of the endDate property. */
    public void setEndDate(java.util.Date value) {
        this.endDate = value;
    }

    /** Gets the value of the dayNum property. */
    public int getDayNum() {
        return dayNum;
    }

    /** Sets the value of the dayNum property. */
    public void setDayNum(int value) {
        this.dayNum = value;
    }

    /** Gets the value of the checkInNum property. */
    public int getCheckInNum() {
        return checkInNum;
    }

    /** Sets the value of the checkInNum property. */
    public void setCheckInNum(int value) {
        this.checkInNum = value;
    }

    /** Gets the value of the everyCheckInNum property. */
    public int getEveryCheckInNum() {
        return everyCheckInNum;
    }

    /** Sets the value of the everyCheckInNum property. */
    public void setEveryCheckInNum(int value) {
        this.everyCheckInNum = value;
    }

    /** Gets the value of the lastDayNum property. */
    public int getLastDayNum() {
        return lastDayNum;
    }

    /** Sets the value of the lastDayNum property. */
    public void setLastDayNum(int value) {
        this.lastDayNum = value;
    }

    /** Gets the value of the whichDayNum property. */
    public int getWhichDayNum() {
        return whichDayNum;
    }

    /** Sets the value of the whichDayNum property. */
    public void setWhichDayNum(int value) {
        this.whichDayNum = value;
    }

    /** Gets the value of the cashScale property. May be {@code null}. */
    public EnumDrrPreferential getCashScale() {
        return cashScale;
    }

    /** Sets the value of the cashScale property. */
    public void setCashScale(EnumDrrPreferential value) {
        this.cashScale = value;
    }

    /** Gets the value of the deductNum property. May be {@code null}. */
    public BigDecimal getDeductNum() {
        return deductNum;
    }

    /** Sets the value of the deductNum property. */
    public void setDeductNum(BigDecimal value) {
        this.deductNum = value;
    }

    /** Gets the value of the weekSet property. May be {@code null}. */
    public String getWeekSet() {
        return weekSet;
    }

    /** Sets the value of the weekSet property. */
    public void setWeekSet(String value) {
        this.weekSet = value;
    }

    /** Gets the value of the feeType property. May be {@code null}. */
    public EnumDrrFeeType getFeeType() {
        return feeType;
    }

    /** Sets the value of the feeType property. */
    public void setFeeType(EnumDrrFeeType value) {
        this.feeType = value;
    }

    /** Gets the value of the roomTypeIds property. May be {@code null}. */
    public String getRoomTypeIds() {
        return roomTypeIds;
    }

    /** Sets the value of the roomTypeIds property. */
    public void setRoomTypeIds(String value) {
        this.roomTypeIds = value;
    }
}
/*
 * Copyright 2011 Henry Coles
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and limitations under the License.
 */
package org.pitest.classinfo;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;

import org.junit.Test;

import nl.jqno.equalsverifier.EqualsVerifier;

/**
 * Tests for {@link ClassName}: conversion between Java ("a.b.C") and internal
 * ("a/b/C") name forms, equality/hashing, ordering, and instance caching for
 * common JDK classes.
 */
public class ClassNameTest {

  @Test
  public void shouldConvertJavaNamesToInternalNames() {
    final ClassName testee = ClassName.fromString("com.foo.bar");
    assertEquals("com/foo/bar", testee.asInternalName());
  }

  @Test
  public void shouldConvertInternalNamesToJavaNames() {
    final ClassName testee = ClassName.fromString("com/foo/bar");
    assertEquals("com.foo.bar", testee.asJavaName());
  }

  @Test
  public void shouldTreatSameClassNameAsEqual() {
    // Equality must hold regardless of which name form was used to construct.
    final ClassName left = ClassName.fromString("com/foo/bar");
    final ClassName right = ClassName.fromString("com.foo.bar");
    assertTrue(left.equals(right));
    assertTrue(right.equals(left));
  }

  @Test
  public void shouldDisplayJavaNameInToString() {
    final ClassName testee = ClassName.fromString("com/foo/bar");
    assertEquals("com.foo.bar", testee.toString());
  }

  @Test
  public void getNameWithoutPackageShouldReturnNameOnlyWhenClassIsOuterClass() {
    assertEquals(ClassName.fromString("String"),
        ClassName.fromClass(String.class).getNameWithoutPackage());
  }

  static class Foo {

  }

  @Test
  public void getNameWithoutPackageShouldReturnNameWhenClassIsInnerClass() {
    assertEquals(ClassName.fromString("ClassNameTest$Foo"),
        ClassName.fromClass(Foo.class).getNameWithoutPackage());
  }

  @Test
  public void getNameWithoutPackageShouldReturnNameWhenClassInPackageDefault() {
    assertEquals(ClassName.fromString("Foo"),
        ClassName.fromString("Foo").getNameWithoutPackage());
  }

  @Test
  public void getPackageShouldReturnEmptyPackageWhenClassInPackageDefault() {
    assertEquals(ClassName.fromString(""),
        ClassName.fromString("Foo").getPackage());
  }

  @Test
  public void getPackageShouldReturnPackageWhenClassWithinAPackage() {
    assertEquals(ClassName.fromString("org.pitest.classinfo"), ClassName.fromClass(
        ClassNameTest.class).getPackage());
  }

  // Method name typo fixed ("Pacakge" -> "Package"); JUnit discovers tests by
  // annotation, so the rename is safe.
  @Test
  public void withoutSuffixCharsShouldReturnPackageAndClassWithoutSuffixChars() {
    assertEquals(ClassName.fromString("com.example.Foo"), ClassName.fromString(
        "com.example.FooTest").withoutSuffixChars(4));
  }

  // Method name typos fixed ("Prefeix" -> "Prefix", "Pacakge" -> "Package").
  @Test
  public void withoutPrefixCharsShouldReturnPackageAndClassWithoutPrefixChars() {
    assertEquals(ClassName.fromString("com.example.Foo"), ClassName.fromString(
        "com.example.TestFoo").withoutPrefixChars(4));
  }

  @Test
  public void shouldSortByName() {
    final ClassName a = ClassName.fromString("a.a.c");
    final ClassName b = ClassName.fromString("a.b.c");
    final ClassName c = ClassName.fromString("b.a.c");

    final List<ClassName> actual = Arrays.asList(b, c, a);
    Collections.sort(actual);
    assertEquals(Arrays.asList(a, b, c), actual);
  }

  @Test
  public void shouldConvertStringToClassName() {
    assertEquals(ClassName.fromString("foo"), ClassName.stringToClassName()
        .apply("foo"));
  }

  @Test
  public void shouldProduceSameHashCodeForSameClass() {
    assertEquals(ClassName.fromString("org/example/Foo").hashCode(), ClassName
        .fromString("org.example.Foo").hashCode());
  }

  @Test
  public void shouldProduceDifferentHashCodeForDifferentClasses() {
    assertFalse(ClassName.fromString("org/example/Foo").hashCode() == ClassName
        .fromString("org.example.Bar").hashCode());
  }

  @Test
  public void shouldTreatSameClassAsEqual() {
    assertEquals(ClassName.fromString("org/example/Foo"),
        ClassName.fromString("org.example.Foo"));
  }

  @Test
  public void shouldTreatDifferentClassesAsNotEqual() {
    assertFalse(ClassName.fromString("org/example/Foo").equals(
        ClassName.fromString("org.example.Bar")));
  }

  @Test
  public void nameToClassShouldReturnClassWhenKnownToLoader() {
    assertThat(ClassName.nameToClass().apply(ClassName.fromString("java.lang.String")))
        .contains(String.class);
  }

  @Test
  public void stringToClassShouldReturnEmptyWhenClassNotKnownToLoader() {
    assertThat(ClassName.nameToClass()
        .apply(ClassName.fromString("org.unknown.Unknown")))
        .isEmpty();
  }

  @Test
  public void shouldObeyHashcodeEqualsContract() {
    EqualsVerifier.forClass(ClassName.class).verify();
  }

  @Test
  public void shouldUseCachedInstancesForObject() {
    assertThat(ClassName.fromClass(Object.class)).isSameAs(ClassName.fromClass(Object.class));
  }

  @Test
  public void shouldUseCachedInstancesForString() {
    assertThat(ClassName.fromClass(String.class)).isSameAs(ClassName.fromClass(String.class));
  }

  @Test
  public void shouldUseCachedInstancesForInteger() {
    assertThat(ClassName.fromClass(Integer.class)).isSameAs(ClassName.fromClass(Integer.class));
  }

  @Test
  public void shouldUseCachedInstancesForList() {
    assertThat(ClassName.fromClass(List.class)).isSameAs(ClassName.fromClass(List.class));
  }

  @Test
  public void shouldUseCachedInstancesForArrayList() {
    assertThat(ClassName.fromClass(ArrayList.class)).isSameAs(ClassName.fromClass(ArrayList.class));
  }

  @Test
  public void shouldUseCachedInstancesForStream() {
    assertThat(ClassName.fromClass(Stream.class)).isSameAs(ClassName.fromClass(Stream.class));
  }

  @Test
  public void shouldUseCachedInstancesForFunction() {
    assertThat(ClassName.fromClass(Function.class)).isSameAs(ClassName.fromClass(Function.class));
  }

  @Test
  public void shouldUseCachedInstancesForPredicate() {
    assertThat(ClassName.fromClass(Predicate.class)).isSameAs(ClassName.fromClass(Predicate.class));
  }
}
package org.bouncycastle.pqc.jcajce.provider.mceliece;

import java.io.ByteArrayOutputStream;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.spec.AlgorithmParameterSpec;

import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;

import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.x509.X509ObjectIdentifiers;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.Digest;
import org.bouncycastle.crypto.digests.SHA1Digest;
import org.bouncycastle.crypto.digests.SHA224Digest;
import org.bouncycastle.crypto.digests.SHA256Digest;
import org.bouncycastle.crypto.digests.SHA384Digest;
import org.bouncycastle.crypto.digests.SHA512Digest;
import org.bouncycastle.crypto.params.ParametersWithRandom;
import org.bouncycastle.pqc.crypto.mceliece.McElieceCCA2KeyParameters;
import org.bouncycastle.pqc.crypto.mceliece.McElieceKobaraImaiCipher;
import org.bouncycastle.pqc.jcajce.provider.util.AsymmetricHybridCipher;

/**
 * JCE cipher SPI adapter for the Kobara-Imai conversion of the McEliece
 * cipher. Input is accumulated in an internal buffer by {@link #update} and
 * processed in one shot by {@link #doFinal}; before encryption the plaintext
 * is padded with a trailing {@code 0x01} marker byte (see {@link #pad()} /
 * {@link #unpad(byte[])}).
 */
public class McElieceKobaraImaiCipherSpi
    extends AsymmetricHybridCipher
    implements PKCSObjectIdentifiers, X509ObjectIdentifiers
{
    // TODO digest needed? (only reset() is ever called on it here)
    private Digest digest;

    /** The underlying Kobara-Imai McEliece engine doing the actual work. */
    private McElieceKobaraImaiCipher cipher;

    /**
     * buffer to store the input data
     */
    private ByteArrayOutputStream buf = new ByteArrayOutputStream();

    public McElieceKobaraImaiCipherSpi()
    {
        // NOTE(review): this constructor leaves digest/cipher null; init and
        // doFinal would NPE. Presumably only the parameterized subclasses
        // below are registered with the provider - confirm before use.
        buf = new ByteArrayOutputStream();
    }

    protected McElieceKobaraImaiCipherSpi(Digest digest, McElieceKobaraImaiCipher cipher)
    {
        this.digest = digest;
        this.cipher = cipher;
        buf = new ByteArrayOutputStream();
    }

    /**
     * Continue a multiple-part encryption or decryption operation.
     * The data is only buffered; nothing is produced until {@link #doFinal}.
     *
     * @param input byte array containing the next part of the input
     * @param inOff index in the array where the input starts
     * @param inLen length of the input
     * @return the processed byte array (always empty for this cipher).
     */
    public byte[] update(byte[] input, int inOff, int inLen)
    {
        buf.write(input, inOff, inLen);
        return new byte[0];
    }

    /**
     * Encrypts or decrypts data in a single-part operation, or finishes a
     * multiple-part operation. The data is encrypted or decrypted, depending on
     * how this cipher was initialized.
     *
     * @param input the input buffer
     * @param inOff the offset in input where the input starts
     * @param inLen the input length
     * @return the new buffer with the result
     * @throws BadPaddingException if this cipher is in decryption mode and the
     *         decryption or unpadding of the message fails
     */
    public byte[] doFinal(byte[] input, int inOff, int inLen)
        throws BadPaddingException
    {
        update(input, inOff, inLen);
        if (opMode == ENCRYPT_MODE)
        {
            try
            {
                return cipher.messageEncrypt(this.pad());
            }
            catch (Exception e)
            {
                // was: printStackTrace() + "return null" - never swallow;
                // surface the failure with its cause instead.
                throw new IllegalStateException("unable to encrypt message", e);
            }
        }
        else if (opMode == DECRYPT_MODE)
        {
            byte[] inputOfDecr = buf.toByteArray();
            buf.reset();
            try
            {
                return unpad(cipher.messageDecrypt(inputOfDecr));
            }
            catch (BadPaddingException e)
            {
                // already the declared failure type - propagate unchanged
                throw e;
            }
            catch (Exception e)
            {
                // BadPaddingException has no (message, cause) constructor
                BadPaddingException bpe =
                    new BadPaddingException("unable to decrypt message: " + e.getMessage());
                bpe.initCause(e);
                throw bpe;
            }
        }
        // cipher neither in encrypt nor decrypt mode
        return null;
    }

    protected int encryptOutputSize(int inLen)
    {
        return 0;
    }

    protected int decryptOutputSize(int inLen)
    {
        return 0;
    }

    protected void initCipherEncrypt(Key key, AlgorithmParameterSpec params,
                                     SecureRandom sr)
        throws InvalidKeyException, InvalidAlgorithmParameterException
    {
        buf.reset();
        CipherParameters param;
        param = McElieceCCA2KeysToParams.generatePublicKeyParameter((PublicKey)key);
        param = new ParametersWithRandom(param, sr);
        digest.reset();
        cipher.init(true, param);
    }

    protected void initCipherDecrypt(Key key, AlgorithmParameterSpec params)
        throws InvalidKeyException, InvalidAlgorithmParameterException
    {
        buf.reset();
        CipherParameters param;
        param = McElieceCCA2KeysToParams.generatePrivateKeyParameter((PrivateKey)key);
        digest.reset();
        cipher.init(false, param);
    }

    public String getName()
    {
        return "McElieceKobaraImaiCipher";
    }

    public int getKeySize(Key key)
        throws InvalidKeyException
    {
        McElieceCCA2KeyParameters mcElieceCCA2KeyParameters;
        if (key instanceof PublicKey)
        {
            mcElieceCCA2KeyParameters =
                (McElieceCCA2KeyParameters)McElieceCCA2KeysToParams.generatePublicKeyParameter((PublicKey)key);
        }
        else if (key instanceof PrivateKey)
        {
            mcElieceCCA2KeyParameters =
                (McElieceCCA2KeyParameters)McElieceCCA2KeysToParams.generatePrivateKeyParameter((PrivateKey)key);
        }
        else
        {
            throw new InvalidKeyException("unsupported key type: " + key.getClass().getName());
        }
        return cipher.getKeySize(mcElieceCCA2KeyParameters);
    }

    /**
     * Pad and return the message stored in the message buffer: a single
     * {@code 0x01} marker byte is appended so trailing zeros of the plaintext
     * survive decryption. The buffer is cleared afterwards.
     *
     * @return the padded message
     */
    private byte[] pad()
    {
        buf.write(0x01);
        byte[] result = buf.toByteArray();
        buf.reset();
        return result;
    }

    /**
     * Unpad a message: strip trailing zeros and the {@code 0x01} marker
     * appended by {@link #pad()}.
     *
     * @param pmBytes the padded message
     * @return the message
     * @throws BadPaddingException if the padded message is invalid.
     */
    private byte[] unpad(byte[] pmBytes)
        throws BadPaddingException
    {
        // find first non-zero byte, scanning backwards
        int index;
        for (index = pmBytes.length - 1; index >= 0 && pmBytes[index] == 0; index--)
        {
            // nothing to do - the loop header does the scan
        }

        // index < 0 means the message was empty or all zeros; the original
        // code then read pmBytes[-1] and threw ArrayIndexOutOfBoundsException
        // instead of the contractual BadPaddingException.
        if (index < 0 || pmBytes[index] != 0x01)
        {
            throw new BadPaddingException("invalid ciphertext");
        }

        // extract and return message (everything before the 0x01 marker)
        byte[] mBytes = new byte[index];
        System.arraycopy(pmBytes, 0, mBytes, 0, index);
        return mBytes;
    }

    public byte[] messageEncrypt()
        throws IllegalBlockSizeException, BadPaddingException, NoSuchAlgorithmException
    {
        try
        {
            return cipher.messageEncrypt(this.pad());
        }
        catch (Exception e)
        {
            // was silently swallowed with printStackTrace() + null return
            throw new IllegalStateException("unable to encrypt message", e);
        }
    }

    public byte[] messageDecrypt()
        throws IllegalBlockSizeException, BadPaddingException, NoSuchAlgorithmException
    {
        byte[] inputOfDecr = buf.toByteArray();
        buf.reset();
        try
        {
            return unpad(cipher.messageDecrypt(inputOfDecr));
        }
        catch (BadPaddingException e)
        {
            throw e;
        }
        catch (Exception e)
        {
            BadPaddingException bpe =
                new BadPaddingException("unable to decrypt message: " + e.getMessage());
            bpe.initCause(e);
            throw bpe;
        }
    }

    static public class McElieceKobaraImai
        extends McElieceKobaraImaiCipherSpi
    {
        public McElieceKobaraImai()
        {
            super(new SHA1Digest(), new McElieceKobaraImaiCipher());
        }
    }

    static public class McElieceKobaraImai224
        extends McElieceKobaraImaiCipherSpi
    {
        public McElieceKobaraImai224()
        {
            super(new SHA224Digest(), new McElieceKobaraImaiCipher());
        }
    }

    static public class McElieceKobaraImai256
        extends McElieceKobaraImaiCipherSpi
    {
        public McElieceKobaraImai256()
        {
            super(new SHA256Digest(), new McElieceKobaraImaiCipher());
        }
    }

    static public class McElieceKobaraImai384
        extends McElieceKobaraImaiCipherSpi
    {
        public McElieceKobaraImai384()
        {
            super(new SHA384Digest(), new McElieceKobaraImaiCipher());
        }
    }

    static public class McElieceKobaraImai512
        extends McElieceKobaraImaiCipherSpi
    {
        public McElieceKobaraImai512()
        {
            super(new SHA512Digest(), new McElieceKobaraImaiCipher());
        }
    }
}
/**
 * Copyright (C) [2013] [The FURTHeR Project]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.utah.further.core.data.hibernate.query;

import static edu.utah.further.core.api.collections.CollectionUtil.newList;
import static edu.utah.further.core.api.message.Messages.unsupportedMessage;
import static org.hibernate.criterion.CriteriaSpecification.DISTINCT_ROOT_ENTITY;
import static org.hibernate.criterion.Order.asc;
import static org.hibernate.criterion.Order.desc;
import static org.slf4j.LoggerFactory.getLogger;

import java.util.List;

import org.apache.commons.lang.Validate;
import org.hibernate.FetchMode;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.metadata.ClassMetadata;
import org.hibernate.transform.ResultTransformer;
import org.slf4j.Logger;

import edu.utah.further.core.api.context.Implementation;
import edu.utah.further.core.api.data.PersistentEntity;
import edu.utah.further.core.api.exception.ApplicationException;
import edu.utah.further.core.api.exception.BusinessRuleException;
import edu.utah.further.core.api.lang.Builder;
import edu.utah.further.core.data.hibernate.adapter.CriteriaType;
import edu.utah.further.core.data.hibernate.adapter.GenericCriteria;
import edu.utah.further.core.data.hibernate.adapter.GenericCriteriaFactory;
import edu.utah.further.core.query.domain.SearchCriterion;
import edu.utah.further.core.query.domain.SearchQuery;
import edu.utah.further.core.query.domain.SearchQueryAlias;
import edu.utah.further.core.query.domain.SortCriterion;
import edu.utah.further.core.query.domain.SortType;

/**
 * Converts a composite {@link SearchQuery} object to a Hibernate {@link GenericCriteria}
 * object. Note that this class is not thread-safe and must be synchronized externally.
 * <p>
 * -----------------------------------------------------------------------------------<br>
 * (c) 2008-2013 FURTHeR Project, Health Sciences IT, University of Utah<br>
 * Contact: {@code <further@utah.edu>}<br>
 * Biomedical Informatics, 26 South 2000 East<br>
 * Room 5775 HSEB, Salt Lake City, UT 84112<br>
 * Day Phone: 1-801-581-4080<br>
 * -----------------------------------------------------------------------------------
 *
 * @author Oren E. Livne {@code <oren.livne@utah.edu>}
 * @author N. Dustin Schultz {@code <dustin.schultz@utah.edu>}
 * @version May 28, 2009
 */
@Implementation
public final class QueryBuilderHibernateImpl implements Builder<GenericCriteria>
{
    // ========================= CONSTANTS =================================

    /**
     * A logger that helps identify this class' printouts.
     */
    @SuppressWarnings("unused")
    private static final Logger log = getLogger(QueryBuilderHibernateImpl.class);

    // ========================= FIELDS ====================================

    /**
     * A destination object of the principal searchable entity. Filled with
     * {@link Criterion}s upon returning from this factory.
     */
    private GenericCriteria destination;

    // ========================= DEPENDENCIES ==============================

    /**
     * Session factory, required for retrieving a Hibernate session to which the hibernate
     * criteria are bound.
     */
    private final SessionFactory sessionFactory;

    /**
     * Whether or not this criteria should return distinct results, done programmatically.
     */
    private boolean isDistinct = false;

    /**
     * What type of criteria, either main or sub
     */
    private final CriteriaType criteriaType;

    /**
     * The {@link SearchQuery} from which to build the {@link GenericCriteria} from.
     * Set via {@link #setQuery(SearchQuery)} before {@link #build()} is called.
     */
    private SearchQuery searchQuery;

    /**
     * The package where the domainClass lives. Used to resolve the query's root
     * object name into a class in {@link #build()}.
     */
    private final String domainClassPackage;

    /**
     * Very useful object to get information about the root entity.
     * Populated inside {@link #build()} and read by {@link #convertTree(SearchCriterion)}.
     */
    private ClassMetadata classMetadata;

    /**
     * Return a Hibernate criteria instance.
     *
     * @param criteriaType
     *            criteria type (main criteria/detached criteria=sub-criteria)
     * @param domainClass
     *            root entity type
     * @param sessionFactory
     *            session factory to bind to
     */
    public static QueryBuilderHibernateImpl newInstance(final CriteriaType criteriaType,
            final String domainClassPackage, final SessionFactory sessionFactory)
    {
        return new QueryBuilderHibernateImpl(criteriaType, domainClassPackage,
                sessionFactory);
    }

    /**
     * Convert a search query to Hibernate criteria. Convenience facade:
     * instantiates a builder, injects the query, and builds in one call.
     *
     * @param criteriaType
     *            criteria type (main criteria/detached criteria=sub-criteria)
     * @param domainClass
     *            root entity type
     * @param sessionFactory
     *            session factory to bind to
     * @param searchQuery
     *            search query to convert
     */
    public static GenericCriteria convert(final CriteriaType criteriaType,
            final String domainClassPackage, final SessionFactory sessionFactory,
            final SearchQuery searchQuery)
    {
        // Fail fast: without a root object name the query cannot be resolved to a class
        if (searchQuery.getRootObjectName() == null)
        {
            throw new ApplicationException(
                    "SearchQuery requires a root object in the package "
                            + domainClassPackage + " but root object name was null");
        }
        return newInstance(criteriaType, domainClassPackage, sessionFactory).setQuery(
                searchQuery).build();
    }

    /**
     * Construct a Hibernate criteria builder.
     *
     * @param criteriaType
     *            criteria type (main criteria/detached criteria=sub-criteria)
     * @param domainClass
     *            root entity type
     * @param sessionFactory
     *            session factory to bind to
     */
    private QueryBuilderHibernateImpl(final CriteriaType criteriaType,
            final String domainClassPackage, final SessionFactory sessionFactory)
    {
        super();
        this.criteriaType = criteriaType;
        this.sessionFactory = sessionFactory;
        this.domainClassPackage = domainClassPackage;
    }

    // ========================= IMPLEMENTATION: Builder<Criteria> =========

    /**
     * Convert abstract the search criteria to a Hibernate Criterion.
     *
     * @return root entity Hibernate Criterion destination object
     * @see edu.utah.further.core.api.lang.Builder#build()
     */
    @Override
    public GenericCriteria build()
    {
        Validate.notNull(searchQuery, "Search query must exist");
        Validate.notNull(searchQuery.getRootCriterion(),
                "Search query root criterion must exist");
        Validate.notNull(sessionFactory, "Session Factory");
        final Class<? extends PersistentEntity<?>> domainClass = searchQuery
                .getRootObject(domainClassPackage);
        destination = createCriteria(domainClass);

        // Set aliases
        addAliases(searchQuery, destination);

        classMetadata = sessionFactory.getClassMetadata(domainClass);

        // Add in the criterion objects
        destination.add(convertTree(searchQuery.getRootCriterion()));

        // Provide an opportunity to attach projections inside the entire criterion tree.
        // Note: COUNT-type criteria currently add their Projections as part
        // of convertTree(), so ProjectionFactoryHibernateImpl does not currently add any
        // projections.
        destination.setProjection(new ProjectionFactoryHibernateImpl(searchQuery
                .getRootCriterion(), classMetadata).build());

        // Set sorting options
        addSortOrder();

        // Set result set limits
        setLimits();

        // Set result set view
        if (isDistinct)
        {
            addDistinct();
        }
        return destination;
    }

    // ========================= METHODS ===================================

    /**
     * Add abstract search criteria to the Hibernate destination criteria.
     *
     * @param aSearchQuery
     *            input set of search criteria
     * @return {@code this}, for method chaining
     */
    public QueryBuilderHibernateImpl setQuery(final SearchQuery aSearchQuery)
    {
        this.searchQuery = aSearchQuery;
        return this;
    }

    /**
     *
     * Sets whether the Hibernate {@link GenericCriteria} should return distinct results
     * or not. This distinct is done programmatically versus database distinct. The
     * distinct is based on the root entity.
     *
     * @param distinct
     *            A flag indicating whether or not return distinct results
     *            programmatically
     * @return {@code this}, for method chaining
     */
    public QueryBuilderHibernateImpl distinct(final boolean distinct)
    {
        this.isDistinct = distinct;
        return this;
    }

    /**
     * Return the sessionFactory property. For testing only.
     *
     * @return the sessionFactory
     */
    SessionFactory getSessionFactory()
    {
        return sessionFactory;
    }

    // ========================= PRIVATE METHODS ===========================

    /**
     * Apply alias definitions of a search criterion to the corresponding Hibernate
     * criteria. Each alias is also fetched eagerly via {@code FetchMode.JOIN}.
     *
     * @param searchQuery
     * @param destination
     */
    static void addAliases(final SearchQuery searchQuery,
            final GenericCriteria destination)
    {
        for (final SearchQueryAlias alias : searchQuery.getAliases())
        {
            // Note the argument order reversing between the search framework
            // and Hibernate but not between our search framework and Hibernate adapters
            destination.addAlias(alias.getKey(), alias.getValue()).setFetchMode(
                    alias.getValue(), FetchMode.JOIN);
        }
    }

    /**
     * Convert a search criterion tree into a Hibernate search criterion tree (like an
     * adapter, but simply generates a Hibernate criterion instance).
     * Recurses depth-first: children and sub-queries are converted before the parent.
     */
    private Criterion convertTree(final SearchCriterion searchCriterion)
    {
        // Convert sub-criteria first
        final List<Criterion> convertedCriteria = newList();
        for (final SearchCriterion childCriterion : searchCriterion.getCriteria())
        {
            convertedCriteria.add(convertTree(childCriterion));
        }

        // Convert nested queries into detached (sub-)criteria
        final List<DetachedCriteria> convertedQueries = newList();
        for (final SearchQuery childQuery : searchCriterion.getQueries())
        {
            final GenericCriteria convertedQuery = QueryBuilderHibernateImpl.convert(
                    CriteriaType.SUB_CRITERIA, domainClassPackage, sessionFactory,
                    childQuery);
            // Copy all root query aliases to sub-queries? Right now letting them have
            // their own
            // addAliases(searchQuery, convertedQuery);
            convertedQueries.add(convertedQuery.getHibernateDetachedCriteria());
        }

        // Now convert the parent
        return CriterionBuilderHibernateImpl.convert(searchCriterion, convertedCriteria,
                convertedQueries, classMetadata);
    }

    /**
     * Apply sort orders of the search criteria to the Hibernate criteria.
     */
    private void addSortOrder()
    {
        for (final SortCriterion criterion : searchQuery.getSortCriteria())
        {
            final SortType sortType = criterion.getSortType();
            switch (sortType)
            {
                case ASCENDING:
                {
                    destination.addOrder(asc(criterion.getPropertyName()));
                    break;
                }

                case DESCENDING:
                {
                    destination.addOrder(desc(criterion.getPropertyName()));
                    break;
                }

                default:
                {
                    // Unknown sort type - fail loudly rather than ignore it
                    throw new BusinessRuleException(unsupportedMessage(sortType));
                }
            }
        }
    }

    /**
     * Adds a {@link ResultTransformer} to the {@link #destination} that makes it
     * distinct.
     */
    private void addDistinct()
    {
        destination.setResultTransformer(DISTINCT_ROOT_ENTITY);
    }

    /**
     * Set result set limits on {@link #destination}. Both the first-result offset
     * and the max-results cap are optional and applied only when non-null.
     */
    private void setLimits()
    {
        if (searchQuery.getFirstResult() != null)
        {
            destination.setFirstResult(searchQuery.getFirstResult().intValue());
        }
        if (searchQuery.getMaxResults() != null)
        {
            destination.setMaxResults(searchQuery.getMaxResults().intValue());
        }
    }

    // ========================= PRIVATE FACTORY METHODS ===================

    /**
     * Create Hibernate search criteria for a root class <T> root entity type
     *
     * @param criteriaType
     *            criteria type (main criteria/detached criteria=sub-criteria)
     * @param clazz
     *            root entity type
     * @return empty hibernate search criteria for root entity
     */
    private GenericCriteria createCriteria(
            final Class<? extends PersistentEntity<?>> clazz)
    {
        return GenericCriteriaFactory.criteria(criteriaType, clazz,
                sessionFactory.getCurrentSession());
    }
}
package pers.medusa.circleindicator.widget;

import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.OvalShape;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;

import java.util.ArrayList;
import java.util.List;

import pers.medusa.circleindicator.R;
import pers.medusa.circleindicator.widget.holder.ShapeHolder;

/**
 * Created by xiayong on 2015/9/29.
 *
 * A custom {@link View} that draws one circle ("tab item") per page of an
 * attached {@link ViewPager}, plus a "moving item" circle that tracks the
 * currently selected page. Appearance (radius, margin, colors, gravity,
 * draw mode) is configurable via XML attributes or setters.
 */
public class CircleIndicator extends View{
    /** The pager this indicator mirrors; set in {@link #setViewPager(ViewPager)}. */
    private ViewPager viewPager;
    /** One circle per pager page, created in {@link #createTabItems()}. */
    private List<ShapeHolder> tabItems;
    /** The highlighted circle drawn on top, marking the current page. */
    private ShapeHolder movingItem;

    //config list
    // current page index and scroll offset, updated from the page-change listener
    private int mCurItemPosition;
    private float mCurItemPositionOffset;
    private float mIndicatorRadius;
    private float mIndicatorMargin;
    private int mIndicatorBackground;
    private int mIndicatorSelectedBackground;
    private Gravity mIndicatorLayoutGravity;
    private Mode mIndicatorMode;

    //default value (used when no XML attribute is supplied)
    private final int DEFAULT_INDICATOR_RADIUS = 10;
    private final int DEFAULT_INDICATOR_MARGIN = 40;
    private final int DEFAULT_INDICATOR_BACKGROUND = Color.BLUE;
    private final int DEFAULT_INDICATOR_SELECTED_BACKGROUND = Color.RED;
    private final int DEFAULT_INDICATOR_LAYOUT_GRAVITY = Gravity.CENTER.ordinal();
    private final int DEFAULT_INDICATOR_MODE = Mode.SOLO.ordinal();

    /** Horizontal placement of the row of circles within this view. */
    public enum Gravity{
        LEFT,
        CENTER,
        RIGHT
    }

    /**
     * How the moving item is composited over the tab items (chooses the
     * PorterDuff xfermode in {@link #createMovingItem()}) and whether it moves
     * continuously while scrolling (INSIDE/OUTSIDE) or jumps on selection (SOLO).
     */
    public enum Mode{
        INSIDE,
        OUTSIDE,
        SOLO
    }

    public CircleIndicator(Context context) {
        super(context);
        init(context, null);
    }

    public CircleIndicator(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }

    public CircleIndicator(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context, attrs);
    }

    // Shared constructor body: allocate state and read XML attributes.
    private void init(Context context,AttributeSet attrs){
        tabItems = new ArrayList<>();
        handleTypedArray(context, attrs);
    }

    // Reads the ci_* styleable attributes, falling back to the DEFAULT_* values.
    private void handleTypedArray(Context context, AttributeSet attrs) {
        if(attrs == null)
            return;
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.CircleIndicator);
        mIndicatorRadius = typedArray.getDimensionPixelSize(R.styleable.CircleIndicator_ci_radius, DEFAULT_INDICATOR_RADIUS);
        mIndicatorMargin = typedArray.getDimensionPixelSize(R.styleable.CircleIndicator_ci_margin, DEFAULT_INDICATOR_MARGIN);
        mIndicatorBackground = typedArray.getColor(R.styleable.CircleIndicator_ci_background, DEFAULT_INDICATOR_BACKGROUND);
        mIndicatorSelectedBackground = typedArray.getColor(R.styleable.CircleIndicator_ci_selected_background,DEFAULT_INDICATOR_SELECTED_BACKGROUND);
        int gravity = typedArray.getInt(R.styleable.CircleIndicator_ci_gravity,DEFAULT_INDICATOR_LAYOUT_GRAVITY);
        mIndicatorLayoutGravity = Gravity.values()[gravity];
        int mode = typedArray.getInt(R.styleable.CircleIndicator_ci_mode,DEFAULT_INDICATOR_MODE);
        mIndicatorMode = Mode.values()[mode];
        typedArray.recycle();
    }

    /**
     * Attaches this indicator to a pager: builds one circle per page, builds
     * the moving circle, and registers a page-change listener.
     * NOTE(review): assumes the pager's adapter is already set - confirm callers.
     */
    public void setViewPager(final ViewPager viewPager){
        this.viewPager = viewPager;
        createTabItems();
        createMovingItem();
        setUpListener();
    }

    private void setUpListener() {
        viewPager.addOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
                super.onPageScrolled(position, positionOffset, positionOffsetPixels);
                // INSIDE/OUTSIDE: track the scroll continuously
                if(mIndicatorMode != Mode.SOLO){
                    trigger(position,positionOffset);
                }
            }

            @Override
            public void onPageSelected(int position) {
                super.onPageSelected(position);
                // SOLO: only snap to the newly selected page
                if(mIndicatorMode == Mode.SOLO){
                    trigger(position,0);
                }
            }
        });
    }

    /**
     * trigger to redraw the indicator when the ViewPager's selected item changed!
     * @param position
     * @param positionOffset
     */
    private void trigger(int position,float positionOffset){
        CircleIndicator.this.mCurItemPosition = position;
        CircleIndicator.this.mCurItemPositionOffset = positionOffset;
        // NOTE(review): Log.e on every scroll event is noisy; consider Log.v or removing.
        Log.e("CircleIndicator", "onPageScrolled()" + position + ":" + positionOffset);
        requestLayout();
        invalidate();
    }

    // One background circle per adapter page; sizes/positions are set later in onLayout.
    private void createTabItems() {
        for (int i = 0; i < viewPager.getAdapter().getCount(); i++) {
            OvalShape circle = new OvalShape();
            ShapeDrawable drawable = new ShapeDrawable(circle);
            ShapeHolder shapeHolder = new ShapeHolder(drawable);
            Paint paint = drawable.getPaint();
            paint.setColor(mIndicatorBackground);
            paint.setAntiAlias(true);
            shapeHolder.setPaint(paint);
            tabItems.add(shapeHolder);
        }
    }

    // The selected-page circle; its xfermode depends on the configured Mode.
    private void createMovingItem() {
        OvalShape circle = new OvalShape();
        ShapeDrawable drawable = new ShapeDrawable(circle);
        movingItem = new ShapeHolder(drawable);
        Paint paint = drawable.getPaint();
        paint.setColor(mIndicatorSelectedBackground);
        paint.setAntiAlias(true);

        switch (mIndicatorMode){
            case INSIDE:
                // only draw where a tab item already is
                paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_ATOP));
                break;
            case OUTSIDE:
                // draw on top of the tab items
                paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_OVER));
                break;
            case SOLO:
                // replace whatever is underneath
                paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC));
                break;
        }

        movingItem.setPaint(paint);
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        Log.e("CircleIndicator","onLayout()");
        super.onLayout(changed, left, top, right, bottom);
        final int width = getWidth();
        final int height = getHeight();
        layoutTabItems(width, height);
        layoutMovingItem(mCurItemPosition, mCurItemPositionOffset);
    }

    // Positions the background circles in a horizontal row, vertically centered.
    private void layoutTabItems(final int containerWidth,final int containerHeight){
        if(tabItems == null){
            throw new IllegalStateException("forget to create tabItems?");
        }
        final float yCoordinate = containerHeight*0.5f;
        final float startPosition = startDrawPosition(containerWidth);
        for(int i=0;i<tabItems.size();i++){
            ShapeHolder item = tabItems.get(i);
            item.resizeShape(2* mIndicatorRadius,2* mIndicatorRadius);
            item.setY(yCoordinate- mIndicatorRadius);
            float x = startPosition + (mIndicatorMargin + mIndicatorRadius*2)*i;
            item.setX(x);
        }
    }

    // X coordinate of the first circle, honoring the configured Gravity.
    // Falls back to 0 (left) when the row is wider than the container.
    private float startDrawPosition(final int containerWidth){
        if(mIndicatorLayoutGravity == Gravity.LEFT)
            return 0;
        float tabItemsLength = tabItems.size()*(2* mIndicatorRadius + mIndicatorMargin)- mIndicatorMargin;
        if(containerWidth<tabItemsLength){
            return 0;
        }
        if(mIndicatorLayoutGravity == Gravity.CENTER){
            return (containerWidth-tabItemsLength)/2;
        }
        return containerWidth - tabItemsLength;
    }

    // Places the moving circle over the current page's item, interpolated
    // toward the next item by positionOffset (0..1).
    private void layoutMovingItem(final int position,final float positionOffset){
        if(movingItem == null){
            throw new IllegalStateException("forget to create movingItem?");
        }
        if(tabItems.size() == 0) {
            return;
        }
        ShapeHolder item = tabItems.get(position);
        movingItem.resizeShape(item.getWidth(), item.getHeight());
        float x = item.getX()+(mIndicatorMargin + mIndicatorRadius*2)*positionOffset;
        movingItem.setX(x);
        movingItem.setY(item.getY());
    }

    @Override
    protected void onDraw(Canvas canvas) {
        Log.e("CircleIndicator", "onDraw()");
        super.onDraw(canvas);
        // Draw into an offscreen layer so the moving item's xfermode composites
        // against the tab items only, not against the window background.
        int sc = canvas.saveLayer(0, 0, getWidth(), getHeight(), null,
                Canvas.MATRIX_SAVE_FLAG |
                        Canvas.CLIP_SAVE_FLAG |
                        Canvas.HAS_ALPHA_LAYER_SAVE_FLAG |
                        Canvas.FULL_COLOR_LAYER_SAVE_FLAG |
                        Canvas.CLIP_TO_LAYER_SAVE_FLAG);
        for(ShapeHolder item : tabItems){
            drawItem(canvas,item);
        }

        // moving item is drawn last, on top of the tab items
        if(movingItem != null){
            drawItem(canvas,movingItem);
        }
        canvas.restoreToCount(sc);
    }

    // Draws one circle at its holder's (x, y) without disturbing the canvas matrix.
    private void drawItem(Canvas canvas,ShapeHolder shapeHolder ) {
        canvas.save();
        canvas.translate(shapeHolder.getX(),shapeHolder.getY());
        shapeHolder.getShape().draw(canvas);
        canvas.restore();
    }

    public void setIndicatorRadius(float mIndicatorRadius) {
        this.mIndicatorRadius = mIndicatorRadius;
    }

    public void setIndicatorMargin(float mIndicatorMargin) {
        this.mIndicatorMargin = mIndicatorMargin;
    }

    public void setIndicatorBackground(int mIndicatorBackground) {
        this.mIndicatorBackground = mIndicatorBackground;
    }

    public void setIndicatorSelectedBackground(int mIndicatorSelectedBackground) {
        this.mIndicatorSelectedBackground = mIndicatorSelectedBackground;
    }

    public void setIndicatorLayoutGravity(Gravity mIndicatorLayoutGravity) {
        this.mIndicatorLayoutGravity = mIndicatorLayoutGravity;
    }

    public void setIndicatorMode(Mode mIndicatorMode) {
        this.mIndicatorMode = mIndicatorMode;
    }
}
package org.apache.pdfbox.pdmodel.graphics.image; import java.io.IOException; import java.io.InputStream; import java.util.List; import org.apache.pdfbox.cos.COSArray; import org.apache.pdfbox.cos.COSBase; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.cos.COSStream; import org.apache.pdfbox.filter.DecodeResult; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDResources; import org.apache.pdfbox.pdmodel.common.PDMetadata; import org.apache.pdfbox.pdmodel.common.PDStream; import org.apache.pdfbox.pdmodel.graphics.PDXObject; import org.apache.pdfbox.pdmodel.graphics.color.PDColorSpace; import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceGray; import android.graphics.Bitmap; import android.graphics.Color; import android.util.Log; /** * An Image XObject. * * @author John Hewson * @author Ben Litchfield */ public final class PDImageXObject extends PDXObject implements PDImage { private Bitmap cachedImage; private PDColorSpace colorSpace; private PDResources resources; // current resource dictionary (has color spaces) /** * Creates a thumbnail Image XObject from the given COSBase and name. * @param cosStream the COS stream * @return an XObject * @throws IOException if there is an error creating the XObject. */ public static PDImageXObject createThumbnail(COSStream cosStream) throws IOException { // thumbnails are special, any non-null subtype is treated as being "Image" PDStream pdStream = new PDStream(cosStream); return new PDImageXObject(pdStream, null); } /** * Creates an Image XObject in the given document. * @param document the current document * @throws java.io.IOException if there is an error creating the XObject. */ public PDImageXObject(PDDocument document) throws IOException { this(new PDStream(document), null); } /** * Creates an Image XObject in the given document using the given filtered stream. 
* @param document the current document * @param filteredStream a filtered stream of image data * @param cosFilter the filter or a COSArray of filters * @param width the image width * @param height the image height * @param bitsPerComponent the bits per component * @param initColorSpace the color space * @throws IOException if there is an error creating the XObject. */ public PDImageXObject(PDDocument document, InputStream filteredStream, COSBase cosFilter, int width, int height, int bitsPerComponent , PDColorSpace initColorSpace) throws IOException { super(new PDStream(document, filteredStream, true), COSName.IMAGE); getCOSStream().setItem(COSName.FILTER, cosFilter); resources = null; colorSpace = null; setBitsPerComponent(bitsPerComponent); setWidth(width); setHeight(height); setColorSpace(initColorSpace); } /** * Creates an Image XObject with the given stream as its contents and current color spaces. * @param stream the XObject stream to read * @param resources the current resources * @throws java.io.IOException if there is an error creating the XObject. */ public PDImageXObject(PDStream stream, PDResources resources) throws IOException { this(stream, resources, stream.getStream().getDecodeResult()); } // repairs parameters using decode result private PDImageXObject(PDStream stream, PDResources resources, DecodeResult decodeResult) { super(repair(stream, decodeResult), COSName.IMAGE); this.resources = resources; // this.colorSpace = decodeResult.getJPXColorSpace();TODO } // repairs parameters using decode result private static PDStream repair(PDStream stream, DecodeResult decodeResult) { stream.getStream().addAll(decodeResult.getParameters()); return stream; } /** * Returns the metadata associated with this XObject, or null if there is none. * @return the metadata associated with this object. 
*/ public PDMetadata getMetadata() { COSStream cosStream = (COSStream) getCOSStream().getDictionaryObject(COSName.METADATA); if (cosStream != null) { return new PDMetadata(cosStream); } return null; } /** * Sets the metadata associated with this XObject, or null if there is none. * @param meta the metadata associated with this object */ public void setMetadata(PDMetadata meta) { getCOSStream().setItem(COSName.METADATA, meta); } /** * Returns the key of this XObject in the structural parent tree. * @return this object's key the structural parent tree */ public int getStructParent() { return getCOSStream().getInt(COSName.STRUCT_PARENT, 0); } /** * Sets the key of this XObject in the structural parent tree. * @param key the new key for this XObject */ public void setStructParent(int key) { getCOSStream().setInt(COSName.STRUCT_PARENT, key); } /** * {@inheritDoc} * The returned images are cached for the lifetime of this XObject. */ @Override public Bitmap getImage() throws IOException { if (cachedImage != null) { return cachedImage; } // get image as RGB Bitmap image = SampledImageReader.getRGBImage(this, getColorKeyMask()); // soft mask (overrides explicit mask) PDImageXObject softMask = getSoftMask(); if (softMask != null) { image = applyMask(image, softMask.getOpaqueImage(), true); } else { // explicit mask PDImageXObject mask = getMask(); if (mask != null) { image = applyMask(image, mask.getOpaqueImage(), false); } } cachedImage = image; return image; } /** * {@inheritDoc} * The returned images are not cached. */ // @Override // public BufferedImage getStencilImage(Paint paint) throws IOException // { // if (!isStencil()) // { // throw new IllegalStateException("Image is not a stencil"); // } // return SampledImageReader.getStencilImage(this, paint); // }TODO /** * Returns an RGB buffered image containing the opaque image stream without any masks applied. * If this Image XObject is a mask then the buffered image will contain the raw mask. 
* @return the image without any masks applied * @throws IOException if the image cannot be read */ public Bitmap getOpaqueImage() throws IOException { return SampledImageReader.getRGBImage(this, null); } // explicit mask: RGB + Binary -> ARGB // soft mask: RGB + Gray -> ARGB private Bitmap applyMask(Bitmap image, Bitmap mask, boolean isSoft) throws IOException { if (mask == null) { return image; } int width = image.getWidth(); int height = image.getHeight(); // compose to ARGB Bitmap masked = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); //new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); // scale mask to fit image if (mask.getWidth() != width || mask.getHeight() != height) { mask = Bitmap.createScaledBitmap(mask, width, height, true); } int alphaPixel; for (int y = 0; y < height; y++) { for (int x = 0; x < width; x++) { int color = image.getPixel(x, y); alphaPixel = Color.alpha(mask.getPixel(x, y)); if (!isSoft) { alphaPixel = 255 - alphaPixel; } masked.setPixel(x, y, Color.argb(alphaPixel, Color.red(color), Color.green(color), Color.blue(color))); } } return masked; } /** * Returns the Mask Image XObject associated with this image, or null if there is none. * @return Mask Image XObject */ public PDImageXObject getMask() throws IOException { COSBase mask = getCOSStream().getDictionaryObject(COSName.MASK); if (mask instanceof COSArray) { // color key mask, no explicit mask to return return null; } else { COSStream cosStream = (COSStream)getCOSStream().getDictionaryObject(COSName.MASK); if (cosStream != null) { return new PDImageXObject(new PDStream(cosStream), null); // always DeviceGray } return null; } } /** * Returns the color key mask array associated with this image, or null if there is none. 
* @return Mask Image XObject */ public COSArray getColorKeyMask() { COSBase mask = getCOSStream().getDictionaryObject(COSName.MASK); if (mask instanceof COSArray) { return (COSArray)mask; } return null; } /** * Returns the Soft Mask Image XObject associated with this image, or null if there is none. * @return the SMask Image XObject, or null. */ public PDImageXObject getSoftMask() throws IOException { COSStream cosStream = (COSStream)getCOSStream().getDictionaryObject(COSName.SMASK); if (cosStream != null) { return new PDImageXObject(new PDStream(cosStream), null); // always DeviceGray } return null; } @Override public int getBitsPerComponent() { if (isStencil()) { return 1; } else { return getCOSStream().getInt(COSName.BITS_PER_COMPONENT, COSName.BPC); } } @Override public void setBitsPerComponent(int bpc) { getCOSStream().setInt(COSName.BITS_PER_COMPONENT, bpc); } @Override public PDColorSpace getColorSpace() throws IOException { if (colorSpace == null) { COSBase cosBase = getCOSStream().getDictionaryObject(COSName.COLORSPACE, COSName.CS); if (cosBase != null) { colorSpace = PDColorSpace.create(cosBase, resources); } else if (isStencil()) { // stencil mask color space must be gray, it is often missing return PDDeviceGray.INSTANCE; } else { // an image without a color space is always broken throw new IOException("could not determine color space"); } } return colorSpace; } @Override public PDStream getStream() throws IOException { return getPDStream(); } @Override public void setColorSpace(PDColorSpace cs) { getCOSStream().setItem(COSName.COLORSPACE, cs != null ? 
cs.getCOSObject() : null); } @Override public int getHeight() { return getCOSStream().getInt(COSName.HEIGHT); } @Override public void setHeight(int h) { getCOSStream().setInt(COSName.HEIGHT, h); } @Override public int getWidth() { return getCOSStream().getInt(COSName.WIDTH); } @Override public void setWidth(int w) { getCOSStream().setInt(COSName.WIDTH, w); } @Override public boolean getInterpolate() { return getCOSStream().getBoolean(COSName.INTERPOLATE, false); } @Override public void setInterpolate(boolean value) { getCOSStream().setBoolean(COSName.INTERPOLATE, value); } @Override public void setDecode(COSArray decode) { getCOSStream().setItem(COSName.DECODE, decode); } @Override public COSArray getDecode() { COSBase decode = getCOSStream().getDictionaryObject(COSName.DECODE); if (decode instanceof COSArray) { return (COSArray) decode; } return null; } @Override public boolean isStencil() { return getCOSStream().getBoolean(COSName.IMAGE_MASK, false); } @Override public void setStencil(boolean isStencil) { getCOSStream().setBoolean(COSName.IMAGE_MASK, isStencil); } /** * This will get the suffix for this image type, e.g. jpg/png. * @return The image suffix or null if not available. */ public String getSuffix() { List<COSName> filters = getPDStream().getFilters(); if (filters == null) { return "png"; } else if (filters.contains(COSName.DCT_DECODE)) { return "jpg"; } else if (filters.contains(COSName.JPX_DECODE)) { return "jpx"; } else if (filters.contains(COSName.CCITTFAX_DECODE)) { return "tiff"; } else if (filters.contains(COSName.FLATE_DECODE) || filters.contains(COSName.LZW_DECODE) || filters.contains(COSName.RUN_LENGTH_DECODE)) { return "png"; } else { Log.w("PdfBoxAndroid", "getSuffix() returns null, filters: " + filters); // TODO more... return null; } } }
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.ex;

import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInspection.CommonProblemDescriptor;
import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.reference.RefElement;
import com.intellij.codeInspection.reference.RefEntity;
import com.intellij.codeInspection.reference.RefManagerImpl;
import com.intellij.codeInspection.ui.InspectionResultsView;
import com.intellij.codeInspection.ui.InspectionTree;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CustomShortcutSet;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.presentation.java.SymbolPresentationUtil;
import com.intellij.util.SequentialModalProgressTask;
import com.intellij.util.SequentialTask;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.util.*;

/**
 * Base action for applying an inspection tool's quick fix to the elements or
 * problem descriptors currently selected in the inspection results view.
 *
 * @author max
 */
public class QuickFixAction extends AnAction {
  // the inspection tool this quick-fix action belongs to; used to match selection in update()
  protected final InspectionToolWrapper myToolWrapper;

  /** Returns the inspection results view the event originated from, or null. */
  public static InspectionResultsView getInvoker(AnActionEvent e) {
    return InspectionResultsView.DATA_KEY.getData(e.getDataContext());
  }

  protected QuickFixAction(String text, @NotNull InspectionToolWrapper toolWrapper) {
    this(text, AllIcons.Actions.CreateFromUsage, null, toolWrapper);
  }

  protected QuickFixAction(String text, Icon icon, KeyStroke keyStroke, @NotNull InspectionToolWrapper toolWrapper) {
    super(text, null, icon);
    myToolWrapper = toolWrapper;
    if (keyStroke != null) {
      // make the fix invocable by keyboard while the results view has focus
      registerCustomShortcutSet(new CustomShortcutSet(keyStroke), null);
    }
  }

  /**
   * Enables the action only when the selection belongs to exactly this tool and
   * there is something applicable selected (elements or descriptors, depending
   * on {@link #isProblemDescriptorsAcceptable()}).
   */
  @Override
  public void update(AnActionEvent e) {
    final InspectionResultsView view = getInvoker(e);
    if (view == null) {
      e.getPresentation().setEnabled(false);
      return;
    }
    // hidden/disabled by default; re-enabled below when the selection matches
    e.getPresentation().setVisible(false);
    e.getPresentation().setEnabled(false);

    final InspectionTree tree = view.getTree();
    final InspectionToolWrapper toolWrapper = tree.getSelectedToolWrapper();
    if (!view.isSingleToolInSelection() || toolWrapper != myToolWrapper) {
      return;
    }

    if (!isProblemDescriptorsAcceptable() && tree.getSelectedElements().length > 0 ||
        isProblemDescriptorsAcceptable() && tree.getSelectedDescriptors().length > 0) {
      e.getPresentation().setVisible(true);
      e.getPresentation().setEnabled(true);
    }
  }

  /** Whether this action operates on problem descriptors (true) or ref entities (false). */
  protected boolean isProblemDescriptorsAcceptable() {
    return false;
  }

  /** Presentation text for the fix; subclasses may specialize per target entity. */
  public String getText(RefEntity where) {
    return getTemplatePresentation().getText();
  }

  @Override
  public void actionPerformed(final AnActionEvent e) {
    final InspectionResultsView view = getInvoker(e);
    final InspectionTree tree = view.getTree();
    // descriptor-based fixes take precedence when descriptors are selected
    if (isProblemDescriptorsAcceptable()) {
      final CommonProblemDescriptor[] descriptors = tree.getSelectedDescriptors();
      if (descriptors.length > 0) {
        doApplyFix(view.getProject(), descriptors, tree.getContext());
        return;
      }
    }

    doApplyFix(getSelectedElements(e), view);
  }

  /**
   * Hook for subclasses: apply the fix for the given descriptors. Elements that
   * should no longer be reported are added to {@code ignoredElements}.
   * Default implementation does nothing.
   */
  protected void applyFix(@NotNull Project project,
                          @NotNull GlobalInspectionContextImpl context,
                          @NotNull CommonProblemDescriptor[] descriptors,
                          @NotNull Set<PsiElement> ignoredElements) {
  }

  /**
   * Descriptor-based entry point: checks writability, then runs the fixes inside
   * a single global write command, driven by a modal progress task, and finally
   * refreshes the inspection views.
   */
  private void doApplyFix(@NotNull final Project project,
                          @NotNull final CommonProblemDescriptor[] descriptors,
                          @NotNull final GlobalInspectionContextImpl context) {
    // collect files that must be made writable before any fix runs
    final Set<VirtualFile> readOnlyFiles = new THashSet<VirtualFile>();
    for (CommonProblemDescriptor descriptor : descriptors) {
      final PsiElement psiElement = descriptor instanceof ProblemDescriptor ? ((ProblemDescriptor)descriptor).getPsiElement() : null;
      if (psiElement != null && !psiElement.isWritable()) {
        readOnlyFiles.add(psiElement.getContainingFile().getVirtualFile());
      }
    }

    if (!FileModificationService.getInstance().prepareVirtualFilesForWrite(project, readOnlyFiles)) return;

    // suspend the ref manager's read action while we mutate the PSI
    final RefManagerImpl refManager = (RefManagerImpl)context.getRefManager();
    final boolean initial = refManager.isInProcess();
    refManager.inspectionReadActionFinished();

    try {
      final Set<PsiElement> ignoredElements = new HashSet<PsiElement>();

      CommandProcessor.getInstance().executeCommand(project, new Runnable() {
        @Override
        public void run() {
          CommandProcessor.getInstance().markCurrentCommandAsGlobal(project);
          ApplicationManager.getApplication().runWriteAction(new Runnable() {
            @Override
            public void run() {
              // run fixes one descriptor at a time under a modal progress dialog
              final SequentialModalProgressTask progressTask = new SequentialModalProgressTask(project, getTemplatePresentation().getText(), false);
              progressTask.setMinIterationTime(200);
              progressTask.setTask(new PerformFixesTask(project, descriptors, ignoredElements, progressTask, context));
              ProgressManager.getInstance().run(progressTask);
            }
          });
        }
      }, getTemplatePresentation().getText(), null);

      refreshViews(project, ignoredElements, myToolWrapper);
    }
    finally { //to make offline view lazy
      if (initial) refManager.inspectionReadActionStarted();
    }
  }

  /**
   * Entity-based entry point: runs {@link #applyFix(RefEntity[])} inside a global
   * write command and refreshes the views if the fix requested it.
   */
  public void doApplyFix(@NotNull final RefEntity[] refElements, @NotNull InspectionResultsView view) {
    final RefManagerImpl refManager = (RefManagerImpl)view.getGlobalInspectionContext().getRefManager();

    final boolean initial = refManager.isInProcess();

    refManager.inspectionReadActionFinished();

    try {
      // single-element array so the inner write action can report back
      final boolean[] refreshNeeded = {false};
      if (refElements.length > 0) {
        final Project project = refElements[0].getRefManager().getProject();
        CommandProcessor.getInstance().executeCommand(project, new Runnable() {
          @Override
          public void run() {
            CommandProcessor.getInstance().markCurrentCommandAsGlobal(project);
            ApplicationManager.getApplication().runWriteAction(new Runnable() {
              @Override
              public void run() {
                refreshNeeded[0] = applyFix(refElements);
              }
            });
          }
        }, getTemplatePresentation().getText(), null);
      }
      if (refreshNeeded[0]) {
        refreshViews(view.getProject(), refElements, myToolWrapper);
      }
    }
    finally {  //to make offline view lazy
      if (initial) refManager.inspectionReadActionStarted();
    }
  }

  /**
   * Removes the given entities from the reference graph and refreshes all views
   * that may still display them.
   */
  public static void removeElements(@NotNull RefEntity[] refElements, @NotNull Project project, @NotNull InspectionToolWrapper toolWrapper) {
    refreshViews(project, refElements, toolWrapper);
    final ArrayList<RefElement> deletedRefs = new ArrayList<RefElement>(1);
    for (RefEntity refElement : refElements) {
      if (!(refElement instanceof RefElement)) continue;
      refElement.getRefManager().removeRefElement((RefElement)refElement, deletedRefs);
    }
  }

  /** Collects the virtual files of all (valid) elements, for a writability check. */
  private static Set<VirtualFile> getReadOnlyFiles(@NotNull RefEntity[] refElements) {
    Set<VirtualFile> readOnlyFiles = new THashSet<VirtualFile>();
    for (RefEntity refElement : refElements) {
      PsiElement psiElement = refElement instanceof RefElement ? ((RefElement)refElement).getElement() : null;
      if (psiElement == null || psiElement.getContainingFile() == null) continue;
      readOnlyFiles.add(psiElement.getContainingFile().getVirtualFile());
    }
    return readOnlyFiles;
  }

  /**
   * Returns the currently selected entities, sorted per file by DESCENDING text
   * offset (note the inverted comparator below).
   */
  private static RefEntity[] getSelectedElements(AnActionEvent e) {
    final InspectionResultsView invoker = getInvoker(e);
    if (invoker == null) return new RefElement[0];
    List<RefEntity> selection = new ArrayList<RefEntity>(Arrays.asList(invoker.getTree().getSelectedElements()));
    PsiDocumentManager.getInstance(invoker.getProject()).commitAllDocuments();
    Collections.sort(selection, new Comparator<RefEntity>() {
      @Override
      public int compare(RefEntity o1, RefEntity o2) {
        if (o1 instanceof RefElement && o2 instanceof RefElement) {
          RefElement r1 = (RefElement)o1;
          RefElement r2 = (RefElement)o2;
          final PsiElement element1 = r1.getElement();
          final PsiElement element2 = r2.getElement();
          final PsiFile containingFile1 = element1.getContainingFile();
          final PsiFile containingFile2 = element2.getContainingFile();
          if (containingFile1 == containingFile2) {
            int i1 = element1.getTextOffset();
            int i2 = element2.getTextOffset();
            // NOTE(review): deliberately inverted — elements later in the file sort
            // first, presumably so fixes applied from the bottom up do not shift
            // the offsets of elements still to be fixed; confirm before "fixing".
            if (i1 < i2) {
              return 1;
            }
            else if (i1 > i2){
              return -1;
            }
            return 0;
          }
          return containingFile1.getName().compareTo(containingFile2.getName());
        }
        if (o1 instanceof RefElement) {
          return 1;
        }
        if (o2 instanceof RefElement) {
          return -1;
        }
        return o1.getName().compareTo(o2.getName());
      }
    });

    return selection.toArray(new RefEntity[selection.size()]);
  }

  /**
   * Marks the given elements as ignored by this tool in every running inspection
   * context, then refreshes those contexts' views.
   */
  private static void refreshViews(@NotNull Project project, @NotNull Set<PsiElement> selectedElements, @NotNull InspectionToolWrapper toolWrapper) {
    InspectionManagerEx managerEx = (InspectionManagerEx)InspectionManager.getInstance(project);
    final Set<GlobalInspectionContextImpl> runningContexts = managerEx.getRunningContexts();
    for (GlobalInspectionContextImpl context : runningContexts) {
      for (PsiElement element : selectedElements) {
        context.ignoreElement(toolWrapper.getTool(), element);
      }
      context.refreshViews();
    }
  }

  /** Ref-entity overload: resolves entities to valid PSI elements, then delegates. */
  private static void refreshViews(@NotNull Project project, @NotNull RefEntity[] refElements, @NotNull InspectionToolWrapper toolWrapper) {
    final Set<PsiElement> ignoredElements = new HashSet<PsiElement>();
    for (RefEntity element : refElements) {
      final PsiElement psiElement = element instanceof RefElement ? ((RefElement)element).getElement() : null;
      if (psiElement != null && psiElement.isValid()) {
        ignoredElements.add(psiElement);
      }
    }
    refreshViews(project, ignoredElements, toolWrapper);
  }

  /**
   * @return true if immediate UI update needed.
   */
  protected boolean applyFix(@NotNull RefEntity[] refElements) {
    Set<VirtualFile> readOnlyFiles = getReadOnlyFiles(refElements);
    if (!readOnlyFiles.isEmpty()) {
      final Project project = refElements[0].getRefManager().getProject();
      final ReadonlyStatusHandler.OperationStatus operationStatus = ReadonlyStatusHandler.getInstance(project).ensureFilesWritable(
        VfsUtilCore.toVirtualFileArray(readOnlyFiles));
      if (operationStatus.hasReadonlyFiles()) return false;
    }
    return true;
  }

  /**
   * Sequential task that applies the fix to one descriptor per iteration so the
   * modal progress dialog stays responsive and reports a meaningful fraction.
   */
  private class PerformFixesTask implements SequentialTask {
    @NotNull
    private final Project myProject;
    private final CommonProblemDescriptor[] myDescriptors;
    @NotNull
    private final Set<PsiElement> myIgnoredElements;
    private final SequentialModalProgressTask myTask;
    @NotNull
    private final GlobalInspectionContextImpl myContext;
    // index of the next descriptor to process
    private int myCount = 0;

    public PerformFixesTask(@NotNull Project project,
                            @NotNull CommonProblemDescriptor[] descriptors,
                            @NotNull Set<PsiElement> ignoredElements,
                            @NotNull SequentialModalProgressTask task,
                            @NotNull GlobalInspectionContextImpl context) {
      myProject = project;
      myDescriptors = descriptors;
      myIgnoredElements = ignoredElements;
      myTask = task;
      myContext = context;
    }

    @Override
    public void prepare() {
    }

    @Override
    public boolean isDone() {
      return myCount > myDescriptors.length - 1;
    }

    @Override
    public boolean iteration() {
      final CommonProblemDescriptor descriptor = myDescriptors[myCount++];
      ProgressIndicator indicator = myTask.getIndicator();
      if (indicator != null) {
        indicator.setFraction((double)myCount / myDescriptors.length);
        if (descriptor instanceof ProblemDescriptor) {
          final PsiElement psiElement = ((ProblemDescriptor)descriptor).getPsiElement();
          if (psiElement != null) {
            indicator.setText("Processing " + SymbolPresentationUtil.getSymbolPresentableText(psiElement));
          }
        }
      }

      // delegate the actual fix for this single descriptor to the subclass hook
      applyFix(myProject, myContext, new CommonProblemDescriptor[]{descriptor}, myIgnoredElements);

      return isDone();
    }

    @Override
    public void stop() {
    }
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.plugin.hive; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.airlift.slice.Slice; import io.airlift.stats.Distribution; import io.airlift.units.DataSize; import io.prestosql.connector.CatalogName; import io.prestosql.execution.Lifespan; import io.prestosql.metadata.MetadataManager; import io.prestosql.metadata.Split; import io.prestosql.operator.DriverContext; import io.prestosql.operator.ScanFilterAndProjectOperator.ScanFilterAndProjectOperatorFactory; import io.prestosql.operator.SourceOperator; import io.prestosql.operator.SourceOperatorFactory; import io.prestosql.operator.TableScanOperator.TableScanOperatorFactory; import io.prestosql.operator.project.CursorProcessor; import io.prestosql.operator.project.PageProcessor; import io.prestosql.plugin.hive.orc.OrcPageSourceFactory; import io.prestosql.spi.Page; import io.prestosql.spi.block.Block; import io.prestosql.spi.classloader.ThreadContextClassLoader; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ConnectorPageSource; import io.prestosql.spi.connector.ConnectorSession; import io.prestosql.spi.predicate.TupleDomain; import io.prestosql.spi.type.Type; import io.prestosql.sql.gen.ExpressionCompiler; import io.prestosql.sql.gen.PageFunctionCompiler; import 
io.prestosql.sql.planner.plan.PlanNodeId; import io.prestosql.sql.relational.RowExpression; import io.prestosql.testing.TestingConnectorSession; import io.prestosql.testing.TestingSplit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcFile; import org.apache.hadoop.hive.ql.io.orc.OrcFile.WriterOptions; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcSerde; import org.apache.hadoop.hive.ql.io.orc.Writer; import org.apache.hadoop.hive.serde2.Serializer; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionCodecFactory; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.orc.NullMemoryManager; import org.apache.orc.impl.WriterImpl; import org.joda.time.DateTimeZone; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Arrays; import java.util.List; import java.util.Optional; import java.util.OptionalInt; import java.util.Properties; import java.util.Random; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.function.Supplier; 
import java.util.stream.Collectors; import static com.google.common.base.Predicates.not; import static com.google.common.collect.Iterables.filter; import static com.google.common.collect.Iterables.transform; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.airlift.testing.Assertions.assertBetweenInclusive; import static io.airlift.units.DataSize.Unit.BYTE; import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; import static io.prestosql.orc.OrcReader.MAX_BATCH_SIZE; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY; import static io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.REGULAR; import static io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; import static io.prestosql.plugin.hive.HiveTestUtils.SESSION; import static io.prestosql.plugin.hive.HiveTestUtils.TYPE_MANAGER; import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType; import static io.prestosql.sql.relational.Expressions.field; import static io.prestosql.testing.TestingHandles.TEST_TABLE_HANDLE; import static io.prestosql.testing.TestingSession.testSessionBuilder; import static io.prestosql.testing.TestingTaskContext.createTaskContext; import static java.util.Objects.requireNonNull; import static java.util.concurrent.Executors.newCachedThreadPool; import static java.util.concurrent.Executors.newScheduledThreadPool; import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT; import static org.apache.hadoop.hive.ql.io.orc.CompressionKind.ZLIB; import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB; import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector; import static 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS_CODEC;
import static org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS_TYPE;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

/**
 * Verifies that the ORC page source and the operators built on top of it report system
 * memory usage correctly: before/after lazy block loading, across stripe boundaries, and
 * that usage drops back to zero once the source is exhausted. Test data is written with
 * the Hive ORC record writer (obtained via reflection, see {@link #getOrcWriterConstructor()}).
 */
public class TestOrcPageSourceMemoryTracking
{
    // Package-private Hive writer class; only reachable by name, hence the reflection below.
    private static final String ORC_RECORD_WRITER = OrcOutputFormat.class.getName() + "$OrcRecordWriter";
    private static final Constructor<? extends RecordWriter> WRITER_CONSTRUCTOR = getOrcWriterConstructor();
    private static final Configuration CONFIGURATION = new Configuration();
    private static final int NUM_ROWS = 50000;
    // Rows per ORC stripe; NUM_ROWS / STRIPE_ROWS determines how many stripes the file has.
    private static final int STRIPE_ROWS = 20000;
    private static final MetadataManager metadata = createTestMetadataManager();
    private static final ExpressionCompiler EXPRESSION_COMPILER = new ExpressionCompiler(metadata, new PageFunctionCompiler(metadata, 0));

    private final Random random = new Random();
    // One partition-key column (never written to the file) and one regular varchar column.
    private final List<TestColumn> testColumns = ImmutableList.<TestColumn>builder()
            .add(new TestColumn("p_empty_string", javaStringObjectInspector, () -> "", true))
            .add(new TestColumn("p_string", javaStringObjectInspector, () -> Long.toHexString(random.nextLong()), false))
            .build();

    private File tempFile;
    private TestPreparer testPreparer;

    /** Row counts exercised by {@link #testMaxReadBytes(int)}. */
    @DataProvider(name = "rowCount")
    public static Object[][] rowCount()
    {
        return new Object[][] {{50_000}, {10_000}, {5_000}};
    }

    @BeforeClass
    public void setUp()
            throws Exception
    {
        tempFile = File.createTempFile("presto_test_orc_page_source_memory_tracking", "orc");
        // Delete immediately: only the unique path is wanted; the writer creates the file itself.
        tempFile.delete();
        testPreparer = new TestPreparer(tempFile.getAbsolutePath());
    }

    @AfterClass(alwaysRun = true)
    public void tearDown()
    {
        tempFile.delete();
    }

    /**
     * Reads the whole file directly through the page source and checks reported memory at
     * three phases (one per stripe region): before lazy-loading a block, after forcing the
     * load, and finally that usage returns to 0 when the source is finished.
     */
    @Test
    public void testPageSource()
            throws Exception
    {
        // Numbers used in assertions in this test may change when implementation is modified,
        // feel free to change them if they break in the future
        FileFormatDataSourceStats stats = new FileFormatDataSourceStats();
        ConnectorPageSource pageSource = testPreparer.newPageSource(stats);

        assertEquals(pageSource.getSystemMemoryUsage(), 0);

        long memoryUsage = -1;
        int totalRows = 0;
        while (totalRows < 20000) {
            assertFalse(pageSource.isFinished());
            Page page = pageSource.getNextPage();
            assertNotNull(page);
            Block block = page.getBlock(1);

            if (memoryUsage == -1) {
                assertBetweenInclusive(pageSource.getSystemMemoryUsage(), 180000L, 189999L); // Memory usage before lazy-loading the block
                createUnboundedVarcharType().getSlice(block, block.getPositionCount() - 1); // trigger loading for lazy block
                memoryUsage = pageSource.getSystemMemoryUsage();
                assertBetweenInclusive(memoryUsage, 460000L, 469999L); // Memory usage after lazy-loading the actual block
            }
            else {
                // Once loaded, repeated reads must not change the reported usage.
                assertEquals(pageSource.getSystemMemoryUsage(), memoryUsage);
                createUnboundedVarcharType().getSlice(block, block.getPositionCount() - 1); // trigger loading for lazy block
                assertEquals(pageSource.getSystemMemoryUsage(), memoryUsage);
            }
            totalRows += page.getPositionCount();
        }

        // Second stripe: same expectations as the first.
        memoryUsage = -1;
        while (totalRows < 40000) {
            assertFalse(pageSource.isFinished());
            Page page = pageSource.getNextPage();
            assertNotNull(page);
            Block block = page.getBlock(1);

            if (memoryUsage == -1) {
                assertBetweenInclusive(pageSource.getSystemMemoryUsage(), 180000L, 189999L); // Memory usage before lazy-loading the block
                createUnboundedVarcharType().getSlice(block, block.getPositionCount() - 1); // trigger loading for lazy block
                memoryUsage = pageSource.getSystemMemoryUsage();
                assertBetweenInclusive(memoryUsage, 460000L, 469999L); // Memory usage after lazy-loading the actual block
            }
            else {
                assertEquals(pageSource.getSystemMemoryUsage(), memoryUsage);
                createUnboundedVarcharType().getSlice(block, block.getPositionCount() - 1); // trigger loading for lazy block
                assertEquals(pageSource.getSystemMemoryUsage(), memoryUsage);
            }
            totalRows += page.getPositionCount();
        }

        // Final (smaller) stripe region: NUM_ROWS - 40000 = 10000 rows, hence lower bounds.
        memoryUsage = -1;
        while (totalRows < NUM_ROWS) {
            assertFalse(pageSource.isFinished());
            Page page = pageSource.getNextPage();
            assertNotNull(page);
            Block block = page.getBlock(1);

            if (memoryUsage == -1) {
                assertBetweenInclusive(pageSource.getSystemMemoryUsage(), 90000L, 99999L); // Memory usage before lazy-loading the block
                createUnboundedVarcharType().getSlice(block, block.getPositionCount() - 1); // trigger loading for lazy block
                memoryUsage = pageSource.getSystemMemoryUsage();
                assertBetweenInclusive(memoryUsage, 360000L, 369999L); // Memory usage after lazy-loading the actual block
            }
            else {
                assertEquals(pageSource.getSystemMemoryUsage(), memoryUsage);
                createUnboundedVarcharType().getSlice(block, block.getPositionCount() - 1); // trigger loading for lazy block
                assertEquals(pageSource.getSystemMemoryUsage(), memoryUsage);
            }
            totalRows += page.getPositionCount();
        }

        assertFalse(pageSource.isFinished());
        assertNull(pageSource.getNextPage());
        assertTrue(pageSource.isFinished());
        assertEquals(pageSource.getSystemMemoryUsage(), 0);
        pageSource.close();
    }

    /**
     * Verifies that orc_max_read_block_size bounds page sizes: with rows that keep growing,
     * the reader must shrink its batch size so that each page stays under the configured
     * limit (or contains a single row). Also checks the max-combined-bytes-per-row stat.
     */
    @Test(dataProvider = "rowCount")
    public void testMaxReadBytes(int rowCount)
            throws Exception
    {
        int maxReadBytes = 1_000;
        HiveConfig config = new HiveConfig();
        config.setOrcMaxReadBlockSize(new DataSize(maxReadBytes, BYTE));
        ConnectorSession session = new TestingConnectorSession(new HiveSessionProperties(config, new OrcFileWriterConfig(), new ParquetFileWriterConfig()).getSessionProperties());
        FileFormatDataSourceStats stats = new FileFormatDataSourceStats();

        // Build a table where every row gets larger, so we can test that the "batchSize" reduces
        int numColumns = 5;
        int step = 250;
        ImmutableList.Builder<TestColumn> columnBuilder = ImmutableList.<TestColumn>builder()
                .add(new TestColumn("p_empty_string", javaStringObjectInspector, () -> "", true));
        GrowingTestColumn[] dataColumns = new GrowingTestColumn[numColumns];
        for (int i = 0; i < numColumns; i++) {
            dataColumns[i] = new GrowingTestColumn("p_string", javaStringObjectInspector, () -> Long.toHexString(random.nextLong()), false, step * (i + 1));
            columnBuilder.add(dataColumns[i]);
        }
        List<TestColumn> testColumns = columnBuilder.build();
        File tempFile = File.createTempFile("presto_test_orc_page_source_max_read_bytes", "orc");
        tempFile.delete();

        TestPreparer testPreparer = new TestPreparer(tempFile.getAbsolutePath(), testColumns, rowCount, rowCount);
        ConnectorPageSource pageSource = testPreparer.newPageSource(stats, session);

        try {
            int positionCount = 0;
            while (true) {
                Page page = pageSource.getNextPage();
                if (pageSource.isFinished()) {
                    break;
                }
                assertNotNull(page);
                page = page.getLoadedPage();
                positionCount += page.getPositionCount();
                // assert upper bound is tight
                // ignore the first MAX_BATCH_SIZE rows given the sizes are set when loading the blocks
                if (positionCount > MAX_BATCH_SIZE) {
                    // either the block is bounded by maxReadBytes or we just load one single large block
                    // an error margin MAX_BATCH_SIZE / step is needed given the block sizes are increasing
                    assertTrue(page.getSizeInBytes() < maxReadBytes * (MAX_BATCH_SIZE / step) || 1 == page.getPositionCount());
                }
            }

            // verify the stats are correctly recorded
            Distribution distribution = stats.getMaxCombinedBytesPerRow().getAllTime();
            assertEquals((int) distribution.getCount(), 1);
            // the block is VariableWidthBlock that contains valueIsNull and offsets arrays as overhead
            assertEquals((int) distribution.getMax(), Arrays.stream(dataColumns).mapToInt(GrowingTestColumn::getMaxSize).sum() + (Integer.BYTES + Byte.BYTES) * numColumns);
            pageSource.close();
        }
        finally {
            tempFile.delete();
        }
    }

    /**
     * Same memory-phase checks as {@link #testPageSource()}, but accounted through a
     * TableScanOperator's DriverContext instead of the page source directly.
     */
    @Test
    public void testTableScanOperator()
    {
        // Numbers used in assertions in this test may change when implementation is modified,
        // feel free to change them if they break in the future
        DriverContext driverContext = testPreparer.newDriverContext();
        SourceOperator operator = testPreparer.newTableScanOperator(driverContext);

        assertEquals(driverContext.getSystemMemoryUsage(), 0);

        long memoryUsage = -1;
        int totalRows = 0;
        while (totalRows < 20000) {
            assertFalse(operator.isFinished());
            Page page = operator.getOutput();
            assertNotNull(page);
            page.getBlock(1);
            if (memoryUsage == -1) {
                memoryUsage = driverContext.getSystemMemoryUsage();
                assertBetweenInclusive(memoryUsage, 460000L, 469999L);
            }
            else {
                assertEquals(driverContext.getSystemMemoryUsage(), memoryUsage);
            }
            totalRows += page.getPositionCount();
        }

        memoryUsage = -1;
        while (totalRows < 40000) {
            assertFalse(operator.isFinished());
            Page page = operator.getOutput();
            assertNotNull(page);
            page.getBlock(1);
            if (memoryUsage == -1) {
                memoryUsage = driverContext.getSystemMemoryUsage();
                assertBetweenInclusive(memoryUsage, 460000L, 469999L);
            }
            else {
                assertEquals(driverContext.getSystemMemoryUsage(), memoryUsage);
            }
            totalRows += page.getPositionCount();
        }

        memoryUsage = -1;
        while (totalRows < NUM_ROWS) {
            assertFalse(operator.isFinished());
            Page page = operator.getOutput();
            assertNotNull(page);
            page.getBlock(1);
            if (memoryUsage == -1) {
                memoryUsage = driverContext.getSystemMemoryUsage();
                assertBetweenInclusive(memoryUsage, 360000L, 369999L);
            }
            else {
                assertEquals(driverContext.getSystemMemoryUsage(), memoryUsage);
            }
            totalRows += page.getPositionCount();
        }

        assertFalse(operator.isFinished());
        assertNull(operator.getOutput());
        assertTrue(operator.isFinished());
        assertEquals(driverContext.getSystemMemoryUsage(), 0);
    }

    /**
     * Runs the scan through a ScanFilterAndProjectOperator (identity projections) and only
     * checks coarse memory bounds, since projection copies out of the lazy blocks.
     */
    @Test
    public void testScanFilterAndProjectOperator()
    {
        // Numbers used in assertions in this test may change when implementation is modified,
        // feel free to change them if they break in the future
        DriverContext driverContext = testPreparer.newDriverContext();
        SourceOperator operator = testPreparer.newScanFilterAndProjectOperator(driverContext);

        assertEquals(driverContext.getSystemMemoryUsage(), 0);

        int totalRows = 0;
        while (totalRows < NUM_ROWS) {
            assertFalse(operator.isFinished());
            Page page = operator.getOutput();
            assertNotNull(page);
            assertBetweenInclusive(driverContext.getSystemMemoryUsage(), 90_000L, 499_999L);
            totalRows += page.getPositionCount();
        }

        // done... in the current implementation finish is not set until output returns a null page
        assertNull(operator.getOutput());
        assertTrue(operator.isFinished());
        assertBetweenInclusive(driverContext.getSystemMemoryUsage(), 0L, 500L);
    }

    /**
     * Writes a test ORC file once and builds the Hive schema/column/partition metadata for it,
     * then hands out fresh page sources and operators over that file.
     */
    private class TestPreparer
    {
        private final FileSplit fileSplit;
        private final Properties schema;
        private final List<HiveColumnHandle> columns;
        private final List<Type> types;
        private final List<HivePartitionKey> partitionKeys;
        private final ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("test-executor-%s"));
        private final ScheduledExecutorService scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed("test-scheduledExecutor-%s"));

        public TestPreparer(String tempFilePath)
                throws Exception
        {
            this(tempFilePath, testColumns, NUM_ROWS, STRIPE_ROWS);
        }

        public TestPreparer(String tempFilePath, List<TestColumn> testColumns, int numRows, int stripeRows)
                throws Exception
        {
            OrcSerde serde = new OrcSerde();
            schema = new Properties();
            schema.setProperty("columns",
                    testColumns.stream()
                            .map(TestColumn::getName)
                            .collect(Collectors.joining(",")));
            schema.setProperty("columns.types",
                    testColumns.stream()
                            .map(TestColumn::getType)
                            .collect(Collectors.joining(",")));
            schema.setProperty(FILE_INPUT_FORMAT, OrcInputFormat.class.getName());
            schema.setProperty(SERIALIZATION_LIB, serde.getClass().getName());

            partitionKeys = testColumns.stream()
                    .filter(TestColumn::isPartitionKey)
                    .map(input -> new HivePartitionKey(input.getName(), (String) input.getWriteValue()))
                    .collect(toList());

            ImmutableList.Builder<HiveColumnHandle> columnsBuilder = ImmutableList.builder();
            ImmutableList.Builder<Type> typesBuilder = ImmutableList.builder();
            // Partition keys get index -1; data columns get consecutive hive column indexes.
            int nextHiveColumnIndex = 0;
            for (int i = 0; i < testColumns.size(); i++) {
                TestColumn testColumn = testColumns.get(i);
                int columnIndex = testColumn.isPartitionKey() ? -1 : nextHiveColumnIndex++;

                ObjectInspector inspector = testColumn.getObjectInspector();
                HiveType hiveType = HiveType.valueOf(inspector.getTypeName());
                Type type = hiveType.getType(TYPE_MANAGER);

                columnsBuilder.add(new HiveColumnHandle(testColumn.getName(), hiveType, type.getTypeSignature(), columnIndex, testColumn.isPartitionKey() ? PARTITION_KEY : REGULAR, Optional.empty()));
                typesBuilder.add(type);
            }
            columns = columnsBuilder.build();
            types = typesBuilder.build();

            fileSplit = createTestFile(tempFilePath, new OrcOutputFormat(), serde, null, testColumns, numRows, stripeRows);
        }

        public ConnectorPageSource newPageSource()
        {
            return newPageSource(new FileFormatDataSourceStats(), SESSION);
        }

        public ConnectorPageSource newPageSource(FileFormatDataSourceStats stats)
        {
            return newPageSource(stats, SESSION);
        }

        public ConnectorPageSource newPageSource(FileFormatDataSourceStats stats, ConnectorSession session)
        {
            OrcPageSourceFactory orcPageSourceFactory = new OrcPageSourceFactory(TYPE_MANAGER, false, HDFS_ENVIRONMENT, stats);
            return HivePageSourceProvider.createHivePageSource(
                    ImmutableSet.of(),
                    ImmutableSet.of(orcPageSourceFactory),
                    new Configuration(),
                    session,
                    fileSplit.getPath(),
                    OptionalInt.empty(),
                    fileSplit.getStart(),
                    fileSplit.getLength(),
                    fileSplit.getLength(),
                    schema,
                    TupleDomain.all(),
                    columns,
                    partitionKeys,
                    DateTimeZone.UTC,
                    TYPE_MANAGER,
                    ImmutableMap.of(),
                    Optional.empty(),
                    false)
                    .get();
        }

        public SourceOperator newTableScanOperator(DriverContext driverContext)
        {
            ConnectorPageSource pageSource = newPageSource();
            SourceOperatorFactory sourceOperatorFactory = new TableScanOperatorFactory(
                    0,
                    new PlanNodeId("0"),
                    (session, split, table, columnHandles) -> pageSource,
                    TEST_TABLE_HANDLE,
                    columns.stream().map(columnHandle -> (ColumnHandle) columnHandle).collect(toList()));
            SourceOperator operator = sourceOperatorFactory.createOperator(driverContext);
            operator.addSplit(new Split(new CatalogName("test"), TestingSplit.createLocalSplit(), Lifespan.taskWide()));
            return operator;
        }

        public SourceOperator newScanFilterAndProjectOperator(DriverContext driverContext)
        {
            ConnectorPageSource pageSource = newPageSource();
            // Identity projection over every column; no filter.
            ImmutableList.Builder<RowExpression> projectionsBuilder = ImmutableList.builder();
            for (int i = 0; i < types.size(); i++) {
                projectionsBuilder.add(field(i, types.get(i)));
            }
            Supplier<CursorProcessor> cursorProcessor = EXPRESSION_COMPILER.compileCursorProcessor(Optional.empty(), projectionsBuilder.build(), "key");
            Supplier<PageProcessor> pageProcessor = EXPRESSION_COMPILER.compilePageProcessor(Optional.empty(), projectionsBuilder.build());
            SourceOperatorFactory sourceOperatorFactory = new ScanFilterAndProjectOperatorFactory(
                    0,
                    new PlanNodeId("test"),
                    new PlanNodeId("0"),
                    (session, split, table, columnHandles) -> pageSource,
                    cursorProcessor,
                    pageProcessor,
                    TEST_TABLE_HANDLE,
                    columns.stream().map(columnHandle -> (ColumnHandle) columnHandle).collect(toList()),
                    types,
                    new DataSize(0, BYTE),
                    0);
            SourceOperator operator = sourceOperatorFactory.createOperator(driverContext);
            operator.addSplit(new Split(new CatalogName("test"), TestingSplit.createLocalSplit(), Lifespan.taskWide()));
            return operator;
        }

        private DriverContext newDriverContext()
        {
            return createTaskContext(executor, scheduledExecutor, testSessionBuilder().build())
                    .addPipelineContext(0, true, true, false)
                    .addDriverContext();
        }
    }

    /**
     * Writes {@code numRows} rows of the non-partition-key columns to an ORC file, forcing a
     * stripe flush every {@code stripeRows} rows, and returns a split covering the whole file.
     */
    public static FileSplit createTestFile(
            String filePath,
            HiveOutputFormat<?, ?> outputFormat,
            Serializer serializer,
            String compressionCodec,
            List<TestColumn> testColumns,
            int numRows,
            int stripeRows)
            throws Exception
    {
        // filter out partition keys, which are not written to the file
        testColumns = ImmutableList.copyOf(filter(testColumns, not(TestColumn::isPartitionKey)));

        Properties tableProperties = new Properties();
        tableProperties.setProperty("columns", Joiner.on(',').join(transform(testColumns, TestColumn::getName)));
        tableProperties.setProperty("columns.types", Joiner.on(',').join(transform(testColumns, TestColumn::getType)));
        serializer.initialize(CONFIGURATION, tableProperties);

        JobConf jobConf = new JobConf();
        if (compressionCodec != null) {
            CompressionCodec codec = new CompressionCodecFactory(CONFIGURATION).getCodecByName(compressionCodec);
            jobConf.set(COMPRESS_CODEC, codec.getClass().getName());
            jobConf.set(COMPRESS_TYPE, SequenceFile.CompressionType.BLOCK.toString());
        }

        RecordWriter recordWriter = createRecordWriter(new Path(filePath), CONFIGURATION);

        try {
            SettableStructObjectInspector objectInspector = getStandardStructObjectInspector(
                    ImmutableList.copyOf(transform(testColumns, TestColumn::getName)),
                    ImmutableList.copyOf(transform(testColumns, TestColumn::getObjectInspector)));

            Object row = objectInspector.create();

            List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());

            for (int rowNumber = 0; rowNumber < numRows; rowNumber++) {
                for (int i = 0; i < testColumns.size(); i++) {
                    Object writeValue = testColumns.get(i).getWriteValue();
                    if (writeValue instanceof Slice) {
                        writeValue = ((Slice) writeValue).getBytes();
                    }
                    objectInspector.setStructFieldData(row, fields.get(i), writeValue);
                }

                Writable record = serializer.serialize(row, objectInspector);
                recordWriter.write(record);
                if (rowNumber % stripeRows == stripeRows - 1) {
                    flushStripe(recordWriter);
                }
            }
        }
        finally {
            recordWriter.close(false);
        }

        Path path = new Path(filePath);
        path.getFileSystem(CONFIGURATION).setVerifyChecksum(true);
        File file = new File(filePath);
        return new FileSplit(path, 0, file.length(), new String[0]);
    }

    /**
     * Forces a stripe boundary by invoking the package-private WriterImpl.flushStripe()
     * on the writer hidden inside the OrcRecordWriter, via reflection.
     */
    private static void flushStripe(RecordWriter recordWriter)
    {
        try {
            Field writerField = OrcOutputFormat.class.getClassLoader()
                    .loadClass(ORC_RECORD_WRITER)
                    .getDeclaredField("writer");
            writerField.setAccessible(true);
            Writer writer = (Writer) writerField.get(recordWriter);
            Method flushStripe = WriterImpl.class.getDeclaredMethod("flushStripe");
            flushStripe.setAccessible(true);
            flushStripe.invoke(writer);
        }
        catch (ReflectiveOperationException e) {
            throw new RuntimeException(e);
        }
    }

    /** Creates an ORC record writer (ZLIB, no memory manager) through the reflective constructor. */
    private static RecordWriter createRecordWriter(Path target, Configuration conf)
    {
        try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(FileSystem.class.getClassLoader())) {
            WriterOptions options = OrcFile.writerOptions(conf)
                    .memory(new NullMemoryManager())
                    .compress(ZLIB);

            try {
                return WRITER_CONSTRUCTOR.newInstance(target, options);
            }
            catch (ReflectiveOperationException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /** Looks up the (Path, WriterOptions) constructor of the package-private OrcRecordWriter. */
    private static Constructor<? extends RecordWriter> getOrcWriterConstructor()
    {
        try {
            Constructor<? extends RecordWriter> constructor = OrcOutputFormat.class.getClassLoader()
                    .loadClass(ORC_RECORD_WRITER)
                    .asSubclass(RecordWriter.class)
                    .getDeclaredConstructor(Path.class, WriterOptions.class);
            constructor.setAccessible(true);
            return constructor;
        }
        catch (ReflectiveOperationException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * A test column definition: name, Hive object inspector, a supplier producing the value to
     * write for each row, and whether the column is a partition key (not written to the file).
     */
    public static class TestColumn
    {
        private final String name;
        private final ObjectInspector objectInspector;
        private final Supplier<?> writeValue;
        private final boolean partitionKey;

        public TestColumn(String name, ObjectInspector objectInspector, Supplier<?> writeValue, boolean partitionKey)
        {
            this.name = requireNonNull(name, "name is null");
            this.objectInspector = requireNonNull(objectInspector, "objectInspector is null");
            this.writeValue = writeValue;
            this.partitionKey = partitionKey;
        }

        public String getName()
        {
            return name;
        }

        public String getType()
        {
            return objectInspector.getTypeName();
        }

        public ObjectInspector getObjectInspector()
        {
            return objectInspector;
        }

        public Object getWriteValue()
        {
            return writeValue.get();
        }

        public boolean isPartitionKey()
        {
            return partitionKey;
        }

        @Override
        public String toString()
        {
            StringBuilder sb = new StringBuilder("TestColumn{");
            sb.append("name='").append(name).append('\'');
            sb.append(", objectInspector=").append(objectInspector);
            sb.append(", partitionKey=").append(partitionKey);
            sb.append('}');
            return sb.toString();
        }
    }

    /**
     * A column whose written value repeats the supplied string floor(counter / step) times,
     * with counter incremented per row — i.e. each successive row's value grows (stepping up
     * every {@code step} rows). Tracks the largest value produced for stats verification.
     */
    public static final class GrowingTestColumn
            extends TestColumn
    {
        private final Supplier<String> writeValue;
        private int counter;
        private int step;
        private int maxSize;

        public GrowingTestColumn(String name, ObjectInspector objectInspector, Supplier<String> writeValue, boolean partitionKey, int step)
        {
            super(name, objectInspector, writeValue, partitionKey);
            this.writeValue = writeValue;
            // Start at step so the very first value already contains one repetition.
            this.counter = step;
            this.step = step;
        }

        @Override
        public Object getWriteValue()
        {
            StringBuilder builder = new StringBuilder();
            String source = writeValue.get();
            for (int i = 0; i < counter / step; i++) {
                builder.append(source);
            }
            counter++;
            if (builder.length() > maxSize) {
                maxSize = builder.length();
            }
            return builder.toString();
        }

        public int getMaxSize()
        {
            return maxSize;
        }
    }
}
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.customtabs.content; import static org.chromium.chrome.browser.customtabs.content.CustomTabActivityNavigationController.FinishReason.OTHER; import static org.chromium.chrome.browser.customtabs.content.CustomTabActivityNavigationController.FinishReason.REPARENTING; import static org.chromium.chrome.browser.customtabs.content.CustomTabActivityNavigationController.FinishReason.USER_NAVIGATION; import android.app.Activity; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.os.SystemClock; import android.text.TextUtils; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import androidx.core.app.ActivityOptionsCompat; import org.chromium.base.metrics.RecordUserAction; import org.chromium.base.task.PostTask; import org.chromium.chrome.R; import org.chromium.chrome.browser.IntentHandler; import org.chromium.chrome.browser.browserservices.intents.BrowserServicesIntentDataProvider; import org.chromium.chrome.browser.customtabs.CloseButtonNavigator; import org.chromium.chrome.browser.customtabs.CustomTabObserver; import org.chromium.chrome.browser.customtabs.CustomTabsConnection; import org.chromium.chrome.browser.dependency_injection.ActivityScope; import org.chromium.chrome.browser.externalnav.ExternalNavigationDelegateImpl; import org.chromium.chrome.browser.fullscreen.FullscreenManager; import org.chromium.chrome.browser.init.ChromeBrowserInitializer; import org.chromium.chrome.browser.lifecycle.ActivityLifecycleDispatcher; import org.chromium.chrome.browser.lifecycle.StartStopWithNativeObserver; import org.chromium.chrome.browser.tab.Tab; import org.chromium.chrome.browser.toolbar.ToolbarManager; import org.chromium.components.dom_distiller.core.DomDistillerUrlUtils; import 
org.chromium.content_public.browser.LoadUrlParams; import org.chromium.content_public.browser.RenderFrameHost; import org.chromium.content_public.browser.UiThreadTaskTraits; import org.chromium.content_public.browser.WebContents; import org.chromium.ui.base.PageTransition; import org.chromium.url.GURL; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import javax.inject.Inject; import dagger.Lazy; /** * Responsible for navigating to new pages and going back to previous pages. */ @ActivityScope public class CustomTabActivityNavigationController implements StartStopWithNativeObserver { @IntDef({USER_NAVIGATION, REPARENTING, OTHER}) @Retention(RetentionPolicy.SOURCE) public @interface FinishReason { int USER_NAVIGATION = 0; int REPARENTING = 1; int OTHER = 2; } /** A handler of back presses. */ public interface BackHandler { /** * Called when back button is pressed, unless already handled by another handler. * The implementation should do one of the following: * 1) Synchronously accept and handle the event and return true; * 2) Synchronously reject the event by returning false; * 3) Accept the event by returning true, handle it asynchronously, and if the handling * fails, trigger the default handling routine by running the defaultBackHandler. */ boolean handleBackPressed(Runnable defaultBackHandler); } /** Interface encapsulating the process of handling the custom tab closing. */ public interface FinishHandler { void onFinish(@FinishReason int reason); } /** Interface which gets the package name of the default web browser on the device. */ public interface DefaultBrowserProvider { /** Returns the package name for the default browser on the device as a string. 
*/ @Nullable String getDefaultBrowser(); } private final CustomTabActivityTabController mTabController; private final CustomTabActivityTabProvider mTabProvider; private final BrowserServicesIntentDataProvider mIntentDataProvider; private final CustomTabsConnection mConnection; private final Lazy<CustomTabObserver> mCustomTabObserver; private final CloseButtonNavigator mCloseButtonNavigator; private final ChromeBrowserInitializer mChromeBrowserInitializer; private final Activity mActivity; private final Lazy<FullscreenManager> mFullscreenManager; private final DefaultBrowserProvider mDefaultBrowserProvider; @Nullable private ToolbarManager mToolbarManager; @Nullable private FinishHandler mFinishHandler; private boolean mIsFinishing; private boolean mIsHandlingUserNavigation; private final CustomTabActivityTabProvider.Observer mTabObserver = new CustomTabActivityTabProvider.Observer() { @Override public void onAllTabsClosed() { finish(mIsHandlingUserNavigation ? USER_NAVIGATION : OTHER); } }; @Inject public CustomTabActivityNavigationController(CustomTabActivityTabController tabController, CustomTabActivityTabProvider tabProvider, BrowserServicesIntentDataProvider intentDataProvider, CustomTabsConnection connection, Lazy<CustomTabObserver> customTabObserver, CloseButtonNavigator closeButtonNavigator, ChromeBrowserInitializer chromeBrowserInitializer, Activity activity, ActivityLifecycleDispatcher lifecycleDispatcher, Lazy<FullscreenManager> fullscreenManager, DefaultBrowserProvider customTabsDefaultBrowserProvider) { mTabController = tabController; mTabProvider = tabProvider; mIntentDataProvider = intentDataProvider; mConnection = connection; mCustomTabObserver = customTabObserver; mCloseButtonNavigator = closeButtonNavigator; mChromeBrowserInitializer = chromeBrowserInitializer; mActivity = activity; mFullscreenManager = fullscreenManager; mDefaultBrowserProvider = customTabsDefaultBrowserProvider; lifecycleDispatcher.register(this); 
mTabProvider.addObserver(mTabObserver); } /** * Notifies the navigation controller that the ToolbarManager has been created and is ready for * use. ToolbarManager isn't passed directly to the constructor because it's not guaranteed to * be initialized yet. */ public void onToolbarInitialized(ToolbarManager manager) { assert manager != null : "Toolbar manager not initialized"; mToolbarManager = manager; } /** * Navigates to given url. */ public void navigate(String url) { navigate(new LoadUrlParams(url), SystemClock.elapsedRealtime()); } /** * Performs navigation using given {@link LoadUrlParams}. * Uses provided timestamp as the initial time for tracking page loading times * (see {@link CustomTabObserver}). */ public void navigate(final LoadUrlParams params, long timeStamp) { Tab tab = mTabProvider.getTab(); if (tab == null) { assert false; return; } // TODO(pkotwicz): Figure out whether we want to record these metrics for WebAPKs. if (mIntentDataProvider.getWebappExtras() == null) { mCustomTabObserver.get().trackNextPageLoadFromTimestamp(tab, timeStamp); } IntentHandler.addReferrerAndHeaders(params, mIntentDataProvider.getIntent()); // Launching a TWA, WebAPK or a standalone-mode homescreen shortcut counts as a TOPLEVEL // transition since it opens up an app-like experience, and should count towards site // engagement scores. CCTs on the other hand still count as LINK transitions. int transition; if (mIntentDataProvider.isTrustedWebActivity() || mIntentDataProvider.isWebappOrWebApkActivity()) { transition = PageTransition.AUTO_TOPLEVEL | PageTransition.FROM_API; } else { transition = PageTransition.LINK | PageTransition.FROM_API; } params.setTransitionType(IntentHandler.getTransitionTypeFromIntent( mIntentDataProvider.getIntent(), transition)); IntentHandler.setAttributionParamsFromIntent(params, mIntentDataProvider.getIntent()); tab.loadUrl(params); } /** * Handles back button navigation. 
*/ public boolean navigateOnBack() { if (!mChromeBrowserInitializer.isFullBrowserInitialized()) return false; RecordUserAction.record("CustomTabs.SystemBack"); if (mTabProvider.getTab() == null) return false; if (mFullscreenManager.get().getPersistentFullscreenMode()) { mFullscreenManager.get().exitPersistentFullscreenMode(); return true; } final WebContents webContents = mTabProvider.getTab().getWebContents(); if (webContents != null) { RenderFrameHost focusedFrame = webContents.getFocusedFrame(); if (focusedFrame != null && focusedFrame.signalCloseWatcherIfActive()) return true; } if (mToolbarManager != null && mToolbarManager.back()) return true; if (mTabController.onlyOneTabRemaining()) { // If we're closing the last tab, just finish the Activity manually. If we had called // mTabController.closeTab() and waited for the Activity to close as a result we would // have a visual glitch: https://crbug.com/1087108. finish(USER_NAVIGATION); } else { mTabController.closeTab(); } return true; } /** * Handles close button navigation. */ public void navigateOnClose() { mIsHandlingUserNavigation = true; mCloseButtonNavigator.navigateOnClose(); mIsHandlingUserNavigation = false; } /** * Opens the URL currently being displayed in the Custom Tab in the regular browser. * @param forceReparenting Whether tab reparenting should be forced for testing. * * @return Whether or not the tab was sent over successfully. 
*/
public boolean openCurrentUrlInBrowser(boolean forceReparenting) {
    Tab tab = mTabProvider.getTab();
    if (tab == null) return false;

    GURL gurl = tab.getUrl();
    // For distilled pages, hand the original article URL to the browser rather
    // than the distiller viewer URL.
    if (DomDistillerUrlUtils.isDistilledPage(gurl)) {
        gurl = DomDistillerUrlUtils.getOriginalUrlFromDistillerUrl(gurl);
    }
    String url = gurl.getSpec();
    if (TextUtils.isEmpty(url)) url = mIntentDataProvider.getUrlToLoad();

    Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
    intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    intent.putExtra(IntentHandler.EXTRA_FROM_OPEN_IN_BROWSER, true);

    // Target the user's default browser explicitly; fall back to an implicit
    // intent if that package cannot resolve this VIEW intent.
    String packageName = mDefaultBrowserProvider.getDefaultBrowser();
    if (packageName != null) {
        intent.setPackage(packageName);
        if (intent.resolveActivity(mActivity.getPackageManager()) == null) {
            intent.setPackage(null);
        }
    }

    boolean willChromeHandleIntent =
            mIntentDataProvider.isOpenedByChrome() || mIntentDataProvider.isIncognito();

    // If the tab is opened by TWA or Webapp, do not reparent and finish the Custom Tab
    // activity because we still want to keep the app alive.
    boolean canFinishActivity = !mIntentDataProvider.isTrustedWebActivity()
            && !mIntentDataProvider.isWebappOrWebApkActivity();

    willChromeHandleIntent |=
            ExternalNavigationDelegateImpl.willChromeHandleIntent(intent, true);

    Bundle startActivityOptions = ActivityOptionsCompat.makeCustomAnimation(
            mActivity, R.anim.abc_fade_in, R.anim.abc_fade_out).toBundle();

    if (canFinishActivity && willChromeHandleIntent || forceReparenting) {
        // Remove observer to not trigger finishing in onAllTabsClosed() callback - we'll use
        // reparenting finish callback instead.
        mTabProvider.removeObserver(mTabObserver);
        mTabController.detachAndStartReparenting(intent, startActivityOptions,
                () -> finish(REPARENTING));
    } else {
        if (mIntentDataProvider.isInfoPage()) {
            IntentHandler.startChromeLauncherActivityForTrustedIntent(intent);
        } else {
            mActivity.startActivity(intent, startActivityOptions);
        }
    }
    return true;
}

/**
 * Finishes the Custom Tab activity and removes the reference from the Android recents.
 *
 * @param reason The reason for finishing.
 */
public void finish(@FinishReason int reason) {
    // Guard against re-entrant finishes (multiple close triggers racing).
    if (mIsFinishing) return;
    mIsFinishing = true;

    if (reason != REPARENTING) {
        // Closing the activity destroys the renderer as well. Re-create a spare renderer some
        // time after, so that we have one ready for the next tab open. This does not increase
        // memory consumption, as the current renderer goes away. We create a renderer as a lot
        // of users open several Custom Tabs in a row. The delay is there to avoid jank in the
        // transition animation when closing the tab.
        PostTask.postDelayedTask(UiThreadTaskTraits.DEFAULT,
                CustomTabsConnection::createSpareWebContents, 500);
    }
    if (mFinishHandler != null) {
        mFinishHandler.onFinish(reason);
    }
}

/**
 * Sets a {@link FinishHandler} to be notified when the custom tab is being closed.
 */
public void setFinishHandler(FinishHandler finishHandler) {
    assert mFinishHandler == null
            : "Multiple FinishedHandlers not supported, replace with ObserverList if necessary";
    mFinishHandler = finishHandler;
}

/**
 * Sets a criterion to choose a page to land to when close button is pressed.
 * Only one such criterion can be set.
 * If no page in the navigation history meets the criterion, or there is no criterion, then
 * pressing close button will finish the Custom Tab activity.
*/
public void setLandingPageOnCloseCriterion(CloseButtonNavigator.PageCriteria criterion) {
    // Delegates entirely to the close-button navigator, which owns the criterion.
    mCloseButtonNavigator.setLandingPageCriteria(criterion);
}

@Override
public void onStartWithNative() {
    // Activity is (re)starting; clear any finishing state from a previous stop.
    mIsFinishing = false;
}

@Override
public void onStopWithNative() {
    if (mIsFinishing) {
        // We are going away for good - drop the tab instead of persisting it.
        mTabController.closeAndForgetTab();
    } else {
        // Merely backgrounded - persist tab state so it can be restored.
        mTabController.saveState();
    }
}
}
/**
 * Copyright 2008-2017 Qualogy Solutions B.V.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.qualogy.qafe.mgwt.client.util;

import java.util.HashSet;
import java.util.Set;

import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.event.dom.client.KeyCodes;

/**
 * Static helper for matching native keyboard events against textual key
 * descriptions such as {@code "CTRL+F5"} or {@code "KEY_ENTER"}.
 * <p>
 * Supplements GWT's {@link KeyCodes} with key codes it does not define
 * (function keys, numpad keys, punctuation), and translates a
 * {@link NativeEvent} into the set of logical key names it represents.
 */
public class KeyBoardHelper {

    // --- Key codes not provided by GWT's KeyCodes ---

    // Function keys F1-F12.
    public static final int KEY_F1 = 112;
    public static final int KEY_F2 = 113;
    public static final int KEY_F3 = 114;
    public static final int KEY_F4 = 115;
    public static final int KEY_F5 = 116;
    public static final int KEY_F6 = 117;
    public static final int KEY_F7 = 118;
    public static final int KEY_F8 = 119;
    public static final int KEY_F9 = 120;
    public static final int KEY_F10 = 121;
    public static final int KEY_F11 = 122;
    public static final int KEY_F12 = 123;

    public static final int KEY_SPACE = 32;
    public static final int KEY_INSERT = 45;

    // Numeric keypad keys.
    public static final int KEY_NUMPAD_0 = 96;
    public static final int KEY_NUMPAD_1 = 97;
    public static final int KEY_NUMPAD_2 = 98;
    public static final int KEY_NUMPAD_3 = 99;
    public static final int KEY_NUMPAD_4 = 100;
    public static final int KEY_NUMPAD_5 = 101;
    public static final int KEY_NUMPAD_6 = 102;
    public static final int KEY_NUMPAD_7 = 103;
    public static final int KEY_NUMPAD_8 = 104;
    public static final int KEY_NUMPAD_9 = 105;
    public static final int KEY_NUMPAD_MULTIPLY = 106;
    public static final int KEY_NUMPAD_ADD = 107;
    public static final int KEY_NUMPAD_SUBTRACT = 109;
    public static final int KEY_NUMPAD_DECIMAL = 110;
    public static final int KEY_NUMPAD_DIVIDE = 111;

    public static final int KEY_NUM_LOCK = 144;

    // Punctuation keys (US keyboard layout key codes).
    public static final int KEY_SEMICOLON = 186;
    public static final int KEY_EQUALS = 187;
    public static final int KEY_COMMA = 188;
    public static final int KEY_HYPHEN = 189;
    public static final int KEY_DOT = 190;
    public static final int KEY_SLASH = 191;
    public static final int KEY_BACKSLASH = 220;
    public static final int KEY_BACKTICK = 192;
    public static final int KEY_SQUAREBRACKET_OPEN = 219;
    public static final int KEY_SQUAREBRACKET_CLOSE = 221;
    public static final int KEY_SINGLEQUOTE = 222;

    // --- Logical key names as used in key-binding expressions ---

    public static final String KEY_NAME_F1 = "F1";
    public static final String KEY_NAME_F2 = "F2";
    public static final String KEY_NAME_F3 = "F3";
    public static final String KEY_NAME_F4 = "F4";
    public static final String KEY_NAME_F5 = "F5";
    public static final String KEY_NAME_F6 = "F6";
    public static final String KEY_NAME_F7 = "F7";
    public static final String KEY_NAME_F8 = "F8";
    public static final String KEY_NAME_F9 = "F9";
    public static final String KEY_NAME_F10 = "F10";
    public static final String KEY_NAME_F11 = "F11";
    public static final String KEY_NAME_F12 = "F12";
    public static final String KEY_NAME_SPACE = "KEY_SPACE";
    public static final String KEY_NAME_INSERT = "KEY_INSERT";
    public static final String KEY_NAME_DELETE = "KEY_DELETE";
    public static final String KEY_NAME_HOME = "KEY_HOME";
    public static final String KEY_NAME_END = "KEY_END";
    public static final String KEY_NAME_PAGEUP = "KEY_PAGEUP";
    public static final String KEY_NAME_PAGEDOWN = "KEY_PAGEDOWN";
    public static final String KEY_NAME_UP = "KEY_UP";
    public static final String KEY_NAME_DOWN = "KEY_DOWN";
    public static final String KEY_NAME_LEFT = "KEY_LEFT";
    public static final String KEY_NAME_RIGHT = "KEY_RIGHT";
    public static final String KEY_NAME_ALT = "KEY_ALT";
    public static final String KEY_NAME_BACKSPACE = "KEY_BACKSPACE";
    public static final String KEY_NAME_CTRL = "KEY_CTRL";
    public static final String KEY_NAME_ENTER = "KEY_ENTER";
    public static final String KEY_NAME_ESCAPE = "KEY_ESCAPE";
    public static final String KEY_NAME_SHIFT = "KEY_SHIFT";
    public static final String KEY_NAME_TAB = "KEY_TAB";
    public static final String KEY_NAME_NUM_LOCK = "KEY_NUMLOCK";

    // Parameter name identifying a key-binding expression, and the separator
    // ("+", escaped for regex use) between its parts, e.g. "CTRL+F5".
    public static final String PARAM_KEY = "key";
    public static final String PARAM_KEY_DELIMITER = "\\+";

    // Utility class - not instantiable.
    private KeyBoardHelper() {
    }

    /**
     * Returns whether {@code value} literally equals the given key-input string.
     * Only applies when {@code param} is {@link #PARAM_KEY}.
     * NOTE(review): this overload compares case-sensitively, while the
     * NativeEvent overload upper-cases the value first - confirm intended.
     */
    public static boolean isKeyInput(final String param, final String value, final String keyInput) {
        if (PARAM_KEY.equals(param) && (value != null)) {
            return value.equals(keyInput);
        }
        return false;
    }

    /**
     * Returns whether the key combination described by {@code value}
     * (e.g. "CTRL+F5") exactly matches the keys carried by {@code keyEvent}:
     * every named key must be pressed, and no extra keys may be present.
     * Only applies when {@code param} is {@link #PARAM_KEY}.
     */
    public static boolean isKeyInput(final String param, final String value, final NativeEvent keyEvent) {
        if (PARAM_KEY.equals(param) && (value != null)) {
            Set<String> keyInputs = getKeyInputs(keyEvent);
            // NOTE(review): toUpperCase() uses the default locale; in a Turkish
            // locale "i" upper-cases to a dotted capital I - consider Locale.ROOT
            // if GWT's emulation supports it.
            String[] keyValues = value.toUpperCase().split(PARAM_KEY_DELIMITER);
            // Sizes must match so that extra pressed keys cause a mismatch.
            if ((keyInputs.size() > 0) && (keyInputs.size() == keyValues.length)) {
                for (int i=0; i<keyValues.length; i++) {
                    String keyValue = keyValues[i];
                    if (!keyInputs.contains(keyValue.trim())) {
                        return false;
                    }
                }
                return true;
            }
        }
        return false;
    }

    // CHECKSTYLE.OFF: CyclomaticComplexity
    /**
     * Translates a native key event into the set of logical key names it
     * represents: modifier flags (Alt/Ctrl/Shift) plus the name or character
     * of the primary key code. Letters and digits map to their character;
     * numpad and punctuation keys map to the symbol they produce.
     */
    private static Set<String> getKeyInputs(final NativeEvent keyEvent) {
        Set<String> keyInputs = new HashSet<String>();
        if (keyEvent != null) {
            int keyCode = keyEvent.getKeyCode();
            // Modifier state is reported via flags, independent of the key code.
            // (The Set deduplicates when the primary key IS the modifier key.)
            if (keyEvent.getAltKey()) {
                keyInputs.add(KEY_NAME_ALT);
            }
            if (keyEvent.getCtrlKey()) {
                keyInputs.add(KEY_NAME_CTRL);
            }
            if (keyEvent.getShiftKey()) {
                keyInputs.add(KEY_NAME_SHIFT);
            }
            switch (keyCode) {
                case KeyCodes.KEY_ALT : {
                    keyInputs.add(KEY_NAME_ALT);
                } break;
                case KeyCodes.KEY_CTRL : {
                    keyInputs.add(KEY_NAME_CTRL);
                } break;
                case KeyCodes.KEY_SHIFT : {
                    keyInputs.add(KEY_NAME_SHIFT);
                } break;
                case KeyCodes.KEY_UP : {
                    keyInputs.add(KEY_NAME_UP);
                } break;
                case KeyCodes.KEY_DOWN : {
                    keyInputs.add(KEY_NAME_DOWN);
                } break;
                case KeyCodes.KEY_LEFT : {
                    keyInputs.add(KEY_NAME_LEFT);
                } break;
                case KeyCodes.KEY_RIGHT : {
                    keyInputs.add(KEY_NAME_RIGHT);
                } break;
                case KeyCodes.KEY_TAB : {
                    keyInputs.add(KEY_NAME_TAB);
                } break;
                case KeyCodes.KEY_ENTER : {
                    keyInputs.add(KEY_NAME_ENTER);
                } break;
                case KeyCodes.KEY_ESCAPE : {
                    keyInputs.add(KEY_NAME_ESCAPE);
                } break;
                case KeyCodes.KEY_BACKSPACE : {
                    keyInputs.add(KEY_NAME_BACKSPACE);
                } break;
                case KEY_INSERT : {
                    keyInputs.add(KEY_NAME_INSERT);
                } break;
                case KeyCodes.KEY_DELETE : {
                    keyInputs.add(KEY_NAME_DELETE);
                } break;
                case KeyCodes.KEY_HOME : {
                    keyInputs.add(KEY_NAME_HOME);
                } break;
                case KeyCodes.KEY_END : {
                    keyInputs.add(KEY_NAME_END);
                } break;
                case KeyCodes.KEY_PAGEUP : {
                    keyInputs.add(KEY_NAME_PAGEUP);
                } break;
                case KeyCodes.KEY_PAGEDOWN : {
                    keyInputs.add(KEY_NAME_PAGEDOWN);
                } break;
                case KEY_SPACE : {
                    keyInputs.add(KEY_NAME_SPACE);
                } break;
                case KEY_F1 : {
                    keyInputs.add(KEY_NAME_F1);
                } break;
                case KEY_F2 : {
                    keyInputs.add(KEY_NAME_F2);
                } break;
                case KEY_F3 : {
                    keyInputs.add(KEY_NAME_F3);
                } break;
                case KEY_F4 : {
                    keyInputs.add(KEY_NAME_F4);
                } break;
                case KEY_F5 : {
                    keyInputs.add(KEY_NAME_F5);
                } break;
                case KEY_F6 : {
                    keyInputs.add(KEY_NAME_F6);
                } break;
                case KEY_F7 : {
                    keyInputs.add(KEY_NAME_F7);
                } break;
                case KEY_F8 : {
                    keyInputs.add(KEY_NAME_F8);
                } break;
                case KEY_F9 : {
                    keyInputs.add(KEY_NAME_F9);
                } break;
                case KEY_F10 : {
                    keyInputs.add(KEY_NAME_F10);
                } break;
                case KEY_F11 : {
                    keyInputs.add(KEY_NAME_F11);
                } break;
                case KEY_F12 : {
                    keyInputs.add(KEY_NAME_F12);
                } break;
                case KEY_NUM_LOCK : {
                    keyInputs.add(KEY_NAME_NUM_LOCK);
                } break;
                // Numpad operator and digit keys map to the character they type.
                case KEY_NUMPAD_DIVIDE : {
                    keyInputs.add("/");
                } break;
                case KEY_NUMPAD_MULTIPLY : {
                    keyInputs.add("*");
                } break;
                case KEY_NUMPAD_SUBTRACT : {
                    keyInputs.add("-");
                } break;
                case KEY_NUMPAD_ADD : {
                    keyInputs.add("+");
                } break;
                case KEY_NUMPAD_DECIMAL : {
                    keyInputs.add(".");
                } break;
                case KEY_NUMPAD_0 : {
                    keyInputs.add("0");
                } break;
                case KEY_NUMPAD_1 : {
                    keyInputs.add("1");
                } break;
                case KEY_NUMPAD_2 : {
                    keyInputs.add("2");
                } break;
                case KEY_NUMPAD_3 : {
                    keyInputs.add("3");
                } break;
                case KEY_NUMPAD_4 : {
                    keyInputs.add("4");
                } break;
                case KEY_NUMPAD_5 : {
                    keyInputs.add("5");
                } break;
                case KEY_NUMPAD_6 : {
                    keyInputs.add("6");
                } break;
                case KEY_NUMPAD_7 : {
                    keyInputs.add("7");
                } break;
                case KEY_NUMPAD_8 : {
                    keyInputs.add("8");
                } break;
                case KEY_NUMPAD_9 : {
                    keyInputs.add("9");
                } break;
                // Punctuation keys map to their (unshifted, US-layout) symbol.
                case KEY_SEMICOLON : {
                    keyInputs.add(";");
                } break;
                case KEY_EQUALS : {
                    keyInputs.add("=");
                } break;
                case KEY_COMMA : {
                    keyInputs.add(",");
                } break;
                case KEY_HYPHEN : {
                    keyInputs.add("-");
                } break;
                case KEY_DOT : {
                    keyInputs.add(".");
                } break;
                case KEY_SLASH : {
                    keyInputs.add("/");
                } break;
                case KEY_BACKSLASH : {
                    keyInputs.add("\\");
                } break;
                case KEY_BACKTICK : {
                    keyInputs.add("`");
                } break;
                case KEY_SQUAREBRACKET_OPEN : {
                    keyInputs.add("[");
                } break;
                case KEY_SQUAREBRACKET_CLOSE : {
                    keyInputs.add("]");
                } break;
                case KEY_SINGLEQUOTE : {
                    keyInputs.add("'");
                } break;
                default : {
                    // Plain letters and digits: the key code equals the ASCII
                    // code of the (uppercase) character.
                    if (isLetter(keyCode) || isDigit(keyCode)) {
                        keyInputs.add(Character.toString((char)keyCode));
                    }
                } break;
            }
        }
        return keyInputs;
    }
    // CHECKSTYLE.ON: CyclomaticComplexity

    // True for key codes of letters A-Z (65-90).
    private static boolean isLetter(int keyCode) {
        return (keyCode >= 65) && (keyCode <= 90);
    }

    // True for key codes of top-row digits 0-9 (48-57).
    private static boolean isDigit(int keyCode) {
        return (keyCode >= 48) && (keyCode <= 57);
    }
}
package eu.role_project.service.space;

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.UUID;

import javax.inject.Inject;
import javax.inject.Named;
import javax.ws.rs.core.UriBuilder;

//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;

import eu.role_project.service.resource.ROLETerms;
import se.kth.csc.kmr.conserve.Concept;
import se.kth.csc.kmr.conserve.Request;
import se.kth.csc.kmr.conserve.Resolution;
import se.kth.csc.kmr.conserve.core.AbstractInitializer;
import se.kth.csc.kmr.conserve.core.ConserveTerms;
import se.kth.csc.kmr.conserve.iface.internal.RequestNotifier;
import se.kth.csc.kmr.conserve.iface.jaxrs.RequestImpl;

/**
 * Seeds the store with a fixed set of test data: two spaces, four tools,
 * a test user (with password digest), a default anonymous user, and
 * membership/ownership links - all under well-known, hard-coded UUIDs so
 * tests can reference them deterministically.
 */
public class TestInitializer extends AbstractInitializer {

    // private static Logger log =
    // LoggerFactory.getLogger(TestInitializer.class);

    // Context and predicate under which user concepts are stored.
    @Inject
    @Named("conserve.user.context")
    private UUID userContextUuid;

    @Inject
    @Named("conserve.user.predicate")
    private UUID userPredicateUuid;

    // Fixed UUIDs for every seeded concept, so tests can look them up directly.
    public static final UUID space1_id = UUID
            .fromString("6b329f05-f5bd-4702-b8ca-6fb45de385a1");
    public static final UUID space2_id = UUID
            .fromString("4647ae5b-f75d-4d81-a634-5ba54ccbc546");
    public static final UUID tool1_id = UUID
            .fromString("4af91b4d-c670-407c-ad6b-8f9474ccf8c8");
    public static final UUID tool2_id = UUID
            .fromString("df381e94-0f0d-423a-911c-9a3e28961ef1");
    public static final UUID tool3_id = UUID
            .fromString("0d4a10a5-0412-41fb-ba85-32281b323b67");
    public static final UUID tool4_id = UUID
            .fromString("795f95cf-ec73-4b4c-ab52-bae8bce32939");
    public static final UUID user1_id = UUID
            .fromString("b6762683-e025-4bf6-ad65-83310e4a3e3e");
    public static final UUID member1_id = UUID
            .fromString("7af530bb-e351-4d64-97e3-c8195ae1d369");
    public static final UUID owner1_id = UUID
            .fromString("58efa76f-9f67-47c1-aba4-3ed8e7f743d9");
    public static final UUID member2_id = UUID
            .fromString("fc0a98a1-432e-498b-ad82-88d58c99561d");
    public static final UUID owner2_id = UUID
            .fromString("c56d5948-7b17-4345-90f4-762a71d1b175");
    public static final UUID user_default_id = UUID
            .fromString("e4008a52-cf77-4e58-953a-65dc2d9c567f");

    // Salt appended to passwords before hashing (see test user below).
    @Inject
    @Named("password-salt")
    private String salt;

    // @Inject
    // private ShindigDbService shindigDb;

    @Inject
    private RequestNotifier requestNotifier;

    /**
     * Creates all test concepts in order. Each creation that should be
     * observable posts a CONTEXT/CREATED resolution pair via the
     * {@link RequestNotifier}; the ordering of store writes and notifier
     * posts is significant.
     */
    @Override
    public void initialize(Request request) {
        // Space 1
        Concept space1 = store().in(SpaceService.ID).sub(ROLETerms.space)
                .acquire(space1_id, "test");
        store().in(space1)
                .as(ConserveTerms.metadata)
                .type("application/json")
                .string("{\"\": { \"http://purl.org/dc/terms/title\": [{ \"value\": \"Test Space\", \"type\": \"literal\" }]}}");
        requestNotifier.setResolution(Resolution.StandardType.CONTEXT,
                store.getConcept(SpaceService.ID));
        requestNotifier.setResolution(Resolution.StandardType.CREATED, space1);
        requestNotifier.doPost();

        // Tool 1
        Concept tool1 = store().in(space1).sub(ROLETerms.tool)
                .acquire(tool1_id);
        store().in(tool1)
                .as(ConserveTerms.metadata)
                .type("application/json")
                .string("{\"\": { \"http://purl.org/dc/terms/title\": [{ \"value\": \"RSS\", \"type\": \"literal\" }]}}");
        store().in(tool1)
                .put(ConserveTerms.reference,
                        "http://role-project.svn.sourceforge.net/viewvc/role-project/trunk/gadgets/rss/gadget.xml");
        store().in(tool1).put(ConserveTerms.type,
                "http://purl.org/role/terms/OpenSocialGadget");

        // Test user
        Concept user1 = store().in(userContextUuid).sub(userPredicateUuid)
                .acquire(user1_id, "testuser");
        store().in(user1)
                .as(ConserveTerms.metadata)
                .type("application/json")
                .string("{\"\": { \"http://purl.org/dc/terms/title\": [{ \"value\": \"Test User\", \"type\": \"literal\" }]}}");
        // NOTE(review): "SHA1" relies on a provider alias; the standard JCA
        // algorithm name is "SHA-1".
        MessageDigest sha1;
        try {
            sha1 = MessageDigest.getInstance("SHA1");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        // Password digest = SHA-1 of password + salt.
        // NOTE(review): getBytes() uses the platform default charset; the
        // digest is platform-dependent if the salt is non-ASCII - consider an
        // explicit charset.
        byte[] digest = sha1.digest(("roleabdc" + salt).getBytes());
        store().in(user1).as(ConserveTerms.secret)
                .type("application/octet-stream").bytes(digest);
        requestNotifier.setResolution(Resolution.StandardType.CONTEXT,
                store.getConcept(userContextUuid));
        requestNotifier.setResolution(Resolution.StandardType.CREATED, user1);
        requestNotifier.doPost();

        // Default user
        Concept defaultUser = store().in(userContextUuid).sub(userPredicateUuid)
                .acquire(user_default_id);
        store().in(defaultUser)
                .as(ConserveTerms.metadata)
                .type("application/json")
                .string("{\"\": { \"http://purl.org/dc/terms/title\": [{ \"value\": \"Anonymous\", \"type\": \"literal\" }]}}");

        // // User in Shindig
        // try {
        // JSONArray people = shindigDb.getDb().getJSONArray("people");
        // JSONObject jsonUser = new JSONObject(
        // "{\"id\" : \""
        // + Base64UUID.encode(user1_id)
        // +
        // "\",\"displayName\" : \"Test User\",\"gender\" : \"female\",\"hasApp\" : true,\"name\" : {\"familyName\" : \"User\",\"givenName\" : \"Test\",\"formatted\" : \"Test User\"}}");
        // people.put(jsonUser);
        // JSONObject data = shindigDb.getDb().getJSONObject("data");
        // JSONObject jsonData = new JSONObject("{\"count\" : \"0\"}");
        // data.put(Base64UUID.encode(user1_id), jsonData);
        // } catch (JSONException e) {
        // log.error("Error adding test user to Shindig JSON DB", e);
        // }
        // // shindigEntityManager.getTransaction().begin();
        // // shindigEntityManager
        // // .createNativeQuery(
        // //
        // "insert into Person (person_id, display_name, drinker, gender, network_presence, smoker) values (?i, ?n, ?dr, ?ge, ?ne, ?sm)")
        // // .setParameter("i", "111").setParameter("n", "Test User")
        // // .setParameter("dr", "NO").setParameter("ge", "FEMALE")
        // // .setParameter("ne", "OFFLINE").setParameter("sm", "NO")
        // // .executeUpdate();
        // // shindigEntityManager.getTransaction().commit();

        // Space 2
        Concept space2 = store().in(SpaceService.ID).sub(ROLETerms.space)
                .acquire(space2_id, "test2");
        store().in(space2)
                .as(ConserveTerms.metadata)
                .type("application/json")
                .string("{\"\": { \"http://purl.org/dc/terms/title\": [{ \"value\": \"Test2 Space\", \"type\": \"literal\" }]}}");
        requestNotifier.setResolution(Resolution.StandardType.CONTEXT,
                store.getConcept(SpaceService.ID));
        requestNotifier.setResolution(Resolution.StandardType.CREATED, space2);
        requestNotifier.doPost();

        // Tool 3: its reference URL is derived from the current request's base
        // URI, pointing back at space2's widget endpoint.
        Concept tool3 = store().in(space2).sub(ROLETerms.tool)
                .acquire(tool3_id);
        UriBuilder urib = ((RequestImpl) request).getUriInfo()
                .getBaseUriBuilder();
        urib.path("spaces/test2/role:widget");
        store().in(tool3)
                .as(ConserveTerms.metadata)
                .type("application/json")
                .string("{\"\": { \"http://purl.org/dc/terms/title\": [{ \"value\": \"Space\", \"type\": \"literal\" }]}}");
        store().in(tool3).put(ConserveTerms.reference, urib.build().toString());
        store().in(tool3).put(ConserveTerms.type,
                "http://purl.org/role/terms/OpenSocialGadget");

        // Member 1: user1 is a member of space1.
        Concept member1 = store().in(space1).sub(ROLETerms.member)
                .acquire(member1_id);
        store().in(member1).put(ConserveTerms.reference, user1_id);
        requestNotifier.setResolution(Resolution.StandardType.CONTEXT, space1);
        requestNotifier.setResolution(Resolution.StandardType.CREATED, member1);
        requestNotifier.doPost();

        // Owner 1: user1 owns space1.
        Concept owner1 = store().in(space1).sub(ConserveTerms.owner)
                .acquire(owner1_id);
        store().in(owner1).put(ConserveTerms.reference, user1_id);
        requestNotifier.setResolution(Resolution.StandardType.CONTEXT, space1);
        requestNotifier.setResolution(Resolution.StandardType.CREATED, owner1);
        requestNotifier.doPost();

        // Member 2: user1 is a member of space2.
        Concept member2 = store().in(space2).sub(ROLETerms.member)
                .acquire(member2_id);
        store().in(member2).put(ConserveTerms.reference, user1_id);
        requestNotifier.setResolution(Resolution.StandardType.CONTEXT, space2);
        requestNotifier.setResolution(Resolution.StandardType.CREATED, member2);
        requestNotifier.doPost();

        // Owner 2: user1 owns space2.
        Concept owner2 = store().in(space2).sub(ConserveTerms.owner)
                .acquire(owner2_id);
        store().in(owner2).put(ConserveTerms.reference, user1_id);
        requestNotifier.setResolution(Resolution.StandardType.CONTEXT, space2);
        requestNotifier.setResolution(Resolution.StandardType.CREATED, owner2);
        requestNotifier.doPost();

        // Tool 2: attached to the test user rather than a space.
        Concept tool2 = store().in(user1).sub(ROLETerms.tool).acquire(tool2_id);
        store().in(tool2)
                .as(ConserveTerms.metadata)
                .type("application/json")
                .string("{\"\": { \"http://purl.org/dc/terms/title\": [{ \"value\": \"Portfolio\", \"type\": \"literal\" }]}}");
        store().in(tool2)
                .put(ConserveTerms.reference,
                        "http://role-project.svn.sourceforge.net/viewvc/role-project/trunk/gadgets/portfolio/gadget.xml");
        store().in(tool2).put(ConserveTerms.type,
                "http://purl.org/role/terms/OpenSocialGadget");

        // Tool 4: attached to the default (anonymous) user.
        Concept tool4 = store().in(defaultUser).sub(ROLETerms.tool)
                .acquire(tool4_id);
        store().in(tool4)
                .as(ConserveTerms.metadata)
                .type("application/json")
                .string("{\"\": { \"http://purl.org/dc/terms/title\": [{ \"value\": \"Translator\", \"type\": \"literal\" }]}}");
        store().in(tool4)
                .put(ConserveTerms.reference,
                        "http://role-project.svn.sourceforge.net/viewvc/role-project/trunk/gadgets/language/src/main/webapp/translator/translator.xml");
        store().in(tool4).put(ConserveTerms.type,
                "http://purl.org/role/terms/OpenSocialGadget");
    }
}
/******************************************************************************* * Created by Koen Deschacht (koendeschacht@gmail.com) 2017-3-11. For license * information see the LICENSE file in the root folder of this repository. ******************************************************************************/ package be.bagofwords.minidepi.implementation; import be.bagofwords.logging.Log; import be.bagofwords.minidepi.*; import be.bagofwords.minidepi.annotations.Bean; import be.bagofwords.minidepi.annotations.Inject; import be.bagofwords.minidepi.annotations.Property; import be.bagofwords.util.SerializationUtils; import java.lang.annotation.Annotation; import java.lang.reflect.*; import java.util.*; public class BeanManager { private final ApplicationContext applicationContext; private final BeanManager parentBeanManager; private final LifeCycleManager lifeCycleManager; private final List<QualifiedBean> beans = new ArrayList<>(); private final Set<Class> beansBeingCreated = new HashSet<>(); public BeanManager(ApplicationContext applicationContext, LifeCycleManager lifeCycleManager, BeanManager parentBeanManager) { this.applicationContext = applicationContext; this.lifeCycleManager = lifeCycleManager; this.parentBeanManager = parentBeanManager; saveBean(applicationContext); } private <T> void saveBean(T bean, String... names) { Set<String> qualifiers = getQualifiers(bean); for (String name : names) { qualifiers.add(name); } beans.add(new QualifiedBean(qualifiers, bean)); } private <T> Set<String> getQualifiers(T bean) { return getQualifiers(bean.getClass()); } private <T> Set<String> getQualifiers(Class<T> beanType) { Annotation[] annotations = beanType.getAnnotations(); return getQualifiers(annotations); } private <T> Set<String> getQualifiers(Annotation[] annotations) { Set<String> qualifiers = new HashSet<>(); for (Annotation annotation : annotations) { Class<? 
extends Annotation> annotationType = annotation.annotationType(); if (annotationType.equals(Bean.class)) { Bean beanAnnotation = (Bean) annotation; String[] names = beanAnnotation.value(); for (String name : names) { if (name.length() > 0) { qualifiers.add(name); } } } else if (annotationType.equals(Inject.class)) { Inject injectAnnotation = (Inject) annotation; String name = injectAnnotation.value(); if (name.length() > 0) { qualifiers.add(name); } } } return qualifiers; } public <T> List<T> getBeans(Class<T> beanType, String... names) { List<T> result = new ArrayList<>(); for (QualifiedBean qualifiedBean : beans) { if (beanType.isAssignableFrom(qualifiedBean.bean.getClass())) { boolean matchesQualifiers; if (names.length == 0) { matchesQualifiers = true; } else { matchesQualifiers = false; for (String name : names) { matchesQualifiers |= qualifiedBean.qualifiers.contains(name); } } if (matchesQualifiers) { result.add(beanType.cast(qualifiedBean.bean)); } } } return result; } public <T> T getBeanIfPresent(Class<T> beanType, String... names) { List<T> beans = getBeans(beanType, names); if (beans.size() == 1) { return beans.get(0); } else if (beans.size() == 0) { return null; } else { String errorMessage = "Found " + beans.size() + " beans of type " + beanType; errorMessage = appendNames(errorMessage, names); throw new ApplicationContextException(errorMessage); } } public <T> T getBean(Class<T> beanType, String... names) { return getBeanImpl(true, beanType, names); } private <T> T getBeanImpl(boolean createIfNecessary, Class<T> beanType, String... 
names) { List<T> matchingBeans = getBeans(beanType, names); if (matchingBeans.size() == 1) { return matchingBeans.get(0); } else if (matchingBeans.size() > 1) { String errorMessage = "Found multiple matching beans for type " + beanType.getSimpleName(); errorMessage = appendNames(errorMessage, names); throw new ApplicationContextException(errorMessage); } else if (parentBeanManager != null) { T bean = parentBeanManager.getBeanImpl(false, beanType, names); if (bean != null) { return bean; } } if (createIfNecessary) { //Does this type have the correct qualifiers? If yes, we create it if (doQualifiersMatch(beanType, names)) { createBean(beanType); matchingBeans = getBeans(beanType, names); if (matchingBeans.size() == 1) { return matchingBeans.get(0); } } String errorMessage; if (matchingBeans.size() > 0) { errorMessage = "Found multiple beans with type " + beanType; errorMessage = appendNames(errorMessage, names); errorMessage += " : "; for (int i = 0; i < matchingBeans.size(); i++) { errorMessage += matchingBeans.get(i).getClass().getSimpleName(); if (i < matchingBeans.size() - 1) { errorMessage += ", "; } } } else { errorMessage = "Could not find any bean with type " + beanType; errorMessage = appendNames(errorMessage, names); } throw new ApplicationContextException(errorMessage); } else { return null; } } public <T> boolean doQualifiersMatch(Class<T> beanType, String[] names) { Set<String> qualifiers = getQualifiers(beanType); boolean qualifiersMatch = names.length == 0; for (String name : names) { qualifiersMatch |= qualifiers.contains(name); } return qualifiersMatch; } public String appendNames(String errorMessage, String[] names) { if (names.length > 0) { errorMessage += " and names " + Arrays.toString(names); } return errorMessage; } private <T> boolean canInstantiateBean(Class<T> beanType) { return !Modifier.isAbstract(beanType.getModifiers()); } private <T> T createBean(Class<T> beanType, String... 
extraNames) { if (!canInstantiateBean(beanType)) { throw new ApplicationContextException("Can not instantiate bean of type " + beanType + ". Is this an abstract type?"); } if (beanType.equals(ApplicationContext.class)) { throw new ApplicationContextException("Refusing to create ApplicationContext as bean"); } if (beansBeingCreated.contains(beanType)) { throw new ApplicationContextException("The bean " + beanType + " is already being created. Possibly cycle?"); } beansBeingCreated.add(beanType); try { T newBean = constructBean(beanType); registerBean(newBean, extraNames); Log.i("Created bean " + newBean); return newBean; } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new ApplicationContextException("Failed to create bean " + beanType, e); } finally { beansBeingCreated.remove(beanType); } } private <T> T constructBean(Class<T> beanClass) throws IllegalAccessException, InvocationTargetException, InstantiationException { String beanName = beanClass.getCanonicalName(); try { //Constructor with a single argument, the application context? try { return beanClass.getConstructor(ApplicationContext.class).newInstance(applicationContext); } catch (NoSuchMethodException exp) { //OK, continue } //Constructor with @Inject annotation? for (Constructor<?> constructor : beanClass.getConstructors()) { if (hasInjectAnnotation(constructor.getDeclaredAnnotations())) { Class<?>[] parameterTypes = constructor.getParameterTypes(); Annotation[][] parameterAnnotations = constructor.getParameterAnnotations(); Object[] args = new Object[parameterTypes.length]; for (int i = 0; i < args.length; i++) { Set<String> qualifiers = getQualifiers(parameterAnnotations[i]); args[i] = getBean(parameterTypes[i], qualifiers.toArray(new String[qualifiers.size()])); } return (T) constructor.newInstance(args); } } //Constructor without any arguments? 
try { return beanClass.getConstructor().newInstance(); } catch (NoSuchMethodException e) { throw new ApplicationContextException("Could not create bean of type " + beanName + ". " + "Need at least one constructor that has either (1) no parameters, or (2) a single parameter of type ApplicationContext, or (3) the @Inject " + "annotation"); } } catch (Throwable exp) { if (exp instanceof ApplicationContextException) { throw exp; } else { throw new ApplicationContextException("Could not create bean of type " + beanName, exp); } } } public <T> void registerBean(Class<T> beanClass, String... names) { List<T> beans = getBeans(beanClass, names); if (beans.isEmpty()) { createBean(beanClass, names); } } public void registerBean(Object bean, String... names) { saveBean(bean, names); wireBean(bean); if (bean instanceof LifeCycleBean) { lifeCycleManager.ensureBeanCorrectState((LifeCycleBean) bean); } } public void wireBean(Object bean) { try { Class<?> currClass = bean.getClass(); while (!currClass.equals(Object.class)) { wireFields(bean, currClass); if (bean instanceof OnWiredListener) { ((OnWiredListener) bean).onWired(applicationContext); } currClass = currClass.getSuperclass(); } } catch (IllegalAccessException exp) { throw new ApplicationContextException("Failed to wire bean " + bean, exp); } } private void wireFields(Object bean, Class<?> beanClass) throws IllegalAccessException { Field[] fields = beanClass.getDeclaredFields(); for (Field field : fields) { Inject injectAnnotation = field.getAnnotation(Inject.class); if (injectAnnotation != null) { Object value = injectDependentBean(bean, field); if (injectAnnotation.ensureStarted()) { lifeCycleManager.registerStartBeforeDependency(bean, value); } } else { Property propertyAnnotation = field.getAnnotation(Property.class); if (propertyAnnotation != null) { injectProperty(bean, field, propertyAnnotation); } } } } private void injectProperty(Object bean, Field field, Property propertyAnnotation) throws IllegalAccessException { 
String propertyName = propertyAnnotation.value(); String value; try { if ("".equals(propertyAnnotation.orFrom())) { value = applicationContext.getProperty(propertyName); } else { value = applicationContext.getProperty(propertyName, propertyAnnotation.orFrom()); } } catch (PropertyException exp) { throw new ApplicationContextException("Could not find property \"" + propertyName + "\" for bean " + bean, exp); } field.setAccessible(true); Object convertedValue; if (field.getType() != String.class) { convertedValue = convertValue(field, value, bean); } else { convertedValue = value; } field.set(bean, convertedValue); } private Object convertValue(Field field, String value, Object bean) { return SerializationUtils.deserializeObject(value, field.getType()); } private Object injectDependentBean(Object bean, Field field) throws IllegalAccessException { field.setAccessible(true); if (field.get(bean) == null) { Class<?> fieldType = field.getType(); Object newValue; if (fieldType == List.class) { Type genericType = field.getGenericType(); if (genericType instanceof ParameterizedType) { Type[] genericTypeArgs = ((ParameterizedType) genericType).getActualTypeArguments(); if (genericTypeArgs.length == 1) { newValue = getBeans((Class) genericTypeArgs[0]); } else { throw new ApplicationContextException("Received multiple types for List???"); } } else { throw new ApplicationContextException("Could not determine generic type of field " + field.getName() + " in " + bean); } } else { Set<String> qualifiersSet = getQualifiers(field.getAnnotations()); String[] qualifiers = qualifiersSet.toArray(new String[qualifiersSet.size()]); newValue = getBean(fieldType, qualifiers); } field.set(bean, newValue); return newValue; } else { throw new RuntimeException("The field " + field.getName() + " of " + bean + " was not null!"); } } private boolean hasInjectAnnotation(Annotation[] annotations) { for (Annotation annotation : annotations) { if (annotation.annotationType().equals(Inject.class)) { 
return true; } } return false; } public boolean hasWiredFields(Object object) { Class<?> currClass = object.getClass(); while (!currClass.equals(Object.class)) { Field[] fields = object.getClass().getDeclaredFields(); for (Field field : fields) { Inject injectAnnotation = field.getAnnotation(Inject.class); if (injectAnnotation != null) { return true; } } currClass = currClass.getSuperclass(); } return false; } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.ql.tree;

import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Function;

import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableList;

/**
 * Information about a {@link Node}.
 * <p>
 * All the uses of this are fairly non-OO and we're looking
 * for ways to use this less and less.
 * <p>
 * The implementations of this class are super copy-and-paste-ish
 * but they are better than the sneaky reflection tricks we had
 * earlier. Still terrifying.
 *
 * @param <T> actual subclass of node that produced this {@linkplain NodeInfo}
 */
public abstract class NodeInfo<T extends Node<?>> {
    protected final T node;

    private NodeInfo(T node) {
        this.node = node;
    }

    /**
     * Values for all properties on the instance that created
     * this {@linkplain NodeInfo}.
     */
    public final List<Object> properties() {
        return unmodifiableList(innerProperties());
    }

    protected abstract List<Object> innerProperties();

    /**
     * Transform the properties on {@code node}, returning a new instance
     * of {@code T} if any properties change.
     */
    final <E> T transform(Function<? super E, ? extends E> rule, Class<E> typeToken) {
        List<?> children = node.children();
        // Only apply the rule to properties that are not children and match the
        // requested type; everything else passes through unchanged.
        Function<Object, Object> realRule = p -> {
            if (p != children && false == children.contains(p) && (p == null || typeToken.isInstance(p))) {
                return rule.apply(typeToken.cast(p));
            }
            return p;
        };
        return innerTransform(realRule);
    }

    protected abstract T innerTransform(Function<Object, Object> rule);

    /**
     * Builds a {@link NodeInfo} for Nodes without any properties.
     */
    public static <T extends Node<?>> NodeInfo<T> create(T n) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return emptyList();
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                // No properties, so there is nothing to transform.
                return node;
            }
        };
    }

    public static <T extends Node<?>, P1> NodeInfo<T> create(T n, BiFunction<Source, P1, T> ctor, P1 p1) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                return same ? node : ctor.apply(node.source(), newP1);
            }
        };
    }

    public static <T extends Node<?>, P1, P2> NodeInfo<T> create(T n, NodeCtor2<P1, P2, T> ctor, P1 p1, P2 p2) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                return same ? node : ctor.apply(node.source(), newP1, newP2);
            }
        };
    }

    public interface NodeCtor2<P1, P2, T> {
        T apply(Source l, P1 p1, P2 p2);
    }

    public static <T extends Node<?>, P1, P2, P3> NodeInfo<T> create(T n, NodeCtor3<P1, P2, P3, T> ctor, P1 p1, P2 p2, P3 p3) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2, p3);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                @SuppressWarnings("unchecked")
                P3 newP3 = (P3) rule.apply(p3);
                same &= Objects.equals(p3, newP3);
                return same ? node : ctor.apply(node.source(), newP1, newP2, newP3);
            }
        };
    }

    public interface NodeCtor3<P1, P2, P3, T> {
        T apply(Source l, P1 p1, P2 p2, P3 p3);
    }

    public static <T extends Node<?>, P1, P2, P3, P4> NodeInfo<T> create(
        T n,
        NodeCtor4<P1, P2, P3, P4, T> ctor,
        P1 p1,
        P2 p2,
        P3 p3,
        P4 p4
    ) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2, p3, p4);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                @SuppressWarnings("unchecked")
                P3 newP3 = (P3) rule.apply(p3);
                same &= Objects.equals(p3, newP3);
                @SuppressWarnings("unchecked")
                P4 newP4 = (P4) rule.apply(p4);
                same &= Objects.equals(p4, newP4);
                return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4);
            }
        };
    }

    public interface NodeCtor4<P1, P2, P3, P4, T> {
        T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4);
    }

    public static <T extends Node<?>, P1, P2, P3, P4, P5> NodeInfo<T> create(
        T n,
        NodeCtor5<P1, P2, P3, P4, P5, T> ctor,
        P1 p1,
        P2 p2,
        P3 p3,
        P4 p4,
        P5 p5
    ) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2, p3, p4, p5);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                @SuppressWarnings("unchecked")
                P3 newP3 = (P3) rule.apply(p3);
                same &= Objects.equals(p3, newP3);
                @SuppressWarnings("unchecked")
                P4 newP4 = (P4) rule.apply(p4);
                same &= Objects.equals(p4, newP4);
                @SuppressWarnings("unchecked")
                P5 newP5 = (P5) rule.apply(p5);
                same &= Objects.equals(p5, newP5);
                return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5);
            }
        };
    }

    public interface NodeCtor5<P1, P2, P3, P4, P5, T> {
        T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5);
    }

    public static <T extends Node<?>, P1, P2, P3, P4, P5, P6> NodeInfo<T> create(
        T n,
        NodeCtor6<P1, P2, P3, P4, P5, P6, T> ctor,
        P1 p1,
        P2 p2,
        P3 p3,
        P4 p4,
        P5 p5,
        P6 p6
    ) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2, p3, p4, p5, p6);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                @SuppressWarnings("unchecked")
                P3 newP3 = (P3) rule.apply(p3);
                same &= Objects.equals(p3, newP3);
                @SuppressWarnings("unchecked")
                P4 newP4 = (P4) rule.apply(p4);
                same &= Objects.equals(p4, newP4);
                @SuppressWarnings("unchecked")
                P5 newP5 = (P5) rule.apply(p5);
                same &= Objects.equals(p5, newP5);
                @SuppressWarnings("unchecked")
                P6 newP6 = (P6) rule.apply(p6);
                same &= Objects.equals(p6, newP6);
                return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6);
            }
        };
    }

    public interface NodeCtor6<P1, P2, P3, P4, P5, P6, T> {
        T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6);
    }

    public static <T extends Node<?>, P1, P2, P3, P4, P5, P6, P7> NodeInfo<T> create(
        T n,
        NodeCtor7<P1, P2, P3, P4, P5, P6, P7, T> ctor,
        P1 p1,
        P2 p2,
        P3 p3,
        P4 p4,
        P5 p5,
        P6 p6,
        P7 p7
    ) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2, p3, p4, p5, p6, p7);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                @SuppressWarnings("unchecked")
                P3 newP3 = (P3) rule.apply(p3);
                same &= Objects.equals(p3, newP3);
                @SuppressWarnings("unchecked")
                P4 newP4 = (P4) rule.apply(p4);
                same &= Objects.equals(p4, newP4);
                @SuppressWarnings("unchecked")
                P5 newP5 = (P5) rule.apply(p5);
                same &= Objects.equals(p5, newP5);
                @SuppressWarnings("unchecked")
                P6 newP6 = (P6) rule.apply(p6);
                same &= Objects.equals(p6, newP6);
                @SuppressWarnings("unchecked")
                P7 newP7 = (P7) rule.apply(p7);
                same &= Objects.equals(p7, newP7);
                return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7);
            }
        };
    }

    public interface NodeCtor7<P1, P2, P3, P4, P5, P6, P7, T> {
        T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7);
    }

    public static <T extends Node<?>, P1, P2, P3, P4, P5, P6, P7, P8> NodeInfo<T> create(
        T n,
        NodeCtor8<P1, P2, P3, P4, P5, P6, P7, P8, T> ctor,
        P1 p1,
        P2 p2,
        P3 p3,
        P4 p4,
        P5 p5,
        P6 p6,
        P7 p7,
        P8 p8
    ) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2, p3, p4, p5, p6, p7, p8);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                @SuppressWarnings("unchecked")
                P3 newP3 = (P3) rule.apply(p3);
                same &= Objects.equals(p3, newP3);
                @SuppressWarnings("unchecked")
                P4 newP4 = (P4) rule.apply(p4);
                same &= Objects.equals(p4, newP4);
                @SuppressWarnings("unchecked")
                P5 newP5 = (P5) rule.apply(p5);
                same &= Objects.equals(p5, newP5);
                @SuppressWarnings("unchecked")
                P6 newP6 = (P6) rule.apply(p6);
                same &= Objects.equals(p6, newP6);
                @SuppressWarnings("unchecked")
                P7 newP7 = (P7) rule.apply(p7);
                same &= Objects.equals(p7, newP7);
                @SuppressWarnings("unchecked")
                P8 newP8 = (P8) rule.apply(p8);
                same &= Objects.equals(p8, newP8);
                return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8);
            }
        };
    }

    public interface NodeCtor8<P1, P2, P3, P4, P5, P6, P7, P8, T> {
        T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8);
    }

    public static <T extends Node<?>, P1, P2, P3, P4, P5, P6, P7, P8, P9> NodeInfo<T> create(
        T n,
        NodeCtor9<P1, P2, P3, P4, P5, P6, P7, P8, P9, T> ctor,
        P1 p1,
        P2 p2,
        P3 p3,
        P4 p4,
        P5 p5,
        P6 p6,
        P7 p7,
        P8 p8,
        P9 p9
    ) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2, p3, p4, p5, p6, p7, p8, p9);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                @SuppressWarnings("unchecked")
                P3 newP3 = (P3) rule.apply(p3);
                same &= Objects.equals(p3, newP3);
                @SuppressWarnings("unchecked")
                P4 newP4 = (P4) rule.apply(p4);
                same &= Objects.equals(p4, newP4);
                @SuppressWarnings("unchecked")
                P5 newP5 = (P5) rule.apply(p5);
                same &= Objects.equals(p5, newP5);
                @SuppressWarnings("unchecked")
                P6 newP6 = (P6) rule.apply(p6);
                same &= Objects.equals(p6, newP6);
                @SuppressWarnings("unchecked")
                P7 newP7 = (P7) rule.apply(p7);
                same &= Objects.equals(p7, newP7);
                @SuppressWarnings("unchecked")
                P8 newP8 = (P8) rule.apply(p8);
                same &= Objects.equals(p8, newP8);
                @SuppressWarnings("unchecked")
                P9 newP9 = (P9) rule.apply(p9);
                same &= Objects.equals(p9, newP9);
                return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8, newP9);
            }
        };
    }

    public interface NodeCtor9<P1, P2, P3, P4, P5, P6, P7, P8, P9, T> {
        T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9);
    }

    public static <T extends Node<?>, P1, P2, P3, P4, P5, P6, P7, P8, P9, P10> NodeInfo<T> create(
        T n,
        NodeCtor10<P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, T> ctor,
        P1 p1,
        P2 p2,
        P3 p3,
        P4 p4,
        P5 p5,
        P6 p6,
        P7 p7,
        P8 p8,
        P9 p9,
        P10 p10
    ) {
        return new NodeInfo<T>(n) {
            @Override
            protected List<Object> innerProperties() {
                return Arrays.asList(p1, p2, p3, p4, p5, p6, p7, p8, p9, p10);
            }

            @Override
            protected T innerTransform(Function<Object, Object> rule) {
                boolean same = true;
                @SuppressWarnings("unchecked")
                P1 newP1 = (P1) rule.apply(p1);
                same &= Objects.equals(p1, newP1);
                @SuppressWarnings("unchecked")
                P2 newP2 = (P2) rule.apply(p2);
                same &= Objects.equals(p2, newP2);
                @SuppressWarnings("unchecked")
                P3 newP3 = (P3) rule.apply(p3);
                same &= Objects.equals(p3, newP3);
                @SuppressWarnings("unchecked")
                P4 newP4 = (P4) rule.apply(p4);
                same &= Objects.equals(p4, newP4);
                @SuppressWarnings("unchecked")
                P5 newP5 = (P5) rule.apply(p5);
                same &= Objects.equals(p5, newP5);
                @SuppressWarnings("unchecked")
                P6 newP6 = (P6) rule.apply(p6);
                same &= Objects.equals(p6, newP6);
                @SuppressWarnings("unchecked")
                P7 newP7 = (P7) rule.apply(p7);
                same &= Objects.equals(p7, newP7);
                @SuppressWarnings("unchecked")
                P8 newP8 = (P8) rule.apply(p8);
                same &= Objects.equals(p8, newP8);
                @SuppressWarnings("unchecked")
                P9 newP9 = (P9) rule.apply(p9);
                same &= Objects.equals(p9, newP9);
                @SuppressWarnings("unchecked")
                P10 newP10 = (P10) rule.apply(p10);
                same &= Objects.equals(p10, newP10);
                return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8, newP9, newP10);
            }
        };
    }

    public interface NodeCtor10<P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, T> {
        T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9, P10 p10);
    }
}
/*
 * Copyright 2008 biaoping.yin
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.frameworkset.util;

import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.lang.reflect.WildcardType;
import java.util.Collection;
import java.util.Map;

/**
 * <p>Title: GenericCollectionTypeResolver.java</p>
 * <p>Description: Helper class for determining element types of Collection and Map
 * declarations by introspecting generic type information on classes, fields,
 * method parameters and method return types.</p>
 * <p>bboss workgroup</p>
 * <p>Copyright (c) 2008</p>
 * @Date 2010-10-24
 * @author biaoping.yin
 * @version 1.0
 */
public abstract class GenericCollectionTypeResolver {

	/**
	 * Determine the generic element type of the given Collection class
	 * (if it declares one through a generic superclass or generic interface).
	 * @param collectionClass the collection class to introspect
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getCollectionType(Class collectionClass) {
		return extractTypeFromClass(collectionClass, Collection.class, 0);
	}

	/**
	 * Determine the generic key type of the given Map class
	 * (if it declares one through a generic superclass or generic interface).
	 * @param mapClass the map class to introspect
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapKeyType(Class mapClass) {
		return extractTypeFromClass(mapClass, Map.class, 0);
	}

	/**
	 * Determine the generic value type of the given Map class
	 * (if it declares one through a generic superclass or generic interface).
	 * @param mapClass the map class to introspect
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapValueType(Class mapClass) {
		return extractTypeFromClass(mapClass, Map.class, 1);
	}

	/**
	 * Determine the generic element type of the given Collection field.
	 * @param collectionField the collection field to introspect
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getCollectionFieldType(Field collectionField) {
		return getGenericFieldType(collectionField, Collection.class, 0, 1);
	}

	/**
	 * Determine the generic element type of the given Collection field.
	 * @param collectionField the collection field to introspect
	 * @param nestingLevel the nesting level of the target type
	 * (typically 1; e.g. in case of a List of Lists, 1 would indicate the
	 * nested List, whereas 2 would indicate the element of the nested List)
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getCollectionFieldType(Field collectionField, int nestingLevel) {
		return getGenericFieldType(collectionField, Collection.class, 0, nestingLevel);
	}

	/**
	 * Determine the generic key type of the given Map field.
	 * @param mapField the map field to introspect
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapKeyFieldType(Field mapField) {
		return getGenericFieldType(mapField, Map.class, 0, 1);
	}

	/**
	 * Determine the generic key type of the given Map field.
	 * @param mapField the map field to introspect
	 * @param nestingLevel the nesting level of the target type
	 * (typically 1; e.g. in case of a List of Lists, 1 would indicate the
	 * nested List, whereas 2 would indicate the element of the nested List)
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapKeyFieldType(Field mapField, int nestingLevel) {
		return getGenericFieldType(mapField, Map.class, 0, nestingLevel);
	}

	/**
	 * Determine the generic value type of the given Map field.
	 * @param mapField the map field to introspect
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapValueFieldType(Field mapField) {
		return getGenericFieldType(mapField, Map.class, 1, 1);
	}

	/**
	 * Determine the generic value type of the given Map field.
	 * @param mapField the map field to introspect
	 * @param nestingLevel the nesting level of the target type
	 * (typically 1; e.g. in case of a List of Lists, 1 would indicate the
	 * nested List, whereas 2 would indicate the element of the nested List)
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapValueFieldType(Field mapField, int nestingLevel) {
		return getGenericFieldType(mapField, Map.class, 1, nestingLevel);
	}

	/**
	 * Determine the generic element type of the given Collection parameter.
	 * @param methodParam the method parameter specification
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getCollectionParameterType(MethodParameter methodParam) {
		return getGenericParameterType(methodParam, Collection.class, 0);
	}

	/**
	 * Determine the generic key type of the given Map parameter.
	 * @param methodParam the method parameter specification
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapKeyParameterType(MethodParameter methodParam) {
		return getGenericParameterType(methodParam, Map.class, 0);
	}

	/**
	 * Determine the generic value type of the given Map parameter.
	 * @param methodParam the method parameter specification
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapValueParameterType(MethodParameter methodParam) {
		return getGenericParameterType(methodParam, Map.class, 1);
	}

	/**
	 * Determine the generic element type of the given Collection return type.
	 * @param method the method to check the return type for
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getCollectionReturnType(Method method) {
		return getGenericReturnType(method, Collection.class, 0, 1);
	}

	/**
	 * Determine the generic element type of the given Collection return type.
	 * <p>If the specified nesting level is higher than 1, the element type of
	 * a nested Collection/Map will be analyzed.
	 * @param method the method to check the return type for
	 * @param nestingLevel the nesting level of the target type
	 * (typically 1; e.g. in case of a List of Lists, 1 would indicate the
	 * nested List, whereas 2 would indicate the element of the nested List)
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getCollectionReturnType(Method method, int nestingLevel) {
		return getGenericReturnType(method, Collection.class, 0, nestingLevel);
	}

	/**
	 * Determine the generic key type of the given Map return type.
	 * @param method the method to check the return type for
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapKeyReturnType(Method method) {
		return getGenericReturnType(method, Map.class, 0, 1);
	}

	/**
	 * Determine the generic key type of the given Map return type.
	 * @param method the method to check the return type for
	 * @param nestingLevel the nesting level of the target type
	 * (typically 1; e.g. in case of a List of Lists, 1 would indicate the
	 * nested List, whereas 2 would indicate the element of the nested List)
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapKeyReturnType(Method method, int nestingLevel) {
		return getGenericReturnType(method, Map.class, 0, nestingLevel);
	}

	/**
	 * Determine the generic value type of the given Map return type.
	 * @param method the method to check the return type for
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapValueReturnType(Method method) {
		return getGenericReturnType(method, Map.class, 1, 1);
	}

	/**
	 * Determine the generic value type of the given Map return type.
	 * @param method the method to check the return type for
	 * @param nestingLevel the nesting level of the target type
	 * (typically 1; e.g. in case of a List of Lists, 1 would indicate the
	 * nested List, whereas 2 would indicate the element of the nested List)
	 * @return the generic type, or <code>null</code> if none
	 */
	public static Class getMapValueReturnType(Method method, int nestingLevel) {
		return getGenericReturnType(method, Map.class, 1, nestingLevel);
	}

	/**
	 * Extract the generic parameter type from the given method or constructor.
	 * @param methodParam the method parameter specification
	 * @param source the source class/interface defining the generic parameter types
	 * @param typeIndex the index of the type (e.g. 0 for Collections,
	 * 0 for Map keys, 1 for Map values)
	 * @return the generic type, or <code>null</code> if none
	 */
	private static Class getGenericParameterType(MethodParameter methodParam, Class source, int typeIndex) {
		// The nesting level comes from the MethodParameter itself; extraction starts
		// at level 1 and descends until the requested nesting level is reached.
		return extractType(methodParam, GenericTypeResolver.getTargetType(methodParam),
				source, typeIndex, methodParam.getNestingLevel(), 1);
	}

	/**
	 * Extract the generic type from the given field.
	 * @param field the field to check the type for
	 * @param source the source class/interface defining the generic parameter types
	 * @param typeIndex the index of the type (e.g. 0 for Collections,
	 * 0 for Map keys, 1 for Map values)
	 * @param nestingLevel the nesting level of the target type
	 * @return the generic type, or <code>null</code> if none
	 */
	private static Class getGenericFieldType(Field field, Class source, int typeIndex, int nestingLevel) {
		return extractType(null, field.getGenericType(), source, typeIndex, nestingLevel, 1);
	}

	/**
	 * Extract the generic return type from the given method.
	 * @param method the method to check the return type for
	 * @param source the source class/interface defining the generic parameter types
	 * @param typeIndex the index of the type (e.g. 0 for Collections,
	 * 0 for Map keys, 1 for Map values)
	 * @param nestingLevel the nesting level of the target type
	 * @return the generic type, or <code>null</code> if none
	 */
	private static Class getGenericReturnType(Method method, Class source, int typeIndex, int nestingLevel) {
		return extractType(null, method.getGenericReturnType(), source, typeIndex, nestingLevel, 1);
	}

	/**
	 * Extract the generic type from the given Type object.
	 * @param methodParam the method parameter specification
	 * @param type the Type to check
	 * @param source the source collection/map Class that we check
	 * @param typeIndex the index of the actual type argument
	 * @param nestingLevel the nesting level of the target type
	 * @param currentLevel the current nested level
	 * @return the generic type as Class, or <code>null</code> if none
	 */
	private static Class extractType(
			MethodParameter methodParam, Type type, Class source, int typeIndex, int nestingLevel, int currentLevel) {

		Type resolvedType = type;
		// Resolve a type variable (e.g. "T") through the parameter's type-variable map,
		// if one was provided; otherwise keep the original type.
		if (type instanceof TypeVariable && methodParam != null && methodParam.getTypeVariableMap() != null) {
			Type mappedType = (Type) methodParam.getTypeVariableMap().get(type);
			if (mappedType != null) {
				resolvedType = mappedType;
			}
		}
		// Delegate based on the concrete kind of Type; anything other than a
		// ParameterizedType or a plain Class cannot yield a usable element type.
		if (resolvedType instanceof ParameterizedType) {
			return extractTypeFromParameterizedType(
					methodParam, (ParameterizedType) resolvedType, source, typeIndex, nestingLevel, currentLevel);
		}
		else if (resolvedType instanceof Class) {
			return extractTypeFromClass(methodParam, (Class) resolvedType, source, typeIndex, nestingLevel, currentLevel);
		}
		else {
			return null;
		}
	}

	/**
	 * Extract the generic type from the given ParameterizedType object.
	 * @param methodParam the method parameter specification
	 * @param ptype the ParameterizedType to check
	 * @param source the expected raw source type (can be <code>null</code>)
	 * @param typeIndex the index of the actual type argument
	 * @param nestingLevel the nesting level of the target type
	 * @param currentLevel the current nested level
	 * @return the generic type as Class, or <code>null</code> if none
	 */
	private static Class extractTypeFromParameterizedType(MethodParameter methodParam,
			ParameterizedType ptype, Class source, int typeIndex, int nestingLevel, int currentLevel) {

		if (!(ptype.getRawType() instanceof Class)) {
			return null;
		}
		Class rawType = (Class) ptype.getRawType();
		Type[] paramTypes = ptype.getActualTypeArguments();
		// Not yet at the requested nesting level: descend into the appropriate
		// type argument and recurse one level deeper.
		if (nestingLevel - currentLevel > 0) {
			int nextLevel = currentLevel + 1;
			Integer currentTypeIndex = (methodParam != null ? methodParam.getTypeIndexForLevel(nextLevel) : null);
			// Default is last parameter type: Collection element or Map value.
			int indexToUse = (currentTypeIndex != null ? currentTypeIndex.intValue() : paramTypes.length - 1);
			Type paramType = paramTypes[indexToUse];
			return extractType(methodParam, paramType, source, typeIndex, nestingLevel, nextLevel);
		}
		if (source != null && !source.isAssignableFrom(rawType)) {
			return null;
		}
		// Prefer type information declared on a generic superclass/interface of the
		// raw type, if available.
		Class fromSuperclassOrInterface = extractTypeFromClass(methodParam, rawType, source, typeIndex, nestingLevel, currentLevel);
		if (fromSuperclassOrInterface != null) {
			return fromSuperclassOrInterface;
		}
		if (paramTypes == null || typeIndex >= paramTypes.length) {
			return null;
		}
		Type paramType = paramTypes[typeIndex];
		// Resolve a remaining type variable through the parameter's map, if possible.
		if (paramType instanceof TypeVariable && methodParam != null && methodParam.getTypeVariableMap() != null) {
			Type mappedType = (Type) methodParam.getTypeVariableMap().get(paramType);
			if (mappedType != null) {
				paramType = mappedType;
			}
		}
		// For "? super X" wildcards, fall back to the lower bound X.
		if (paramType instanceof WildcardType) {
			Type[] lowerBounds = ((WildcardType) paramType).getLowerBounds();
			if (lowerBounds != null && lowerBounds.length > 0) {
				paramType = lowerBounds[0];
			}
		}
		// For a nested parameterized type (e.g. List<List<String>>), only the raw
		// type of the argument can be reported.
		if (paramType instanceof ParameterizedType) {
			paramType = ((ParameterizedType) paramType).getRawType();
		}
		if (paramType instanceof GenericArrayType) {
			// A generic array type... Let's turn it into a straight array type if possible.
			Type compType = ((GenericArrayType) paramType).getGenericComponentType();
			if (compType instanceof Class) {
				return Array.newInstance((Class) compType, 0).getClass();
			}
		}
		else if (paramType instanceof Class) {
			// We finally got a straight Class...
			return (Class) paramType;
		}
		return null;
	}

	/**
	 * Extract the generic type from the given Class object.
	 * @param clazz the Class to check
	 * @param source the expected raw source type (can be <code>null</code>)
	 * @param typeIndex the index of the actual type argument
	 * @return the generic type as Class, or <code>null</code> if none
	 */
	private static Class extractTypeFromClass(Class clazz, Class source, int typeIndex) {
		return extractTypeFromClass(null, clazz, source, typeIndex, 1, 1);
	}

	/**
	 * Extract the generic type from the given Class object.
	 * @param methodParam the method parameter specification
	 * @param clazz the Class to check
	 * @param source the expected raw source type (can be <code>null</code>)
	 * @param typeIndex the index of the actual type argument
	 * @param nestingLevel the nesting level of the target type
	 * @param currentLevel the current nested level
	 * @return the generic type as Class, or <code>null</code> if none
	 */
	private static Class extractTypeFromClass(
			MethodParameter methodParam, Class clazz, Class source, int typeIndex, int nestingLevel, int currentLevel) {

		// JDK collection classes themselves carry no resolvable element type.
		if (clazz.getName().startsWith("java.util.")) {
			return null;
		}
		// First look at the generic superclass, if it is a Collection/Map type.
		if (clazz.getSuperclass() != null && isIntrospectionCandidate(clazz.getSuperclass())) {
			return extractType(methodParam, clazz.getGenericSuperclass(), source, typeIndex, nestingLevel, currentLevel);
		}
		// Otherwise scan the directly implemented interfaces for a Collection/Map type.
		Type[] ifcs = clazz.getGenericInterfaces();
		if (ifcs != null) {
			for (int i = 0; i < ifcs.length; i++) {
				Type ifc = ifcs[i];
				Type rawType = ifc;
				if (ifc instanceof ParameterizedType) {
					rawType = ((ParameterizedType) ifc).getRawType();
				}
				if (rawType instanceof Class && isIntrospectionCandidate((Class) rawType)) {
					return extractType(methodParam, ifc, source, typeIndex, nestingLevel, currentLevel);
				}
			}
		}
		return null;
	}

	/**
	 * Determine whether the given class is a potential candidate
	 * that defines generic collection or map types.
	 * @param clazz the class to check
	 * @return whether the given class is assignable to Collection or Map
	 */
	private static boolean isIntrospectionCandidate(Class clazz) {
		return (Collection.class.isAssignableFrom(clazz) || Map.class.isAssignableFrom(clazz));
	}

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.dispatcher; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.operators.ResourceSpec; import org.apache.flink.api.common.time.Time; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.configuration.Configuration; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.runtime.blob.BlobServer; import org.apache.flink.runtime.checkpoint.Checkpoints; import org.apache.flink.runtime.client.JobSubmissionException; import org.apache.flink.runtime.clusterframework.types.ResourceID; import org.apache.flink.runtime.concurrent.FutureUtils; import org.apache.flink.runtime.executiongraph.ArchivedExecutionGraph; import org.apache.flink.runtime.heartbeat.HeartbeatServices; import org.apache.flink.runtime.highavailability.HighAvailabilityServices; import org.apache.flink.runtime.highavailability.RunningJobsRegistry; import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.JobStatus; import org.apache.flink.runtime.jobgraph.JobVertex; import org.apache.flink.runtime.jobgraph.JobVertexID; import 
org.apache.flink.runtime.jobmanager.JobGraphStore; import org.apache.flink.runtime.jobmaster.JobManagerRunner; import org.apache.flink.runtime.jobmaster.JobManagerSharedServices; import org.apache.flink.runtime.jobmaster.JobMasterGateway; import org.apache.flink.runtime.jobmaster.JobNotFinishedException; import org.apache.flink.runtime.jobmaster.JobResult; import org.apache.flink.runtime.jobmaster.factories.DefaultJobManagerJobMetricGroupFactory; import org.apache.flink.runtime.leaderelection.LeaderContender; import org.apache.flink.runtime.leaderelection.LeaderElectionService; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.runtime.messages.FlinkJobNotFoundException; import org.apache.flink.runtime.messages.webmonitor.ClusterOverview; import org.apache.flink.runtime.messages.webmonitor.JobDetails; import org.apache.flink.runtime.messages.webmonitor.JobsOverview; import org.apache.flink.runtime.messages.webmonitor.MultipleJobsDetails; import org.apache.flink.runtime.metrics.MetricNames; import org.apache.flink.runtime.metrics.groups.JobManagerMetricGroup; import org.apache.flink.runtime.resourcemanager.ResourceManagerGateway; import org.apache.flink.runtime.resourcemanager.ResourceOverview; import org.apache.flink.runtime.rest.handler.legacy.backpressure.OperatorBackPressureStatsResponse; import org.apache.flink.runtime.rpc.FatalErrorHandler; import org.apache.flink.runtime.rpc.FencedRpcEndpoint; import org.apache.flink.runtime.rpc.RpcService; import org.apache.flink.runtime.webmonitor.retriever.GatewayRetriever; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.FlinkException; import org.apache.flink.util.Preconditions; import org.apache.flink.util.function.BiConsumerWithException; import org.apache.flink.util.function.BiFunctionWithException; import org.apache.flink.util.function.CheckedSupplier; import org.apache.flink.util.function.FunctionUtils; import org.apache.flink.util.function.FunctionWithException; 
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Base class for the Dispatcher component. The Dispatcher component is responsible
 * for receiving job submissions, persisting them, spawning JobManagers to execute
 * the jobs and to recover them in case of a master failure. Furthermore, it knows
 * about the state of the Flink session cluster.
 *
 * <p>Threading model (as visible in this class): state maps such as
 * {@link #jobManagerRunnerFutures} and {@link #jobManagerTerminationFutures} are
 * accessed from the RPC main thread (callbacks are scheduled via
 * {@code getMainThreadExecutor()} / {@code getUnfencedMainThreadExecutor()}), while
 * blocking or heavy work is pushed to {@code getRpcService().getExecutor()}.
 */
public abstract class Dispatcher extends FencedRpcEndpoint<DispatcherId> implements DispatcherGateway, LeaderContender, JobGraphStore.JobGraphListener {

	/** Default endpoint name under which the dispatcher RPC endpoint is registered. */
	public static final String DISPATCHER_NAME = "dispatcher";

	private final Configuration configuration;

	/** HA-backed store of submitted job graphs; this dispatcher also listens to it (JobGraphListener). */
	private final JobGraphStore jobGraphStore;

	/** Registry used to detect duplicate/already-completed job submissions. */
	private final RunningJobsRegistry runningJobsRegistry;

	private final HighAvailabilityServices highAvailabilityServices;

	private final GatewayRetriever<ResourceManagerGateway> resourceManagerGatewayRetriever;

	private final JobManagerSharedServices jobManagerSharedServices;

	private final HeartbeatServices heartbeatServices;

	private final BlobServer blobServer;

	private final FatalErrorHandler fatalErrorHandler;

	/** Per-job futures for the (asynchronously created) JobManagerRunner of each running job. */
	private final Map<JobID, CompletableFuture<JobManagerRunner>> jobManagerRunnerFutures;

	private final LeaderElectionService leaderElectionService;

	/** Store of archived execution graphs for completed jobs (serves status/result queries). */
	private final ArchivedExecutionGraphStore archivedExecutionGraphStore;

	private final JobManagerRunnerFactory jobManagerRunnerFactory;

	private final JobManagerMetricGroup jobManagerMetricGroup;

	private final HistoryServerArchivist historyServerArchivist;

	@Nullable
	private final String metricServiceQueryAddress;

	/** Termination futures of JobManagerRunners that are currently shutting down, keyed by job id. */
	private final Map<JobID, CompletableFuture<Void>> jobManagerTerminationFutures;

	// Serializes recovery operations: grantLeadership and onAddedJobGraph each chain
	// onto the previous operation and then replace this future with their own.
	private CompletableFuture<Void> recoveryOperation = CompletableFuture.completedFuture(null);

	public Dispatcher(
			RpcService rpcService,
			String endpointId,
			Configuration configuration,
			HighAvailabilityServices highAvailabilityServices,
			JobGraphStore jobGraphStore,
			GatewayRetriever<ResourceManagerGateway> resourceManagerGatewayRetriever,
			BlobServer blobServer,
			HeartbeatServices heartbeatServices,
			JobManagerMetricGroup jobManagerMetricGroup,
			@Nullable String metricServiceQueryAddress,
			ArchivedExecutionGraphStore archivedExecutionGraphStore,
			JobManagerRunnerFactory jobManagerRunnerFactory,
			FatalErrorHandler fatalErrorHandler,
			HistoryServerArchivist historyServerArchivist) throws Exception {
		super(rpcService, endpointId);

		this.configuration = Preconditions.checkNotNull(configuration);
		this.highAvailabilityServices = Preconditions.checkNotNull(highAvailabilityServices);
		this.resourceManagerGatewayRetriever = Preconditions.checkNotNull(resourceManagerGatewayRetriever);
		this.heartbeatServices = Preconditions.checkNotNull(heartbeatServices);
		this.blobServer = Preconditions.checkNotNull(blobServer);
		this.fatalErrorHandler = Preconditions.checkNotNull(fatalErrorHandler);
		this.jobGraphStore = Preconditions.checkNotNull(jobGraphStore);
		this.jobManagerMetricGroup = Preconditions.checkNotNull(jobManagerMetricGroup);
		this.metricServiceQueryAddress = metricServiceQueryAddress;

		// NOTE: may throw; shared services are derived from the configuration and blob server.
		this.jobManagerSharedServices = JobManagerSharedServices.fromConfiguration(
			configuration,
			this.blobServer);

		this.runningJobsRegistry = highAvailabilityServices.getRunningJobsRegistry();

		jobManagerRunnerFutures = new HashMap<>(16);

		leaderElectionService = highAvailabilityServices.getDispatcherLeaderElectionService();

		this.historyServerArchivist = Preconditions.checkNotNull(historyServerArchivist);

		this.archivedExecutionGraphStore = Preconditions.checkNotNull(archivedExecutionGraphStore);

		this.jobManagerRunnerFactory = Preconditions.checkNotNull(jobManagerRunnerFactory);

		this.jobManagerTerminationFutures = new HashMap<>(2);
	}

	//------------------------------------------------------
	// Lifecycle methods
	//------------------------------------------------------

	/**
	 * Starts the dispatcher services; on failure wraps the cause in a
	 * {@link DispatcherException}, reports it as fatal and rethrows.
	 */
	@Override
	public void onStart() throws Exception {
		try {
			startDispatcherServices();
		} catch (Exception e) {
			final DispatcherException exception = new DispatcherException(String.format("Could not start the Dispatcher %s", getAddress()), e);
			onFatalError(exception);
			throw exception;
		}
	}

	// Registers this dispatcher as job-graph listener and leader contender, and exposes metrics.
	private void startDispatcherServices() throws Exception {
		try {
			jobGraphStore.start(this);
			leaderElectionService.start(this);
			registerDispatcherMetrics(jobManagerMetricGroup);
		} catch (Exception e) {
			handleStartDispatcherServicesException(e);
		}
	}

	// Best-effort rollback on a failed start: stop what was started, suppress any
	// secondary failure, and rethrow the original exception.
	private void handleStartDispatcherServicesException(Exception e) throws Exception {
		try {
			stopDispatcherServices();
		} catch (Exception exception) {
			e.addSuppressed(exception);
		}
		throw e;
	}

	/**
	 * Stops the dispatcher: first terminates all JobManagerRunners, then shuts down
	 * the dispatcher services.
	 *
	 * @return future that completes when the shutdown is done
	 */
	@Override
	public CompletableFuture<Void> onStop() {
		log.info("Stopping dispatcher {}.", getAddress());

		final CompletableFuture<Void> allJobManagerRunnersTerminationFuture = terminateJobManagerRunnersAndGetTerminationFuture();

		return FutureUtils.runAfterwards(
			allJobManagerRunnersTerminationFuture,
			() -> {
				stopDispatcherServices();

				log.info("Stopped dispatcher {}.", getAddress());
			});
	}

	// Stops every service, collecting failures via firstOrSuppressed so that all
	// services get a shutdown attempt before the first exception is rethrown.
	private void stopDispatcherServices() throws Exception {
		Exception exception = null;
		try {
			jobManagerSharedServices.shutdown();
		} catch (Exception e) {
			exception = ExceptionUtils.firstOrSuppressed(e, exception);
		}

		try {
			jobGraphStore.stop();
		} catch (Exception e) {
			exception = ExceptionUtils.firstOrSuppressed(e, exception);
		}

		try {
			leaderElectionService.stop();
		} catch (Exception e) {
			exception = ExceptionUtils.firstOrSuppressed(e, exception);
		}

		jobManagerMetricGroup.close();

		ExceptionUtils.tryRethrowException(exception);
	}

	//------------------------------------------------------
	// RPCs
	//------------------------------------------------------

	/**
	 * Accepts a job submission unless the job is a duplicate or only some of its
	 * vertices have resources configured (currently unsupported).
	 *
	 * @param jobGraph the job to run
	 * @param timeout RPC timeout (unused in this implementation)
	 * @return acknowledge on success, or an exceptionally completed future with a
	 *         {@link JobSubmissionException}
	 */
	@Override
	public CompletableFuture<Acknowledge> submitJob(JobGraph jobGraph, Time timeout) {
		log.info("Received JobGraph submission {} ({}).", jobGraph.getJobID(), jobGraph.getName());

		try {
			if (isDuplicateJob(jobGraph.getJobID())) {
				return FutureUtils.completedExceptionally(
					new JobSubmissionException(jobGraph.getJobID(), "Job has already been submitted."));
			} else if (isPartialResourceConfigured(jobGraph)) {
				return FutureUtils.completedExceptionally(
					new JobSubmissionException(jobGraph.getJobID(), "Currently jobs is not supported if parts of the vertices have " +
							"resources configured. The limitation will be removed in future versions."));
			} else {
				return internalSubmitJob(jobGraph);
			}
		} catch (FlinkException e) {
			return FutureUtils.completedExceptionally(e);
		}
	}

	/**
	 * Checks whether the given job has already been submitted or executed.
	 *
	 * @param jobId identifying the submitted job
	 * @return true if the job has already been submitted (is running) or has been executed
	 * @throws FlinkException if the job scheduling status cannot be retrieved
	 */
	private boolean isDuplicateJob(JobID jobId) throws FlinkException {
		final RunningJobsRegistry.JobSchedulingStatus jobSchedulingStatus;

		try {
			jobSchedulingStatus = runningJobsRegistry.getJobSchedulingStatus(jobId);
		} catch (IOException e) {
			throw new FlinkException(String.format("Failed to retrieve job scheduling status for job %s.", jobId), e);
		}

		return jobSchedulingStatus == RunningJobsRegistry.JobSchedulingStatus.DONE || jobManagerRunnerFutures.containsKey(jobId);
	}

	// True iff the graph mixes vertices with UNKNOWN resources and vertices with
	// explicitly configured resources (an unsupported combination, see submitJob).
	private boolean isPartialResourceConfigured(JobGraph jobGraph) {
		boolean hasVerticesWithUnknownResource = false;
		boolean hasVerticesWithConfiguredResource = false;

		for (JobVertex jobVertex : jobGraph.getVertices()) {
			if (jobVertex.getMinResources() == ResourceSpec.UNKNOWN) {
				hasVerticesWithUnknownResource = true;
			} else {
				hasVerticesWithConfiguredResource = true;
			}

			if (hasVerticesWithUnknownResource && hasVerticesWithConfiguredResource) {
				return true;
			}
		}

		return false;
	}

	// Waits for any terminating JobManager of the same job id, then persists the job
	// graph and runs it. On failure, cleans up all job data (incl. HA) and surfaces a
	// JobSubmissionException to the caller.
	private CompletableFuture<Acknowledge> internalSubmitJob(JobGraph jobGraph) {
		log.info("Submitting job {} ({}).", jobGraph.getJobID(), jobGraph.getName());

		final CompletableFuture<Acknowledge> persistAndRunFuture = waitForTerminatingJobManager(jobGraph.getJobID(), jobGraph, this::persistAndRunJob)
			.thenApply(ignored -> Acknowledge.get());

		return persistAndRunFuture.handleAsync((acknowledge, throwable) -> {
			if (throwable != null) {
				cleanUpJobData(jobGraph.getJobID(), true);

				final Throwable strippedThrowable = ExceptionUtils.stripCompletionException(throwable);
				log.error("Failed to submit job {}.", jobGraph.getJobID(), strippedThrowable);
				throw new CompletionException(
					new JobSubmissionException(jobGraph.getJobID(), "Failed to submit job.", strippedThrowable));
			} else {
				return acknowledge;
			}
		}, getRpcService().getExecutor());
	}

	// Persists the graph first; if running it then fails, the graph is removed from
	// the store again so HA state does not leak.
	private CompletableFuture<Void> persistAndRunJob(JobGraph jobGraph) throws Exception {
		jobGraphStore.putJobGraph(jobGraph);

		final CompletableFuture<Void> runJobFuture = runJob(jobGraph);

		return runJobFuture.whenComplete(BiConsumerWithException.unchecked((Object ignored, Throwable throwable) -> {
			if (throwable != null) {
				jobGraphStore.removeJobGraph(jobGraph.getJobID());
			}
		}));
	}

	// Creates and registers the JobManagerRunner future for the job; a failed
	// creation removes the entry again (on the main thread).
	private CompletableFuture<Void> runJob(JobGraph jobGraph) {
		Preconditions.checkState(!jobManagerRunnerFutures.containsKey(jobGraph.getJobID()));

		final CompletableFuture<JobManagerRunner> jobManagerRunnerFuture = createJobManagerRunner(jobGraph);

		jobManagerRunnerFutures.put(jobGraph.getJobID(), jobManagerRunnerFuture);

		return jobManagerRunnerFuture
			.thenApply(FunctionUtils.nullFn())
			.whenCompleteAsync(
				(ignored, throwable) -> {
					if (throwable != null) {
						jobManagerRunnerFutures.remove(jobGraph.getJobID());
					}
				},
				getMainThreadExecutor());
	}

	// Builds the runner asynchronously on the RPC service executor (creation may be
	// expensive) and starts it once created.
	private CompletableFuture<JobManagerRunner> createJobManagerRunner(JobGraph jobGraph) {
		final RpcService rpcService = getRpcService();

		final CompletableFuture<JobManagerRunner> jobManagerRunnerFuture = CompletableFuture.supplyAsync(
			CheckedSupplier.unchecked(() ->
				jobManagerRunnerFactory.createJobManagerRunner(
					jobGraph,
					configuration,
					rpcService,
					highAvailabilityServices,
					heartbeatServices,
					jobManagerSharedServices,
					new DefaultJobManagerJobMetricGroupFactory(jobManagerMetricGroup),
					fatalErrorHandler)),
			rpcService.getExecutor());

		return jobManagerRunnerFuture.thenApply(FunctionUtils.uncheckedFunction(this::startJobManagerRunner));
	}

	// Wires the runner's result future to the dispatcher's terminal-state handling
	// before starting it. The identity check guards against a stale runner (a newer
	// one may have replaced it in jobManagerRunnerFutures).
	private JobManagerRunner startJobManagerRunner(JobManagerRunner jobManagerRunner) throws Exception {
		final JobID jobId = jobManagerRunner.getJobGraph().getJobID();

		FutureUtils.assertNoException(
			jobManagerRunner.getResultFuture().handleAsync(
				(ArchivedExecutionGraph archivedExecutionGraph, Throwable throwable) -> {
					// check if we are still the active JobManagerRunner by checking the identity
					final CompletableFuture<JobManagerRunner> jobManagerRunnerFuture = jobManagerRunnerFutures.get(jobId);
					final JobManagerRunner currentJobManagerRunner = jobManagerRunnerFuture != null ? jobManagerRunnerFuture.getNow(null) : null;
					//noinspection ObjectEquality
					if (jobManagerRunner == currentJobManagerRunner) {
						if (archivedExecutionGraph != null) {
							jobReachedGloballyTerminalState(archivedExecutionGraph);
						} else {
							final Throwable strippedThrowable = ExceptionUtils.stripCompletionException(throwable);

							if (strippedThrowable instanceof JobNotFinishedException) {
								jobNotFinished(jobId);
							} else {
								jobMasterFailed(jobId, strippedThrowable);
							}
						}
					} else {
						log.debug("There is a newer JobManagerRunner for the job {}.", jobId);
					}

					return null;
				}, getMainThreadExecutor()));

		jobManagerRunner.start();

		return jobManagerRunner;
	}

	/** Returns an unmodifiable snapshot of the ids of all currently running jobs. */
	@Override
	public CompletableFuture<Collection<JobID>> listJobs(Time timeout) {
		return CompletableFuture.completedFuture(
			Collections.unmodifiableSet(new HashSet<>(jobManagerRunnerFutures.keySet())));
	}

	/**
	 * Disposes the savepoint at the given path on the shared scheduled executor.
	 * The caller's context class loader is captured before going async so user
	 * classes remain resolvable.
	 */
	@Override
	public CompletableFuture<Acknowledge> disposeSavepoint(String savepointPath, Time timeout) {
		final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

		return CompletableFuture.supplyAsync(
			() -> {
				log.info("Disposing savepoint {}.", savepointPath);

				try {
					Checkpoints.disposeSavepoint(savepointPath, configuration, classLoader, log);
				} catch (IOException | FlinkException e) {
					throw new CompletionException(new FlinkException(String.format("Could not dispose savepoint %s.", savepointPath), e));
				}

				return Acknowledge.get();
			},
			jobManagerSharedServices.getScheduledExecutorService());
	}

	/** Forwards the cancel request to the job's JobMaster. */
	@Override
	public CompletableFuture<Acknowledge> cancelJob(JobID jobId, Time timeout) {
		final CompletableFuture<JobMasterGateway> jobMasterGatewayFuture = getJobMasterGatewayFuture(jobId);

		return jobMasterGatewayFuture.thenCompose((JobMasterGateway jobMasterGateway) -> jobMasterGateway.cancel(timeout));
	}

	/**
	 * Combines the ResourceManager's resource overview with the status of running
	 * jobs and the stored overview of completed jobs into a cluster overview.
	 */
	@Override
	public CompletableFuture<ClusterOverview> requestClusterOverview(Time timeout) {
		CompletableFuture<ResourceOverview> taskManagerOverviewFuture = runResourceManagerCommand(resourceManagerGateway -> resourceManagerGateway.requestResourceOverview(timeout));

		final List<CompletableFuture<Optional<JobStatus>>> optionalJobInformation = queryJobMastersForInformation(
			(JobMasterGateway jobMasterGateway) -> jobMasterGateway.requestJobStatus(timeout));

		CompletableFuture<Collection<Optional<JobStatus>>> allOptionalJobsFuture = FutureUtils.combineAll(optionalJobInformation);

		CompletableFuture<Collection<JobStatus>> allJobsFuture = allOptionalJobsFuture.thenApply(this::flattenOptionalCollection);

		final JobsOverview completedJobsOverview = archivedExecutionGraphStore.getStoredJobsOverview();

		return allJobsFuture.thenCombine(
			taskManagerOverviewFuture,
			(Collection<JobStatus> runningJobsStatus, ResourceOverview resourceOverview) -> {
				final JobsOverview allJobsOverview = JobsOverview.create(runningJobsStatus).combine(completedJobsOverview);
				return new ClusterOverview(resourceOverview, allJobsOverview);
			});
	}

	/** Returns details of running jobs (queried from their JobMasters) plus completed jobs. */
	@Override
	public CompletableFuture<MultipleJobsDetails> requestMultipleJobDetails(Time timeout) {
		List<CompletableFuture<Optional<JobDetails>>> individualOptionalJobDetails = queryJobMastersForInformation(
			(JobMasterGateway jobMasterGateway) -> jobMasterGateway.requestJobDetails(timeout));

		CompletableFuture<Collection<Optional<JobDetails>>> optionalCombinedJobDetails = FutureUtils.combineAll(
			individualOptionalJobDetails);

		CompletableFuture<Collection<JobDetails>> combinedJobDetails = optionalCombinedJobDetails.thenApply(this::flattenOptionalCollection);

		final Collection<JobDetails> completedJobDetails = archivedExecutionGraphStore.getAvailableJobDetails();

		return combinedJobDetails.thenApply(
			(Collection<JobDetails> runningJobDetails) -> {
				final Collection<JobDetails> allJobDetails = new ArrayList<>(completedJobDetails.size() + runningJobDetails.size());

				allJobDetails.addAll(runningJobDetails);
				allJobDetails.addAll(completedJobDetails);

				return new MultipleJobsDetails(allJobDetails);
			});
	}

	/**
	 * Requests the job's status from its JobMaster; if that fails, falls back to the
	 * archived job details (the job may already be completed).
	 */
	@Override
	public CompletableFuture<JobStatus> requestJobStatus(JobID jobId, Time timeout) {
		final CompletableFuture<JobMasterGateway> jobMasterGatewayFuture = getJobMasterGatewayFuture(jobId);

		final CompletableFuture<JobStatus> jobStatusFuture = jobMasterGatewayFuture.thenCompose(
			(JobMasterGateway jobMasterGateway) -> jobMasterGateway.requestJobStatus(timeout));

		return jobStatusFuture.exceptionally(
			(Throwable throwable) -> {
				final JobDetails jobDetails = archivedExecutionGraphStore.getAvailableJobDetails(jobId);

				// check whether it is a completed job
				if (jobDetails == null) {
					throw new CompletionException(ExceptionUtils.stripCompletionException(throwable));
				} else {
					return jobDetails.getStatus();
				}
			});
	}

	/** Forwards the back-pressure stats request to the job's JobMaster. */
	@Override
	public CompletableFuture<OperatorBackPressureStatsResponse> requestOperatorBackPressureStats(
			final JobID jobId,
			final JobVertexID jobVertexId) {
		final CompletableFuture<JobMasterGateway> jobMasterGatewayFuture = getJobMasterGatewayFuture(jobId);

		return jobMasterGatewayFuture.thenCompose((JobMasterGateway jobMasterGateway) -> jobMasterGateway.requestOperatorBackPressureStats(jobVertexId));
	}

	/**
	 * Requests the archived execution graph from the running JobMaster; if that
	 * fails, falls back to the archived-graph store for completed jobs.
	 */
	@Override
	public CompletableFuture<ArchivedExecutionGraph> requestJob(JobID jobId, Time timeout) {
		final CompletableFuture<JobMasterGateway> jobMasterGatewayFuture = getJobMasterGatewayFuture(jobId);

		final CompletableFuture<ArchivedExecutionGraph> archivedExecutionGraphFuture = jobMasterGatewayFuture.thenCompose(
			(JobMasterGateway jobMasterGateway) -> jobMasterGateway.requestJob(timeout));

		return archivedExecutionGraphFuture.exceptionally(
			(Throwable throwable) -> {
				final ArchivedExecutionGraph serializableExecutionGraph = archivedExecutionGraphStore.get(jobId);

				// check whether it is a completed job
				if (serializableExecutionGraph == null) {
					throw new CompletionException(ExceptionUtils.stripCompletionException(throwable));
				} else {
					return serializableExecutionGraph;
				}
			});
	}

	/**
	 * Returns the job result: for a running job it is derived from the runner's
	 * result future; for a completed job from the archived execution graph; else a
	 * {@link FlinkJobNotFoundException}.
	 */
	@Override
	public CompletableFuture<JobResult> requestJobResult(JobID jobId, Time timeout) {
		final CompletableFuture<JobManagerRunner> jobManagerRunnerFuture = jobManagerRunnerFutures.get(jobId);

		if (jobManagerRunnerFuture == null) {
			final ArchivedExecutionGraph archivedExecutionGraph = archivedExecutionGraphStore.get(jobId);

			if (archivedExecutionGraph == null) {
				return FutureUtils.completedExceptionally(new FlinkJobNotFoundException(jobId));
			} else {
				return CompletableFuture.completedFuture(JobResult.createFrom(archivedExecutionGraph));
			}
		} else {
			return jobManagerRunnerFuture.thenCompose(JobManagerRunner::getResultFuture).thenApply(JobResult::createFrom);
		}
	}

	/** Returns the dispatcher's metric query service address, or an empty collection if none was configured. */
	@Override
	public CompletableFuture<Collection<String>> requestMetricQueryServiceAddresses(Time timeout) {
		if (metricServiceQueryAddress != null) {
			return CompletableFuture.completedFuture(Collections.singleton(metricServiceQueryAddress));
		} else {
			return CompletableFuture.completedFuture(Collections.emptyList());
		}
	}

	/** Delegates to the ResourceManager for the TaskManagers' metric query service addresses. */
	@Override
	public CompletableFuture<Collection<Tuple2<ResourceID, String>>> requestTaskManagerMetricQueryServiceAddresses(Time timeout) {
		return runResourceManagerCommand(resourceManagerGateway -> resourceManagerGateway.requestTaskManagerMetricQueryServiceAddresses(timeout));
	}

	/** Returns the port of the dispatcher's blob server. */
	@Override
	public CompletableFuture<Integer> getBlobServerPort(Time timeout) {
		return CompletableFuture.completedFuture(blobServer.getPort());
	}

	/** Forwards the savepoint trigger to the job's JobMaster. */
	@Override
	public CompletableFuture<String> triggerSavepoint(
			final JobID jobId,
			final String targetDirectory,
			final boolean cancelJob,
			final Time timeout) {
		final CompletableFuture<JobMasterGateway> jobMasterGatewayFuture = getJobMasterGatewayFuture(jobId);

		return jobMasterGatewayFuture.thenCompose(
			(JobMasterGateway jobMasterGateway) ->
				jobMasterGateway.triggerSavepoint(targetDirectory, cancelJob, timeout));
	}

	/** Forwards the stop-with-savepoint request to the job's JobMaster. */
	@Override
	public CompletableFuture<String> stopWithSavepoint(
			final JobID jobId,
			final String targetDirectory,
			final boolean advanceToEndOfEventTime,
			final Time timeout) {
		final CompletableFuture<JobMasterGateway> jobMasterGatewayFuture = getJobMasterGatewayFuture(jobId);

		return jobMasterGatewayFuture.thenCompose(
			(JobMasterGateway jobMasterGateway) ->
				jobMasterGateway.stopWithSavepoint(targetDirectory, advanceToEndOfEventTime, timeout));
	}

	/** Initiates an asynchronous shutdown of the whole endpoint and acknowledges immediately. */
	@Override
	public CompletableFuture<Acknowledge> shutDownCluster() {
		closeAsync();
		return CompletableFuture.completedFuture(Acknowledge.get());
	}

	/**
	 * Cleans up the job related data from the dispatcher. If cleanupHA is true, then
	 * the data will also be removed from HA.
	 *
	 * @param jobId JobID identifying the job to clean up
	 * @param cleanupHA True iff HA data shall also be cleaned up
	 */
	private void removeJobAndRegisterTerminationFuture(JobID jobId, boolean cleanupHA) {
		final CompletableFuture<Void> cleanupFuture = removeJob(jobId, cleanupHA);

		registerJobManagerRunnerTerminationFuture(jobId, cleanupFuture);
	}

	// Tracks a runner's termination future so later submissions of the same job id
	// can wait on it; the entry self-removes once terminated (identity-checked so a
	// re-registered newer future is not dropped by mistake).
	private void registerJobManagerRunnerTerminationFuture(JobID jobId, CompletableFuture<Void> jobManagerRunnerTerminationFuture) {
		Preconditions.checkState(!jobManagerTerminationFutures.containsKey(jobId));

		jobManagerTerminationFutures.put(jobId, jobManagerRunnerTerminationFuture);

		// clean up the pending termination future
		jobManagerRunnerTerminationFuture.thenRunAsync(
			() -> {
				final CompletableFuture<Void> terminationFuture = jobManagerTerminationFutures.remove(jobId);

				//noinspection ObjectEquality
				if (terminationFuture != null && terminationFuture != jobManagerRunnerTerminationFuture) {
					jobManagerTerminationFutures.put(jobId, terminationFuture);
				}
			},
			getUnfencedMainThreadExecutor());
	}

	// Removes the job from the running set, closes its runner (if any), then cleans
	// up the job data on the RPC service executor.
	private CompletableFuture<Void> removeJob(JobID jobId, boolean cleanupHA) {
		CompletableFuture<JobManagerRunner> jobManagerRunnerFuture = jobManagerRunnerFutures.remove(jobId);

		final CompletableFuture<Void> jobManagerRunnerTerminationFuture;
		if (jobManagerRunnerFuture != null) {
			jobManagerRunnerTerminationFuture = jobManagerRunnerFuture.thenCompose(JobManagerRunner::closeAsync);
		} else {
			jobManagerRunnerTerminationFuture = CompletableFuture.completedFuture(null);
		}

		return jobManagerRunnerTerminationFuture.thenRunAsync(
			() -> cleanUpJobData(jobId, cleanupHA),
			getRpcService().getExecutor());
	}

	// Removes job metrics and blobs; with cleanupHA also removes the graph from the
	// HA store and clears the running-jobs registry, otherwise only releases the
	// graph lock. HA blobs are only deleted if the HA graph removal succeeded.
	private void cleanUpJobData(JobID jobId, boolean cleanupHA) {
		jobManagerMetricGroup.removeJob(jobId);

		boolean cleanupHABlobs = false;
		if (cleanupHA) {
			try {
				jobGraphStore.removeJobGraph(jobId);

				// only clean up the HA blobs if we could remove the job from HA storage
				cleanupHABlobs = true;
			} catch (Exception e) {
				log.warn("Could not properly remove job {} from submitted job graph store.", jobId, e);
			}

			try {
				runningJobsRegistry.clearJob(jobId);
			} catch (IOException e) {
				log.warn("Could not properly remove job {} from the running jobs registry.", jobId, e);
			}
		} else {
			try {
				jobGraphStore.releaseJobGraph(jobId);
			} catch (Exception e) {
				log.warn("Could not properly release job {} from submitted job graph store.", jobId, e);
			}
		}

		blobServer.cleanupJob(jobId, cleanupHABlobs);
	}

	/**
	 * Terminate all currently running {@link JobManagerRunner}.
	 */
	private void terminateJobManagerRunners() {
		log.info("Stopping all currently running jobs of dispatcher {}.", getAddress());

		final HashSet<JobID> jobsToRemove = new HashSet<>(jobManagerRunnerFutures.keySet());

		for (JobID jobId : jobsToRemove) {
			removeJobAndRegisterTerminationFuture(jobId, false);
		}
	}

	// Terminates all runners and returns a future completing when all registered
	// termination futures have completed.
	private CompletableFuture<Void> terminateJobManagerRunnersAndGetTerminationFuture() {
		terminateJobManagerRunners();
		final Collection<CompletableFuture<Void>> values = jobManagerTerminationFutures.values();
		return FutureUtils.completeAll(values);
	}

	/**
	 * Recovers all jobs persisted via the submitted job graph store.
	 */
	@VisibleForTesting
	Collection<JobGraph> recoverJobs() throws Exception {
		log.info("Recovering all persisted jobs.");
		final Collection<JobID> jobIds = jobGraphStore.getJobIds();

		try {
			return recoverJobGraphs(jobIds);
		} catch (Exception e) {
			// release all recovered job graphs
			for (JobID jobId : jobIds) {
				try {
					jobGraphStore.releaseJobGraph(jobId);
				} catch (Exception ie) {
					e.addSuppressed(ie);
				}
			}
			throw e;
		}
	}

	// Recovers every job graph; a null recovery result is treated as a missing job.
	@Nonnull
	private Collection<JobGraph> recoverJobGraphs(Collection<JobID> jobIds) throws Exception {
		final List<JobGraph> jobGraphs = new ArrayList<>(jobIds.size());

		for (JobID jobId : jobIds) {
			final JobGraph jobGraph = recoverJob(jobId);

			if (jobGraph == null) {
				throw new FlinkJobNotFoundException(jobId);
			}

			jobGraphs.add(jobGraph);
		}

		return jobGraphs;
	}

	// May return null if the graph is no longer in the store (see onAddedJobGraph).
	@Nullable
	private JobGraph recoverJob(JobID jobId) throws Exception {
		log.debug("Recover job {}.", jobId);
		return jobGraphStore.recoverJobGraph(jobId);
	}

	/** Reports a fatal error to the configured {@link FatalErrorHandler}. */
	protected void onFatalError(Throwable throwable) {
		fatalErrorHandler.onFatalError(throwable);
	}

	/**
	 * Archives the globally terminal execution graph and removes the job, including
	 * its HA data.
	 */
	protected void jobReachedGloballyTerminalState(ArchivedExecutionGraph archivedExecutionGraph) {
		Preconditions.checkArgument(
			archivedExecutionGraph.getState().isGloballyTerminalState(),
			"Job %s is in state %s which is not globally terminal.",
			archivedExecutionGraph.getJobID(),
			archivedExecutionGraph.getState());

		log.info("Job {} reached globally terminal state {}.", archivedExecutionGraph.getJobID(), archivedExecutionGraph.getState());

		archiveExecutionGraph(archivedExecutionGraph);

		final JobID jobId = archivedExecutionGraph.getJobID();

		removeJobAndRegisterTerminationFuture(jobId, true);
	}

	// Stores the graph locally and forwards it to the history server; both failures
	// are logged only (archiving is best effort).
	private void archiveExecutionGraph(ArchivedExecutionGraph archivedExecutionGraph) {
		try {
			archivedExecutionGraphStore.put(archivedExecutionGraph);
		} catch (IOException e) {
			log.info(
				"Could not store completed job {}({}).",
				archivedExecutionGraph.getJobName(),
				archivedExecutionGraph.getJobID(),
				e);
		}

		final CompletableFuture<Acknowledge> executionGraphFuture = historyServerArchivist.archiveExecutionGraph(archivedExecutionGraph);

		executionGraphFuture.whenComplete(
			(Acknowledge ignored, Throwable throwable) -> {
				if (throwable != null) {
					log.info(
						"Could not archive completed job {}({}) to the history server.",
						archivedExecutionGraph.getJobName(),
						archivedExecutionGraph.getJobID(),
						throwable);
				}
			});
	}

	/** Removes the job without touching its HA data (the job was not finished). */
	protected void jobNotFinished(JobID jobId) {
		log.info("Job {} was not finished by JobManager.", jobId);
		removeJobAndRegisterTerminationFuture(jobId, false);
	}

	private void jobMasterFailed(JobID jobId, Throwable cause) {
		// we fail fatally in case of a JobMaster failure in order to restart the
		// dispatcher to recover the jobs again. This only works in HA mode, though
		onFatalError(new FlinkException(String.format("JobMaster for job %s failed.", jobId), cause));
	}

	// Resolves the JobMaster gateway of a running job; re-checks on the main thread
	// that the job is still running before handing the gateway out.
	private CompletableFuture<JobMasterGateway> getJobMasterGatewayFuture(JobID jobId) {
		final CompletableFuture<JobManagerRunner> jobManagerRunnerFuture = jobManagerRunnerFutures.get(jobId);

		if (jobManagerRunnerFuture == null) {
			return FutureUtils.completedExceptionally(new FlinkJobNotFoundException(jobId));
		} else {
			final CompletableFuture<JobMasterGateway> leaderGatewayFuture = jobManagerRunnerFuture.thenCompose(JobManagerRunner::getLeaderGatewayFuture);
			return leaderGatewayFuture.thenApplyAsync(
				(JobMasterGateway jobMasterGateway) -> {
					// check whether the retrieved JobMasterGateway belongs still to a running JobMaster
					if (jobManagerRunnerFutures.containsKey(jobId)) {
						return jobMasterGateway;
					} else {
						throw new CompletionException(new FlinkJobNotFoundException(jobId));
					}
				},
				getMainThreadExecutor());
		}
	}

	private CompletableFuture<ResourceManagerGateway> getResourceManagerGateway() {
		return resourceManagerGatewayRetriever.getFuture();
	}

	// Runs a command against the (lazily retrieved) ResourceManager gateway.
	private <T> CompletableFuture<T> runResourceManagerCommand(Function<ResourceManagerGateway, CompletableFuture<T>> resourceManagerCommand) {
		return getResourceManagerGateway().thenApply(resourceManagerCommand).thenCompose(Function.identity());
	}

	// Drops empty Optionals and unwraps the rest.
	private <T> List<T> flattenOptionalCollection(Collection<Optional<T>> optionalCollection) {
		return optionalCollection.stream().filter(Optional::isPresent).map(Optional::get).collect(Collectors.toList());
	}

	// Queries every running job's JobMaster; a failed query yields Optional.empty()
	// instead of failing the whole collection.
	@Nonnull
	private <T> List<CompletableFuture<Optional<T>>> queryJobMastersForInformation(Function<JobMasterGateway, CompletableFuture<T>> queryFunction) {
		final int numberJobsRunning = jobManagerRunnerFutures.size();

		ArrayList<CompletableFuture<Optional<T>>> optionalJobInformation = new ArrayList<>(
			numberJobsRunning);

		for (JobID jobId : jobManagerRunnerFutures.keySet()) {
			final CompletableFuture<JobMasterGateway> jobMasterGatewayFuture = getJobMasterGatewayFuture(jobId);

			final CompletableFuture<Optional<T>> optionalRequest = jobMasterGatewayFuture
				.thenCompose(queryFunction::apply)
				.handle((T value, Throwable throwable) -> Optional.ofNullable(value));

			optionalJobInformation.add(optionalRequest);
		}
		return optionalJobInformation;
	}

	//------------------------------------------------------
	// Leader contender
	//------------------------------------------------------

	/**
	 * Callback method when this dispatcher is granted leadership. Chains job
	 * recovery onto the previous recovery operation, accepts leadership on the
	 * unfenced main thread, confirms the session id on success, and releases the
	 * recovered job graphs if leadership was lost in the meantime. Any failure of
	 * the whole chain is fatal.
	 *
	 * @param newLeaderSessionID unique leadershipID
	 */
	@Override
	public void grantLeadership(final UUID newLeaderSessionID) {
		runAsyncWithoutFencing(
			() -> {
				log.info("Dispatcher {} was granted leadership with fencing token {}", getAddress(), newLeaderSessionID);

				final CompletableFuture<Collection<JobGraph>> recoveredJobsFuture = recoveryOperation.thenApplyAsync(
					FunctionUtils.uncheckedFunction(ignored -> recoverJobs()),
					getRpcService().getExecutor());

				final CompletableFuture<Boolean> fencingTokenFuture = recoveredJobsFuture.thenComposeAsync(
					(Collection<JobGraph> recoveredJobs) -> tryAcceptLeadershipAndRunJobs(newLeaderSessionID, recoveredJobs),
					getUnfencedMainThreadExecutor());

				final CompletableFuture<Void> confirmationFuture = fencingTokenFuture.thenCombineAsync(
					recoveredJobsFuture,
					BiFunctionWithException.unchecked((Boolean confirmLeadership, Collection<JobGraph> recoveredJobs) -> {
						if (confirmLeadership) {
							leaderElectionService.confirmLeaderSessionID(newLeaderSessionID);
						} else {
							for (JobGraph recoveredJob : recoveredJobs) {
								jobGraphStore.releaseJobGraph(recoveredJob.getJobID());
							}
						}
						return null;
					}),
					getRpcService().getExecutor());

				confirmationFuture.whenComplete(
					(Void ignored, Throwable throwable) -> {
						if (throwable != null) {
							onFatalError(
								new DispatcherException(
									String.format("Failed to take leadership with session id %s.", newLeaderSessionID),
									(ExceptionUtils.stripCompletionException(throwable))));
						}
					});

				recoveryOperation = confirmationFuture;
			});
	}

	// If we still hold leadership, install the new fencing token and (re)run all
	// recovered jobs; otherwise report false so the caller releases the graphs.
	private CompletableFuture<Boolean> tryAcceptLeadershipAndRunJobs(UUID newLeaderSessionID, Collection<JobGraph> recoveredJobs) {
		final DispatcherId dispatcherId = DispatcherId.fromUuid(newLeaderSessionID);

		if (leaderElectionService.hasLeadership(newLeaderSessionID)) {
			log.debug("Dispatcher {} accepted leadership with fencing token {}. Start recovered jobs.", getAddress(), dispatcherId);
			setNewFencingToken(dispatcherId);

			Collection<CompletableFuture<?>> runFutures = new ArrayList<>(recoveredJobs.size());

			for (JobGraph recoveredJob : recoveredJobs) {
				final CompletableFuture<?> runFuture = waitForTerminatingJobManager(recoveredJob.getJobID(), recoveredJob, this::runJob);
				runFutures.add(runFuture);
			}

			return FutureUtils.waitForAll(runFutures).thenApply(ignored -> true);
		} else {
			log.debug("Dispatcher {} lost leadership before accepting it. Stop recovering jobs for fencing token {}.", getAddress(), dispatcherId);

			return CompletableFuture.completedFuture(false);
		}
	}

	// Waits for a previously registered termination future of the same job id (a
	// failed termination aborts the submission), then applies the given action on
	// the main thread.
	private CompletableFuture<Void> waitForTerminatingJobManager(JobID jobId, JobGraph jobGraph, FunctionWithException<JobGraph, CompletableFuture<Void>, ?> action) {
		final CompletableFuture<Void> jobManagerTerminationFuture = getJobTerminationFuture(jobId)
			.exceptionally((Throwable throwable) -> {
				throw new CompletionException(
					new DispatcherException(
						String.format("Termination of previous JobManager for job %s failed. Cannot submit job under the same job id.", jobId),
						throwable));
			});

		return jobManagerTerminationFuture.thenComposeAsync(
			FunctionUtils.uncheckedFunction((ignored) -> {
				jobManagerTerminationFutures.remove(jobId);
				return action.apply(jobGraph);
			}),
			getMainThreadExecutor());
	}

	// Exceptional if the job is still running; otherwise the pending termination
	// future, or a completed future if none is registered.
	CompletableFuture<Void> getJobTerminationFuture(JobID jobId) {
		if (jobManagerRunnerFutures.containsKey(jobId)) {
			return FutureUtils.completedExceptionally(new DispatcherException(String.format("Job with job id %s is still running.", jobId)));
		} else {
			return jobManagerTerminationFutures.getOrDefault(jobId, CompletableFuture.completedFuture(null));
		}
	}

	@VisibleForTesting
	CompletableFuture<Void> getRecoveryOperation() {
		return recoveryOperation;
	}

	// Installs the new fencing token, clearing dispatcher state first if we have
	// been leader before (a non-null previous token).
	private void setNewFencingToken(@Nullable DispatcherId dispatcherId) {
		// clear the state if we've been the leader before
		if (getFencingToken() != null) {
			clearDispatcherState();
		}

		setFencingToken(dispatcherId);
	}

	private void clearDispatcherState() {
		terminateJobManagerRunners();
	}

	// Exposes the number of running jobs as a gauge.
	private void registerDispatcherMetrics(MetricGroup jobManagerMetricGroup) {
		jobManagerMetricGroup.gauge(MetricNames.NUM_RUNNING_JOBS,
			() -> (long) jobManagerRunnerFutures.size());
	}

	/**
	 * Callback method when this dispatcher loses leadership; clears the fencing
	 * token (which also terminates all running JobManagerRunners).
	 */
	@Override
	public void revokeLeadership() {
		runAsyncWithoutFencing(
			() -> {
				log.info("Dispatcher {} was revoked leadership.", getAddress());

				setNewFencingToken(null);
			});
	}

	/**
	 * Handles error occurring in the leader election service.
	 *
	 * @param exception Exception being thrown in the leader election service
	 */
	@Override
	public void handleError(final Exception exception) {
		onFatalError(new DispatcherException("Received an error from the LeaderElectionService.", exception));
	}

	//------------------------------------------------------
	// JobGraphListener
	//------------------------------------------------------

	/**
	 * Called when another process added a job graph to the store: recover the graph
	 * (chained onto the current recovery operation) and, if still leader, run it;
	 * otherwise release the graph lock. Failures are fatal.
	 */
	@Override
	public void onAddedJobGraph(final JobID jobId) {
		runAsync(
			() -> {
				if (!jobManagerRunnerFutures.containsKey(jobId)) {
					// IMPORTANT: onAddedJobGraph can generate false positives and, thus, we must expect that
					// the specified job is already removed from the JobGraphStore. In this case,
					// JobGraphStore.recoverJob returns null.
					final CompletableFuture<Optional<JobGraph>> recoveredJob = recoveryOperation.thenApplyAsync(
						FunctionUtils.uncheckedFunction(ignored -> Optional.ofNullable(recoverJob(jobId))),
						getRpcService().getExecutor());

					final DispatcherId dispatcherId = getFencingToken();
					final CompletableFuture<Void> submissionFuture = recoveredJob.thenComposeAsync(
						(Optional<JobGraph> jobGraphOptional) -> jobGraphOptional.map(
							FunctionUtils.uncheckedFunction(jobGraph -> tryRunRecoveredJobGraph(jobGraph, dispatcherId).thenAcceptAsync(
								FunctionUtils.uncheckedConsumer((Boolean isRecoveredJobRunning) -> {
										if (!isRecoveredJobRunning) {
											jobGraphStore.releaseJobGraph(jobId);
										}
									}),
								getRpcService().getExecutor())))
							.orElse(CompletableFuture.completedFuture(null)),
						getUnfencedMainThreadExecutor());

					submissionFuture.whenComplete(
						(Void ignored, Throwable throwable) -> {
							if (throwable != null) {
								onFatalError(
									new DispatcherException(
										String.format("Could not start the added job %s", jobId),
										ExceptionUtils.stripCompletionException(throwable)));
							}
						});

					recoveryOperation = submissionFuture;
				}
			});
	}

	// Runs the recovered graph only while we hold leadership and the job is neither
	// already running nor already marked DONE in the running-jobs registry. Returns
	// true iff the job is (or becomes) running, so the caller knows whether to keep
	// the graph lock.
	private CompletableFuture<Boolean> tryRunRecoveredJobGraph(JobGraph jobGraph, DispatcherId dispatcherId) throws Exception {
		if (leaderElectionService.hasLeadership(dispatcherId.toUUID())) {
			final JobID jobId = jobGraph.getJobID();
			if (jobManagerRunnerFutures.containsKey(jobId)) {
				// we must not release the job graph lock since it can only be locked once and
				// is currently being executed. Once we support multiple locks, we must release
				// the JobGraph here
				log.debug("Ignore added JobGraph because the job {} is already running.", jobId);
				return CompletableFuture.completedFuture(true);
			} else if (runningJobsRegistry.getJobSchedulingStatus(jobId) != RunningJobsRegistry.JobSchedulingStatus.DONE) {
				return waitForTerminatingJobManager(jobId, jobGraph, this::runJob).thenApply(ignored -> true);
			} else {
				log.debug("Ignore added JobGraph because the job {} has already been completed.", jobId);
			}
		}

		return CompletableFuture.completedFuture(false);
	}

	/**
	 * Called when another process removed a job graph from the store: stop the local
	 * job without cleaning up its HA data (the remover owns that).
	 */
	@Override
	public void onRemovedJobGraph(final JobID jobId) {
		runAsync(() -> {
			try {
				removeJobAndRegisterTerminationFuture(jobId, false);
			} catch (final Exception e) {
				onFatalError(new DispatcherException(String.format("Could not remove job %s.", jobId), e));
			}
		});
	}
}
package com.mongodb.client;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.mongodb.DB;
import com.mongodb.client.event.ResultHandler;

/**
 * The primary Java interface for working with NoSqlOrm.
 * Through this interface you can execute commands, get mappers and manage transactions.
 *
 * @info  : Template for MongoDB
 * @author: xiangping_yu
 * @date  : 2014-6-22
 * @since : 1.5
 */
public interface MongoTemplet {

  // NOTE(review): a Logger constant on an interface is a constant-interface anti-pattern,
  // but it is part of the published API surface and is kept for compatibility.
  Logger logger = LoggerFactory.getLogger(MongoTemplet.class);

  /**
   * Retrieve a single row mapped from the statement key.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to use.
   * @return Mapped object.
   */
  <T> T selectOne(String statement);

  /**
   * Retrieve a single row mapped from the statement key and parameter.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @return Mapped object.
   */
  <T> T selectOne(String statement, Object parameter);

  /**
   * Retrieve a single row mapped from the statement key.
   * @param statement Unique identifier matching the statement to use.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void selectOne(String statement, ResultHandler handler);

  /**
   * Retrieve a single row mapped from the statement key and parameter.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void selectOne(String statement, Object parameter, ResultHandler handler);

  /**
   * Retrieve a list of mapped objects from the statement key.
   * @param <T> the returned list element type.
   * @param statement Unique identifier matching the statement to use.
   * @return List of mapped object.
   */
  <T> List<T> selectList(String statement);

  /**
   * Retrieve a list of mapped objects from the statement key and parameter.
   * @param <T> the returned list element type.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @return List of mapped object.
   */
  <T> List<T> selectList(String statement, Object parameter);

  /**
   * Retrieve a list of mapped objects from the statement key and parameter.
   * @param <T> the returned list element type.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @param limit Limit mapped objects.
   * @param skip Retrieve a list of mapped objects from skip index.
   * @return List of mapped object.
   */
  <T> List<T> selectList(String statement, Object parameter, Integer limit, Integer skip);

  /**
   * Retrieve a list of mapped objects from the statement key.
   * @param statement Unique identifier matching the statement to use.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void selectList(String statement, ResultHandler handler);

  /**
   * Retrieve a list of mapped objects from the statement key and parameter.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void selectList(String statement, Object parameter, ResultHandler handler);

  /**
   * Retrieve a list of mapped objects from the statement key and parameter.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @param limit Limit mapped objects.
   * @param skip Retrieve a list of mapped objects from skip index.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void selectList(String statement, Object parameter, Integer limit, Integer skip, ResultHandler handler);

  /**
   * Count a list of mapped objects from the statement key.
   * @param statement Unique identifier matching the statement to use.
   * @return Count of mapped object.
   */
  long count(String statement);

  /**
   * Count a list of mapped objects from the statement key and parameter.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @return Count of mapped object.
   */
  long count(String statement, Object parameter);

  /**
   * Retrieve a distinct list of mapped objects from the statement key.
   * @param <T> the returned list element type.
   * @param statement Unique identifier matching the statement to use.
   * @param key The distinct key.
   * @return List of mapped object.
   */
  <T> List<T> distinct(String statement, String key);

  /**
   * Retrieve a distinct list of mapped objects from the statement key and parameter.
   * @param <T> the returned list element type.
   * @param statement Unique identifier matching the statement to use.
   * @param key The distinct key.
   * @param parameter A parameter object to pass to the statement.
   * @return List of mapped object.
   */
  <T> List<T> distinct(String statement, String key, Object parameter);

  /**
   * Retrieve a distinct list of mapped objects from the statement key.
   * @param statement Unique identifier matching the statement to use.
   * @param key The distinct key.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void distinct(String statement, String key, ResultHandler handler);

  /**
   * Retrieve a distinct list of mapped objects from the statement key and parameter.
   * @param statement Unique identifier matching the statement to use.
   * @param key The distinct key.
   * @param parameter A parameter object to pass to the statement.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void distinct(String statement, String key, Object parameter, ResultHandler handler);

  /**
   * Execute an insert statement.
   * @param statement Unique identifier matching the statement to execute.
   * @return String The rows id affected by the insert.
   */
  String insert(String statement);

  /**
   * Execute an insert statement with the given parameter object.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @return String The rows id affected by the insert.
   */
  String insert(String statement, Object parameter);

  /**
   * Execute an insert statement with the given list of parameter object.
   * @param <T> the list element type.
   * @param statement Unique identifier matching the statement to execute.
   * @param list A list parameter object to pass to the statement.
   * @return List<String> The rows id affected by the insert.
   */
  <T> List<String> insertBatch(String statement, List<T> list);

  /**
   * Find a single row mapped from the statement key and update it. If no rows returned from this
   * statement key, then execute this insert statement.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to use.
   * @return Mapped object of modified.
   */
  <T> T findAndModify(String statement);

  /**
   * Find a single row mapped from the statement key and update it. If no rows returned from this
   * statement key, then execute this insert statement.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @return Mapped object of modified.
   */
  <T> T findAndModify(String statement, Object parameter);

  /**
   * Find a single row mapped from the statement key and update it. If no rows returned from this
   * statement key, then execute this insert statement.
   * @param statement Unique identifier matching the statement to use.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void findAndModify(String statement, ResultHandler handler);

  /**
   * Find a single row mapped from the statement key and update it. If no rows returned from this
   * statement key, then execute this insert statement.
   * @param statement Unique identifier matching the statement to use.
   * @param parameter A parameter object to pass to the statement.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void findAndModify(String statement, Object parameter, ResultHandler handler);

  /**
   * Execute an update statement. The number of rows affected will be returned.
   * @param statement Unique identifier matching the statement to execute.
   * @return int The number of rows affected by the update.
   */
  int update(String statement);

  /**
   * Execute an update statement. The number of rows affected will be returned.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @return int The number of rows affected by the update.
   */
  int update(String statement, Object parameter);

  /**
   * Execute a delete statement. The number of rows affected will be returned.
   * @param statement Unique identifier matching the statement to execute.
   * @return int The number of rows affected by the delete.
   */
  int delete(String statement);

  /**
   * Execute a delete statement. The number of rows affected will be returned.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @return int The number of rows affected by the delete.
   */
  int delete(String statement, Object parameter);

  /**
   * Run a command statement from the statement key.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to execute.
   * @return Mapped objects affected by the command.
   */
  <T> T command(String statement);

  /**
   * Run a command statement from the statement key and parameter.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @return Mapped objects affected by the command.
   */
  <T> T command(String statement, Object parameter);

  /**
   * Run a command statement from the statement key.
   * @param statement Unique identifier matching the statement to execute.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void command(String statement, ResultHandler handler);

  /**
   * Run a command statement from the statement key and parameter.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void command(String statement, Object parameter, ResultHandler handler);

  /**
   * Execute a group statement.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to execute.
   * @return Mapped objects affected by the command.
   */
  <T> T group(String statement);

  /**
   * Execute a group statement from the statement key and parameter.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @return Mapped objects affected by the command.
   */
  <T> T group(String statement, Object parameter);

  /**
   * Execute a group statement.
   * @param statement Unique identifier matching the statement to execute.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void group(String statement, ResultHandler handler);

  /**
   * Execute a group statement from the statement key and parameter.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void group(String statement, Object parameter, ResultHandler handler);

  /**
   * Execute an aggregate statement.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to execute.
   * @return Mapped objects affected by the command.
   */
  <T> List<T> aggregate(String statement);

  /**
   * Execute an aggregate statement from the statement key and parameter.
   * @param <T> the returned object type.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @return Mapped objects affected by the command.
   */
  <T> List<T> aggregate(String statement, Object[] parameter);

  /**
   * Execute an aggregate statement.
   * @param statement Unique identifier matching the statement to execute.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void aggregate(String statement, ResultHandler handler);

  /**
   * Execute an aggregate statement from the statement key and parameter.
   * @param statement Unique identifier matching the statement to execute.
   * @param parameter A parameter object to pass to the statement.
   * @param handler ResultHandler that will handle each retrieved row.
   */
  void aggregate(String statement, Object[] parameter, ResultHandler handler);

  /**
   * Retrieves mongo database.
   * @return Database.
   */
  DB getDB();

  /**
   * Retrieves mongo database.
   * @param dbName Retrieves database name.
   * @return Database.
   */
  DB getDB(String dbName);
}
/****************************************************************** * * Copyright 2015 Samsung Electronics All Rights Reserved. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************/ package com.example.con_server; import java.util.EnumSet; import java.util.Vector; import org.iotivity.base.EntityHandlerResult; import org.iotivity.base.OcException; import org.iotivity.base.OcHeaderOption; import org.iotivity.base.OcPlatform; import org.iotivity.base.OcRepresentation; import org.iotivity.base.OcResourceRequest; import org.iotivity.base.OcResourceResponse; import org.iotivity.base.RequestHandlerFlag; import org.iotivity.base.RequestType; import org.iotivity.service.tm.IConfigurationListener; import org.iotivity.service.tm.IDiagnosticsListener; import org.iotivity.service.tm.OCStackResult; import org.iotivity.service.tm.ThingsManager; import android.os.Message; import android.util.Log; /* * For Creating the Resources [configurtion, Diagnostic & FactoryRest] & * for Handling of the Client's Request */ public class ConfigurationServer implements IDiagnosticsListener, IConfigurationListener, OcPlatform.EntityHandler { private final String LOG_TAG = "[CON-SERVER]" + this.getClass() .getSimpleName(); private ThingsManager thingsmanager = null; private ConfigurationResource conResource = null; private DiagnosticsResource diagResource = null; private FactorySetResource factorySetResource = null; // constructor public 
ConfigurationServer() { thingsmanager = new ThingsManager(); thingsmanager.setDiagnosticsListener(this); thingsmanager.setConfigurationListener(this); } public void DoBootStrap() { Log.i(LOG_TAG, "DoBootStrap: enter"); OCStackResult result = thingsmanager.doBootstrap(); if (OCStackResult.OC_STACK_ERROR == result) { Log.e(LOG_TAG, "doBootStrap returned error: " + OCStackResult.OC_STACK_ERROR.name()); } Log.i(LOG_TAG, "DoBootStrap: exit"); } // Creating resources : configuration, diagnostics, factoryReset public void CreateConfigurationResource() { Log.i(LOG_TAG, "CreateConfigurationResource: enter"); try { conResource = new ConfigurationResource(); conResource.createResource(this); diagResource = new DiagnosticsResource(); diagResource.createResource(this); factorySetResource = new FactorySetResource(); factorySetResource.createResource(this); } catch (OcException e) { Log.e(LOG_TAG, "OcException occured: " + e.toString()); } Log.i(LOG_TAG, "CreateConfigurationResource: exit"); String message = "Resources Created Successfully(Server is Ready)"; Message msg = Message.obtain(); msg.what = 0; MainActivity mainActivityObj = MainActivity.getMainActivityObject(); MainActivity.setmessage(message); mainActivityObj.getmHandler().sendMessage(msg); } // For deleting all the resources public void deleteResources() { if (null != conResource) conResource.deleteResource(); if (null != diagResource) diagResource.deleteResource(); if (null != factorySetResource) factorySetResource.deleteResource(); } // Callback Function for doBootStrap @Override public void onBootStrapCallback(Vector<OcHeaderOption> headerOptions, OcRepresentation rep, int errorValue) { String message; Log.i(LOG_TAG, "onBootStrapCallback"); // setting the default values received from bootstrap Server ConfigurationDefaultValues.defaultRegion = rep.getValueString("r"); ConfigurationDefaultValues.defaultSystemTime = rep.getValueString("st"); ConfigurationDefaultValues.defaultCurrency = rep.getValueString("c"); 
ConfigurationDefaultValues.defaultLocation = rep.getValueString("loc"); // forming the message to display on UI message = "URI : " + rep.getUri() + "\n"; message = message + "Region : " + ConfigurationDefaultValues.defaultRegion + "\n"; message = message + "System Time : " + ConfigurationDefaultValues.defaultSystemTime + "\n"; message = message + "Currency : " + ConfigurationDefaultValues.defaultCurrency + "\n"; message = message + "Location : " + ConfigurationDefaultValues.defaultLocation + "\n"; Log.i(LOG_TAG, "Resource URI: " + rep.getUri()); Log.i(LOG_TAG, "Region: " + ConfigurationDefaultValues.defaultRegion); Log.i(LOG_TAG, "System Time: " + ConfigurationDefaultValues.defaultSystemTime); Log.i(LOG_TAG, "Currency: " + ConfigurationDefaultValues.defaultCurrency); Log.i(LOG_TAG, "Location: " + ConfigurationDefaultValues.defaultLocation); // showing the formed message on the UI Message msg = Message.obtain(); msg.what = 0; MainActivity mainActivityObj = MainActivity.getMainActivityObject(); MainActivity.setmessage(message); mainActivityObj.getmHandler().sendMessage(msg); } // Callback Function for Reboot @Override public void onRebootCallback(Vector<OcHeaderOption> headerOptions, OcRepresentation rep, int errorValue) { Log.i(LOG_TAG, "onRebootCallback"); } // Callback Function for FactoryReset @Override public void onFactoryResetCallback(Vector<OcHeaderOption> headerOptions, OcRepresentation rep, int errorValue) { Log.i(LOG_TAG, "onFactoryResetCallback"); } // For Handling the Client's Request @Override public EntityHandlerResult handleEntity(OcResourceRequest request) { Log.i(LOG_TAG, "handleEntity: enter"); EntityHandlerResult result = EntityHandlerResult.ERROR; if (null == request) { Log.e(LOG_TAG, "handleEntity: Invalid OcResourceRequest!"); return result; } RequestType requestType = request.getRequestType(); EnumSet<RequestHandlerFlag> requestHandlerFlag = request .getRequestHandlerFlagSet(); Log.i(LOG_TAG, "prepareResponseForResource: request type: " + 
requestType.name()); Log.i(LOG_TAG, "prepareResponseForResource: request for resource: " + request.getResourceUri()); if (requestHandlerFlag.contains(RequestHandlerFlag.REQUEST)) { if (RequestType.GET == requestType) { sendResponse(request); } else if (RequestType.PUT == requestType) { OcRepresentation rep = request.getResourceRepresentation(); if (null == rep) { Log.e(LOG_TAG, "handleEntity: Invalid resource representation!"); return result; } if (request.getResourceUri().equalsIgnoreCase( conResource.getUri())) { conResource.setConfigurationRepresentation(rep); } else if (request.getResourceUri().equalsIgnoreCase( diagResource.getUri())) { String factorySetAtt = rep.getValueString("fr"); if (factorySetAtt.equalsIgnoreCase("true")) { conResource.factoryReset(); } diagResource.setDiagnosticsRepresentation(rep); } sendResponse(request); } } Log.i(LOG_TAG, "handleEntity: exit"); return result; } // For sending response to the client private void sendResponse(OcResourceRequest request) { Log.i(LOG_TAG, "sendResponse: enter"); OcResourceResponse response = new OcResourceResponse(); OcRepresentation rep = null; response.setRequestHandle(request.getRequestHandle()); response.setResourceHandle(request.getResourceHandle()); if (request.getResourceUri().equalsIgnoreCase(conResource.getUri())) { rep = conResource.getConfigurationRepresentation(); } else if (request.getResourceUri().equalsIgnoreCase( diagResource.getUri())) { rep = diagResource.getDiagnosticsRepresentation(); } response.setResourceRepresentation(rep, OcPlatform.DEFAULT_INTERFACE); response.setErrorCode(200); try { OcPlatform.sendResponse(response); } catch (OcException e) { Log.e(LOG_TAG, "sendResponse: OcException occured: " + e.toString()); } Log.i(LOG_TAG, "sendResponse: exit"); } @Override public void onUpdateConfigurationsCallback( Vector<OcHeaderOption> headerOptions, OcRepresentation rep, int errorValue) { // TODO Auto-generated method stub } @Override public void onGetConfigurationsCallback( 
Vector<OcHeaderOption> headerOptions, OcRepresentation rep, int errorValue) { // TODO Auto-generated method stub } } // Default values for Resources class ConfigurationDefaultValues { // configuration Resource default values public static String defaultLocation = new String(); public static String defaultRegion = new String(); public static String defaultSystemTime = new String(); public static String defaultCurrency = new String(); public static String ConURIPrefix = "/oic/con"; public static String ConResourceTypePrefix = "oic.con"; // Diagnostics Resource default values public static String diagURIPrefix = "/oic/diag"; public static String diagResourceTypePrefix = "oic.diag"; public static String diagnosticsValue = "false"; public static String defaultFactoryReset = "false"; public static String defaultReboot = "false"; public static String defaultStartCollection = "false"; }
/*
 * Copyright 2009 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.EnumType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.ObjectType;

import javax.annotation.Nullable;

/**
 * Set the JSDocInfo on all types.
 *
 * Propagates JSDoc across the type graph, but not across the symbol graph.
 * This means that if you have:
 * <code>
 * var x = new Foo();
 * x.bar;
 * </code>
 * then the JSType attached to x.bar may get associated JSDoc, but the
 * Node and Var will not.
 *
 * JSDoc is initially attached to AST Nodes at parse time.
 * There are 3 ways that JSDoc get propagated across the type system.
 * 1) Nominal types (e.g., constructors) may contain JSDocInfo for their
 *    declaration.
 * 2) Object types have a JSDocInfo slot for each property on that type.
 * 3) Shape types (like structural functions) may have JSDocInfo.
 *
 * #1 and #2 should be self-explanatory, and non-controversial. #3 is
 * a bit trickier. It means that if you have:
 * <code>
 * /** @param {number} x /
 * Foo.prototype.bar = goog.abstractMethod;
 * </code>
 * the JSDocInfo will appear in two places in the type system: in the 'bar'
 * slot of Foo.prototype, and on the function expression type created by
 * this expression.
 *
 * @author nicksantos@google.com (Nick Santos)
 */
class InferJSDocInfo extends AbstractPostOrderCallback
    implements HotSwapCompilerPass {

  private final AbstractCompiler compiler;

  // Assigned by process()/hotSwapScript() but never read; kept for parity with
  // other passes that track whether we are in externs.
  @SuppressWarnings("unused")
  private boolean inExterns;

  InferJSDocInfo(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  /**
   * Runs the pass over the externs tree (if any) and then the main source tree.
   */
  @Override
  public void process(Node externs, Node root) {
    if (externs != null) {
      inExterns = true;
      NodeTraversal.traverse(compiler, externs, this);
    }
    if (root != null) {
      inExterns = false;
      NodeTraversal.traverse(compiler, root, this);
    }
  }

  /**
   * Re-runs the pass on a single (non-externs) script for hot-swap compilation.
   */
  @Override
  public void hotSwapScript(Node root, Node originalRoot) {
    Preconditions.checkNotNull(root);
    Preconditions.checkState(root.isScript());
    inExterns = false;
    NodeTraversal.traverse(compiler, root, this);
  }

  /**
   * Post-order visitor: attaches JSDocInfo found on NAME declarations and
   * GETPROP property references to the corresponding JSTypes.
   */
  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    JSDocInfo docInfo;

    switch (n.getType()) {
      // Infer JSDocInfo on types of all type declarations on variables.
      case Token.NAME:
        if (parent == null) {
          return;
        }

        // Only allow JSDoc on VARs, function declarations, and assigns.
        if (!parent.isVar() &&
            !NodeUtil.isFunctionDeclaration(parent) &&
            !(parent.isAssign() &&
              n == parent.getFirstChild())) {
          return;
        }

        // There are four places the doc info could live.
        // 1) A FUNCTION node.
        // /** ... */ function f() { ... }
        // 2) An ASSIGN parent.
        // /** ... */ x = function () { ... }
        // 3) A NAME parent.
        // var x, /** ... */ y = function() { ... }
        // 4) A VAR gramps.
        // /** ... */ var x = function() { ... }
        docInfo = n.getJSDocInfo();
        if (docInfo == null &&
            !(parent.isVar() &&
                !parent.hasOneChild())) {
          docInfo = parent.getJSDocInfo();
        }

        // Try to find the type of the NAME.
        JSType varType = n.getJSType();
        if (varType == null && parent.isFunction()) {
          varType = parent.getJSType();
        }

        // If we have no type to attach JSDocInfo to, then there's nothing
        // we can do.
        if (varType == null || docInfo == null) {
          return;
        }

        // Dereference the type. If the result is not an object, or already
        // has docs attached, then do nothing.
        ObjectType objType = dereferenceToObject(varType);
        if (objType == null || objType.getJSDocInfo() != null) {
          return;
        }

        attachJSDocInfoToNominalTypeOrShape(objType, docInfo, n.getString());
        break;

      case Token.GETPROP:
        // Infer JSDocInfo on properties.
        // There are two ways to write doc comments on a property.
        //
        // 1)
        // /** @deprecated */
        // obj.prop = ...
        //
        // 2)
        // /** @deprecated */
        // obj.prop;
        if (parent.isExprResult() ||
            (parent.isAssign() &&
             parent.getFirstChild() == n)) {
          docInfo = n.getJSDocInfo();
          if (docInfo == null) {
            docInfo = parent.getJSDocInfo();
          }
          if (docInfo != null) {
            ObjectType lhsType =
                dereferenceToObject(n.getFirstChild().getJSType());
            if (lhsType != null) {
              // Put the JSDoc in the property slot, if there is one.
              String propName = n.getLastChild().getString();
              if (lhsType.hasOwnProperty(propName)) {
                lhsType.setPropertyJSDocInfo(propName, docInfo);
              }

              // Put the JSDoc in any constructors or function shapes as well.
              ObjectType propType =
                  dereferenceToObject(lhsType.getPropertyType(propName));
              if (propType != null) {
                attachJSDocInfoToNominalTypeOrShape(
                    propType, docInfo, n.getQualifiedName());
              }
            }
          }
        }
        break;
    }
  }

  /**
   * Dereferences the given type to an object, or returns null.
   */
  private ObjectType dereferenceToObject(JSType type) {
    return ObjectType.cast(type == null ? null : type.dereference());
  }

  /**
   * Handle cases #1 and #3 in the class doc.
   */
  private void attachJSDocInfoToNominalTypeOrShape(
      ObjectType objType, JSDocInfo docInfo, @Nullable String qName) {
    if (objType.isConstructor() ||
        objType.isEnumType() ||
        objType.isInterface()) {
      // Named types: only attach JSDoc when the reference name matches the
      // qualified name being documented, so aliases don't steal the docs.
      if (objType.hasReferenceName() &&
          objType.getReferenceName().equals(qName)) {
        objType.setJSDocInfo(docInfo);

        if (objType.isConstructor() || objType.isInterface()) {
          JSType.toMaybeFunctionType(objType).getInstanceType().setJSDocInfo(
              docInfo);
        } else if (objType instanceof EnumType) {
          ((EnumType) objType).getElementsType().setJSDocInfo(docInfo);
        }
      }
    } else if (!objType.isNativeObjectType() &&
        objType.isFunctionType()) {
      // Structural functions.
      objType.setJSDocInfo(docInfo);
    }
  }
}
/*
 * Copyright (C) 2013 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.squareup.picasso;

import android.graphics.Bitmap;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.widget.ImageView;
import android.widget.RemoteViews;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;

import static android.graphics.Bitmap.Config.ARGB_8888;
import static com.squareup.picasso.Picasso.LoadedFrom.MEMORY;
import static com.squareup.picasso.Picasso.Priority.HIGH;
import static com.squareup.picasso.Picasso.Priority.LOW;
import static com.squareup.picasso.Picasso.Priority.NORMAL;
import static com.squareup.picasso.Picasso.RequestTransformer.IDENTITY;
import static com.squareup.picasso.RemoteViewsAction.AppWidgetAction;
import static com.squareup.picasso.RemoteViewsAction.NotificationAction;
import static com.squareup.picasso.TestUtils.STABLE_1;
import static com.squareup.picasso.TestUtils.STABLE_URI_KEY_1;
import static com.squareup.picasso.TestUtils.TRANSFORM_REQUEST_ANSWER;
import static com.squareup.picasso.TestUtils.URI_1;
import static com.squareup.picasso.TestUtils.URI_KEY_1;
import static com.squareup.picasso.TestUtils.makeBitmap;
import static com.squareup.picasso.TestUtils.mockCallback;
import static com.squareup.picasso.TestUtils.mockFitImageViewTarget;
import static com.squareup.picasso.TestUtils.mockImageViewTarget;
import static com.squareup.picasso.TestUtils.mockNotification;
import static com.squareup.picasso.TestUtils.mockRemoteViews;
import static com.squareup.picasso.TestUtils.mockTarget;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.fest.assertions.api.Assertions.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

/** Unit tests for {@link RequestCreator} covering get/fetch/into behavior. */
@RunWith(RobolectricTestRunner.class) @Config(manifest = Config.NONE)
public class RequestCreatorTest {

  @Mock Picasso picasso;
  @Captor ArgumentCaptor<Action> actionCaptor;

  final Bitmap bitmap = makeBitmap();

  @Before public void shutUp() {
    initMocks(this);
    when(picasso.transformRequest(any(Request.class))).thenAnswer(TRANSFORM_REQUEST_ANSWER);
  }

  @Test public void getOnMainCrashes() throws IOException {
    try {
      new RequestCreator(picasso, URI_1, 0).get();
      fail("Calling get() on main thread should throw exception");
    } catch (IllegalStateException ignored) {
    }
  }

  @Test public void loadWithShutdownCrashes() {
    picasso.shutdown = true;
    try {
      new RequestCreator(picasso, URI_1, 0).fetch();
      fail("Should have crashed with a shutdown picasso.");
    } catch (IllegalStateException ignored) {
    }
  }

  @Test public void getReturnsNullIfNullUriAndResourceId() throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    final Bitmap[] result = new Bitmap[1];
    new Thread(new Runnable() {
      @Override public void run() {
        try {
          result[0] = new RequestCreator(picasso, null, 0).get();
        } catch (IOException e) {
          fail(e.getMessage());
        } finally {
          latch.countDown();
        }
      }
    }).start();
    latch.await();

    assertThat(result[0]).isNull();
    verifyZeroInteractions(picasso);
  }

  @Test public void fetchSubmitsFetchRequest() {
    new RequestCreator(picasso, URI_1, 0).fetch();
    verify(picasso).submit(actionCaptor.capture());
    assertThat(actionCaptor.getValue()).isInstanceOf(FetchAction.class);
  }

  @Test public void fetchWithFitThrows() {
    try {
      new RequestCreator(picasso, URI_1, 0).fit().fetch();
      fail("Calling fetch() with fit() should throw an exception");
    } catch (IllegalStateException ignored) {
    }
  }

  @Test public void fetchWithDefaultPriority() {
    new RequestCreator(picasso, URI_1, 0).fetch();
    verify(picasso).submit(actionCaptor.capture());
    assertThat(actionCaptor.getValue().getPriority()).isEqualTo(LOW);
  }

  @Test public void fetchWithCustomPriority() {
    new RequestCreator(picasso, URI_1, 0).priority(HIGH).fetch();
    verify(picasso).submit(actionCaptor.capture());
    assertThat(actionCaptor.getValue().getPriority()).isEqualTo(HIGH);
  }

  @Test public void fetchWithCache() {
    when(picasso.quickMemoryCacheCheck(URI_KEY_1)).thenReturn(bitmap);
    new RequestCreator(picasso, URI_1, 0).memoryPolicy(MemoryPolicy.NO_CACHE).fetch();
    verify(picasso, never()).enqueueAndSubmit(any(Action.class));
  }

  @Test public void fetchWithMemoryPolicyNoCache() {
    new RequestCreator(picasso, URI_1, 0).memoryPolicy(MemoryPolicy.NO_CACHE).fetch();
    verify(picasso, never()).quickMemoryCacheCheck(URI_KEY_1);
    verify(picasso).submit(actionCaptor.capture());
  }

  @Test public void intoTargetWithNullThrows() {
    try {
      new RequestCreator(picasso, URI_1, 0).into((Target) null);
      fail("Calling into() with null Target should throw exception");
    } catch (IllegalArgumentException ignored) {
    }
  }

  @Test public void intoTargetWithFitThrows() {
    try {
      Target target = mockTarget();
      new RequestCreator(picasso, URI_1, 0).fit().into(target);
      fail("Calling into() target with fit() should throw exception");
    } catch (IllegalStateException ignored) {
    }
  }

  // BUGFIX: this method was missing its @Test annotation, so the test was never run.
  @Test public void intoTargetNoPlaceholderCallsWithNull() {
    Target target = mockTarget();
    new RequestCreator(picasso, URI_1, 0).noPlaceholder().into(target);
    verify(target).onPrepareLoad(null);
  }

  @Test public void intoTargetWithNullUriAndResourceIdSkipsAndCancels() {
    Target target = mockTarget();
    Drawable placeHolderDrawable = mock(Drawable.class);
    new RequestCreator(picasso, null, 0).placeholder(placeHolderDrawable).into(target);
    verify(picasso).cancelRequest(target);
    verify(target).onPrepareLoad(placeHolderDrawable);
    verifyNoMoreInteractions(picasso);
  }

  @Test public void intoTargetWithQuickMemoryCacheCheckDoesNotSubmit() {
    when(picasso.quickMemoryCacheCheck(URI_KEY_1)).thenReturn(bitmap);
    Target target = mockTarget();
    new RequestCreator(picasso, URI_1, 0).into(target);
    verify(target).onBitmapLoaded(bitmap, MEMORY);
    verify(picasso).cancelRequest(target);
    verify(picasso, never()).enqueueAndSubmit(any(Action.class));
  }

  @Test public void intoTargetAndSkipMemoryCacheDoesNotCheckMemoryCache() {
    Target target = mockTarget();
    new RequestCreator(picasso, URI_1, 0).skipMemoryCache().into(target);
    verify(picasso, never()).quickMemoryCacheCheck(URI_KEY_1);
  }

  @Test public void intoTargetWithSkipMemoryPolicy() {
    Target target = mockTarget();
    new RequestCreator(picasso, URI_1, 0).memoryPolicy(MemoryPolicy.NO_CACHE).into(target);
    verify(picasso, never()).quickMemoryCacheCheck(URI_KEY_1);
  }

  @Test public void intoTargetAndNotInCacheSubmitsTargetRequest() {
    Target target = mockTarget();
    Drawable placeHolderDrawable = mock(Drawable.class);
    new RequestCreator(picasso, URI_1, 0).placeholder(placeHolderDrawable).into(target);
    verify(target).onPrepareLoad(placeHolderDrawable);
    verify(picasso).enqueueAndSubmit(actionCaptor.capture());
    assertThat(actionCaptor.getValue()).isInstanceOf(TargetAction.class);
  }

  @Test public void targetActionWithDefaultPriority() {
    new RequestCreator(picasso, URI_1, 0).into(mockTarget());
    verify(picasso).enqueueAndSubmit(actionCaptor.capture());
    assertThat(actionCaptor.getValue().getPriority()).isEqualTo(NORMAL);
  }

  @Test public void targetActionWithCustomPriority() {
    new RequestCreator(picasso, URI_1, 0).priority(HIGH).into(mockTarget());
    verify(picasso).enqueueAndSubmit(actionCaptor.capture());
    assertThat(actionCaptor.getValue().getPriority()).isEqualTo(HIGH);
  }

  @Test public void targetActionWithDefaultTag() {
    new RequestCreator(picasso, URI_1, 0).into(mockTarget());
    verify(picasso).enqueueAndSubmit(actionCaptor.capture());
    assertThat(actionCaptor.getValue().getTag()).isEqualTo(actionCaptor.getValue());
  }

  @Test public void targetActionWithCustomTag() {
    new RequestCreator(picasso, URI_1, 0).tag("tag").into(mockTarget());
    verify(picasso).enqueueAndSubmit(actionCaptor.capture());
    assertThat(actionCaptor.getValue().getTag()).isEqualTo("tag");
  }

  @Test public void intoImageViewWithNullThrows() {
    try {
      new RequestCreator(picasso, URI_1, 0).into((ImageView) null);
      fail("Calling into() with null ImageView should throw exception");
    } catch (IllegalArgumentException ignored) {
    }
  }

  @Test public void intoImageViewWithNullUriAndResourceIdSkipsAndCancels() {
    ImageView target = mockImageViewTarget();
    new RequestCreator(picasso, null, 0).into(target);
    verify(picasso).cancelRequest(target);
    verify(picasso, never()).quickMemoryCacheCheck(anyString());
    verify(picasso, never()).enqueueAndSubmit(any(Action.class));
  }

  @Test public void intoImageViewWithQuickMemoryCacheCheckDoesNotSubmit() {
    Picasso picasso =
        spy(new Picasso(Robolectric.application, mock(Dispatcher.class), Cache.NONE, null, IDENTITY,
            null, mock(Stats.class), ARGB_8888, false, false));
    doReturn(bitmap).when(picasso).quickMemoryCacheCheck(URI_KEY_1);
    ImageView target =
mockImageViewTarget(); Callback callback = mockCallback(); new RequestCreator(picasso, URI_1, 0).into(target, callback); verify(target).setImageDrawable(any(PicassoDrawable.class)); verify(callback).onSuccess(); verify(picasso).cancelRequest(target); verify(picasso, never()).enqueueAndSubmit(any(Action.class)); } @Test public void intoImageViewSetsPlaceholderDrawable() { Picasso picasso = spy(new Picasso(Robolectric.application, mock(Dispatcher.class), Cache.NONE, null, IDENTITY, null, mock(Stats.class), ARGB_8888, false, false)); ImageView target = mockImageViewTarget(); Drawable placeHolderDrawable = mock(Drawable.class); new RequestCreator(picasso, URI_1, 0).placeholder(placeHolderDrawable).into(target); verify(target).setImageDrawable(placeHolderDrawable); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue()).isInstanceOf(ImageViewAction.class); } @Test public void intoImageViewNoPlaceholderDrawable() { Picasso picasso = spy(new Picasso(Robolectric.application, mock(Dispatcher.class), Cache.NONE, null, IDENTITY, null, mock(Stats.class), ARGB_8888, false, false)); ImageView target = mockImageViewTarget(); new RequestCreator(picasso, URI_1, 0).noPlaceholder().into(target); verifyNoMoreInteractions(target); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue()).isInstanceOf(ImageViewAction.class); } @Test public void intoImageViewSetsPlaceholderWithResourceId() { Picasso picasso = spy(new Picasso(Robolectric.application, mock(Dispatcher.class), Cache.NONE, null, IDENTITY, null, mock(Stats.class), ARGB_8888, false, false)); ImageView target = mockImageViewTarget(); new RequestCreator(picasso, URI_1, 0).placeholder(android.R.drawable.picture_frame).into(target); ArgumentCaptor<Drawable> drawableCaptor = ArgumentCaptor.forClass(Drawable.class); verify(target).setImageDrawable(drawableCaptor.capture()); assertThat(Robolectric.shadowOf(drawableCaptor.getValue()).getCreatedFromResId()) // 
.isEqualTo(android.R.drawable.picture_frame); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue()).isInstanceOf(ImageViewAction.class); } @Test public void cancelNotOnMainThreadCrashes() throws InterruptedException { doCallRealMethod().when(picasso).cancelRequest(any(Target.class)); final CountDownLatch latch = new CountDownLatch(1); new Thread(new Runnable() { @Override public void run() { try { new RequestCreator(picasso, null, 0).into(mockTarget()); fail("Should have thrown IllegalStateException"); } catch (IllegalStateException ignored) { } finally { latch.countDown(); } } }).start(); latch.await(); } @Test public void intoNotOnMainThreadCrashes() throws InterruptedException { doCallRealMethod().when(picasso).enqueueAndSubmit(any(Action.class)); final CountDownLatch latch = new CountDownLatch(1); new Thread(new Runnable() { @Override public void run() { try { new RequestCreator(picasso, URI_1, 0).into(mockImageViewTarget()); fail("Should have thrown IllegalStateException"); } catch (IllegalStateException ignored) { } finally { latch.countDown(); } } }).start(); latch.await(); } @Test public void intoImageViewAndNotInCacheSubmitsImageViewRequest() { ImageView target = mockImageViewTarget(); new RequestCreator(picasso, URI_1, 0).into(target); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue()).isInstanceOf(ImageViewAction.class); } @Test public void intoImageViewWithFitAndNoDimensionsQueuesDeferredImageViewRequest() { ImageView target = mockFitImageViewTarget(true); when(target.getWidth()).thenReturn(0); when(target.getHeight()).thenReturn(0); new RequestCreator(picasso, URI_1, 0).fit().into(target); verify(picasso, never()).enqueueAndSubmit(any(Action.class)); verify(picasso).defer(eq(target), any(DeferredRequestCreator.class)); } @Test public void intoImageViewWithFitAndDimensionsQueuesImageViewRequest() { ImageView target = mockFitImageViewTarget(true); 
when(target.getWidth()).thenReturn(100); when(target.getHeight()).thenReturn(100); new RequestCreator(picasso, URI_1, 0).fit().into(target); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue()).isInstanceOf(ImageViewAction.class); } @Test public void intoImageViewAndSkipMemoryCacheDoesNotCheckMemoryCache() { ImageView target = mockImageViewTarget(); new RequestCreator(picasso, URI_1, 0).skipMemoryCache().into(target); verify(picasso, never()).quickMemoryCacheCheck(URI_KEY_1); } @Test public void intoImageViewWithSkipMemoryCachePolicy() { ImageView target = mockImageViewTarget(); new RequestCreator(picasso, URI_1, 0).memoryPolicy(MemoryPolicy.NO_CACHE).into(target); verify(picasso, never()).quickMemoryCacheCheck(URI_KEY_1); } @Test public void intoImageViewWithFitAndResizeThrows() { try { ImageView target = mockImageViewTarget(); new RequestCreator(picasso, URI_1, 0).fit().resize(10, 10).into(target); fail("Calling into() ImageView with fit() and resize() should throw exception"); } catch (IllegalStateException ignored) { } } @Test public void imageViewActionWithDefaultPriority() { new RequestCreator(picasso, URI_1, 0).into(mockImageViewTarget()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getPriority()).isEqualTo(NORMAL); } @Test public void imageViewActionWithCustomPriority() { new RequestCreator(picasso, URI_1, 0).priority(HIGH).into(mockImageViewTarget()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getPriority()).isEqualTo(HIGH); } @Test public void imageViewActionWithDefaultTag() { new RequestCreator(picasso, URI_1, 0).into(mockImageViewTarget()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getTag()).isEqualTo(actionCaptor.getValue()); } @Test public void imageViewActionWithCustomTag() { new RequestCreator(picasso, URI_1, 0).tag("tag").into(mockImageViewTarget()); 
verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getTag()).isEqualTo("tag"); } @Test public void intoRemoteViewsWidgetQueuesAppWidgetAction() { new RequestCreator(picasso, URI_1, 0).into(mockRemoteViews(), 0, new int[] { 1, 2, 3 }); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue()).isInstanceOf(AppWidgetAction.class); } @Test public void intoRemoteViewsNotificationQueuesNotificationAction() { new RequestCreator(picasso, URI_1, 0).into(mockRemoteViews(), 0, 0, mockNotification()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue()).isInstanceOf(NotificationAction.class); } @Test public void intoRemoteViewsNotificationWithNullRemoteViewsThrows() { try { new RequestCreator(picasso, URI_1, 0).into(null, 0, 0, mockNotification()); fail("Calling into() with null RemoteViews should throw exception"); } catch (IllegalArgumentException ignored) { } } @Test public void intoRemoteViewsWidgetWithPlaceholderDrawableThrows() { try { new RequestCreator(picasso, URI_1, 0).placeholder(new ColorDrawable(0)) .into(mockRemoteViews(), 0, new int[] { 1, 2, 3 }); fail("Calling into() with placeholder drawable should throw exception"); } catch (IllegalArgumentException ignored) { } } @Test public void intoRemoteViewsWidgetWithErrorDrawableThrows() { try { new RequestCreator(picasso, URI_1, 0).error(new ColorDrawable(0)) .into(mockRemoteViews(), 0, new int[] { 1, 2, 3 }); fail("Calling into() with error drawable should throw exception"); } catch (IllegalArgumentException ignored) { } } @Test public void intoRemoteViewsNotificationWithPlaceholderDrawableThrows() { try { new RequestCreator(picasso, URI_1, 0).placeholder(new ColorDrawable(0)) .into(mockRemoteViews(), 0, 0, mockNotification()); fail("Calling into() with error drawable should throw exception"); } catch (IllegalArgumentException ignored) { } } @Test public void 
intoRemoteViewsNotificationWithErrorDrawableThrows() { try { new RequestCreator(picasso, URI_1, 0).error(new ColorDrawable(0)) .into(mockRemoteViews(), 0, 0, mockNotification()); fail("Calling into() with error drawable should throw exception"); } catch (IllegalArgumentException ignored) { } } @Test public void intoRemoteViewsWidgetWithNullRemoteViewsThrows() { try { new RequestCreator(picasso, URI_1, 0).into(null, 0, new int[] { 1, 2, 3 }); fail("Calling into() with null RemoteViews should throw exception"); } catch (IllegalArgumentException ignored) { } } @Test public void intoRemoteViewsWidgetWithNullAppWidgetIdsThrows() { try { new RequestCreator(picasso, URI_1, 0).into(mockRemoteViews(), 0, null); fail("Calling into() with null appWidgetIds should throw exception"); } catch (IllegalArgumentException ignored) { } } @Test public void intoRemoteViewsNotificationWithNullNotificationThrows() { try { new RequestCreator(picasso, URI_1, 0).into(mockRemoteViews(), 0, 0, null); fail("Calling into() with null Notification should throw exception"); } catch (IllegalArgumentException ignored) { } } @Test public void intoRemoteViewsWidgetWithFitThrows() { try { RemoteViews remoteViews = mockRemoteViews(); new RequestCreator(picasso, URI_1, 0).fit().into(remoteViews, 1, new int[] { 1, 2, 3 }); fail("Calling fit() into remote views should throw exception"); } catch (IllegalStateException ignored) { } } @Test public void intoRemoteViewsNotificationWithFitThrows() { try { RemoteViews remoteViews = mockRemoteViews(); new RequestCreator(picasso, URI_1, 0).fit().into(remoteViews, 1, 1, mockNotification()); fail("Calling fit() into remote views should throw exception"); } catch (IllegalStateException ignored) { } } @Test public void intoTargetNoResizeWithCenterInsideOrCenterCropThrows() { try { new RequestCreator(picasso, URI_1, 0).centerInside().into(mockTarget()); fail("Center inside with unknown width should throw exception."); } catch (IllegalStateException ignored) { } try { 
new RequestCreator(picasso, URI_1, 0).centerCrop().into(mockTarget()); fail("Center inside with unknown height should throw exception."); } catch (IllegalStateException ignored) { } } @Test public void appWidgetActionWithDefaultPriority() throws Exception { new RequestCreator(picasso, URI_1, 0).into(mockRemoteViews(), 0, new int[] { 1, 2, 3 }); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getPriority()).isEqualTo(NORMAL); } @Test public void appWidgetActionWithCustomPriority() { new RequestCreator(picasso, URI_1, 0).priority(HIGH) .into(mockRemoteViews(), 0, new int[]{1, 2, 3}); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getPriority()).isEqualTo(HIGH); } @Test public void notificationActionWithDefaultPriority() { new RequestCreator(picasso, URI_1, 0).into(mockRemoteViews(), 0, 0, mockNotification()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getPriority()).isEqualTo(NORMAL); } @Test public void notificationActionWithCustomPriority() { new RequestCreator(picasso, URI_1, 0).priority(HIGH) .into(mockRemoteViews(), 0, 0, mockNotification()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getPriority()).isEqualTo(HIGH); } @Test public void appWidgetActionWithDefaultTag() { new RequestCreator(picasso, URI_1, 0).into(mockRemoteViews(), 0, new int[] { 1, 2, 3 }); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getTag()).isEqualTo(actionCaptor.getValue()); } @Test public void appWidgetActionWithCustomTag() { new RequestCreator(picasso, URI_1, 0).tag("tag") .into(mockRemoteViews(), 0, new int[] { 1, 2, 3 }); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getTag()).isEqualTo("tag"); } @Test public void notificationActionWithDefaultTag() { new RequestCreator(picasso, URI_1, 
0).into(mockRemoteViews(), 0, 0, mockNotification()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getTag()).isEqualTo(actionCaptor.getValue()); } @Test public void notificationActionWithCustomTag() { new RequestCreator(picasso, URI_1, 0).tag("tag") .into(mockRemoteViews(), 0, 0, mockNotification()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getTag()).isEqualTo("tag"); } @Test public void nullMemoryPolicy() { try { new RequestCreator().memoryPolicy(null); fail("Null memory policy should throw exception."); } catch (IllegalArgumentException ignored) { } } @Test public void nullAdditionalMemoryPolicy() { try { new RequestCreator().memoryPolicy(MemoryPolicy.NO_CACHE, null); fail("Null additional memory policy should throw exception."); } catch (IllegalArgumentException ignored) { } } @Test public void nullMemoryPolicyAssholeStyle() { try { new RequestCreator().memoryPolicy(MemoryPolicy.NO_CACHE, new MemoryPolicy[] { null }); fail("Null additional memory policy should throw exception."); } catch (IllegalArgumentException ignored) { } } @Test public void nullNetworkPolicy() { try { new RequestCreator().networkPolicy(null); fail("Null network policy should throw exception."); } catch (IllegalArgumentException ignored) { } } @Test public void nullAdditionalNetworkPolicy() { try { new RequestCreator().networkPolicy(NetworkPolicy.NO_CACHE, null); fail("Null additional network policy should throw exception."); } catch (IllegalArgumentException ignored) { } } @Test public void nullNetworkPolicyAssholeStyle() { try { new RequestCreator().networkPolicy(NetworkPolicy.NO_CACHE, new NetworkPolicy[] { null }); fail("Null additional network policy should throw exception."); } catch (IllegalArgumentException ignored) { } } @Test public void invalidResize() { try { new RequestCreator().resize(-1, 10); fail("Negative width should throw exception."); } catch (IllegalArgumentException 
ignored) { } try { new RequestCreator().resize(10, -1); fail("Negative height should throw exception."); } catch (IllegalArgumentException ignored) { } try { new RequestCreator().resize(0, 0); fail("Zero dimensions should throw exception."); } catch (IllegalArgumentException ignored) { } } @Test public void invalidCenterCrop() { try { new RequestCreator().resize(10, 10).centerInside().centerCrop(); fail("Calling center crop after center inside should throw exception."); } catch (IllegalStateException ignored) { } } @Test public void invalidCenterInside() { try { new RequestCreator().resize(10, 10).centerInside().centerCrop(); fail("Calling center inside after center crop should throw exception."); } catch (IllegalStateException ignored) { } } @Test public void invalidPlaceholderImage() { try { new RequestCreator().placeholder(0); fail("Resource ID of zero should throw exception."); } catch (IllegalArgumentException ignored) { } try { new RequestCreator().placeholder(1).placeholder(new ColorDrawable(0)); fail("Two placeholders should throw exception."); } catch (IllegalStateException ignored) { } try { new RequestCreator().placeholder(new ColorDrawable(0)).placeholder(1); fail("Two placeholders should throw exception."); } catch (IllegalStateException ignored) { } } @Test public void invalidNoPlaceholder() { try { new RequestCreator().noPlaceholder().placeholder(new ColorDrawable(0)); fail("Placeholder after no placeholder should throw exception."); } catch (IllegalStateException ignored) { } try { new RequestCreator().noPlaceholder().placeholder(1); fail("Placeholder after no placeholder should throw exception."); } catch (IllegalStateException ignored) { } try { new RequestCreator().placeholder(1).noPlaceholder(); fail("No placeholder after placeholder should throw exception."); } catch (IllegalStateException ignored) { } try { new RequestCreator().placeholder(new ColorDrawable(0)).noPlaceholder(); fail("No placeholder after placeholder should throw exception."); 
} catch (IllegalStateException ignored) { } } @Test public void invalidErrorImage() { try { new RequestCreator().error(0); fail("Resource ID of zero should throw exception."); } catch (IllegalArgumentException ignored) { } try { new RequestCreator().error(null); fail("Null drawable should throw exception."); } catch (IllegalArgumentException ignored) { } try { new RequestCreator().error(1).error(new ColorDrawable(0)); fail("Two placeholders should throw exception."); } catch (IllegalStateException ignored) { } try { new RequestCreator().error(new ColorDrawable(0)).error(1); fail("Two placeholders should throw exception."); } catch (IllegalStateException ignored) { } } @Test public void invalidPriority() { try { new RequestCreator().priority(null); fail("Null priority should throw exception."); } catch (IllegalArgumentException ignored) { } try { new RequestCreator().priority(LOW).priority(HIGH); fail("Two priorities should throw exception."); } catch (IllegalStateException ignored) { } } @Test public void invalidTag() { try { new RequestCreator().tag(null); fail("Null tag should throw exception."); } catch (IllegalArgumentException ignored) { } try { new RequestCreator().tag("tag1").tag("tag2"); fail("Two tags should throw exception."); } catch (IllegalStateException ignored) { } } @Test(expected = IllegalArgumentException.class) public void nullTransformationsInvalid() { new RequestCreator().transform((Transformation) null); } @Test(expected = IllegalArgumentException.class) public void nullTransformationListInvalid() { new RequestCreator().transform((List<Transformation>) null); } @Test(expected = IllegalArgumentException.class) public void nullKeyTransformationInvalid() { new RequestCreator().transform(new Transformation() { @Override public Bitmap transform(Bitmap source) { return source; } @Override public String key() { return null; } }); } @Test(expected = IllegalArgumentException.class) public void nullKeyInTransformationListInvalid() { List<? 
extends Transformation> transformations = Collections.singletonList(new Transformation() { @Override public Bitmap transform(Bitmap source) { return source; } @Override public String key() { return null; } }); new RequestCreator().transform(transformations); } @Test public void transformationListImplementationValid() { List<TestTransformation> transformations = Collections.singletonList(new TestTransformation("test")); new RequestCreator().transform(transformations); // TODO verify something! } @Test public void nullTargetsInvalid() { try { new RequestCreator().into((ImageView) null); fail("Null ImageView should throw exception."); } catch (IllegalArgumentException ignored) { } try { new RequestCreator().into((Target) null); fail("Null Target should throw exception."); } catch (IllegalArgumentException ignored) { } } @Test public void imageViewActionWithStableKey() throws Exception { new RequestCreator(picasso, URI_1, 0).stableKey(STABLE_1).into(mockImageViewTarget()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getKey()).isEqualTo(STABLE_URI_KEY_1); } @Test public void imageViewActionWithStableKeyNull() throws Exception { new RequestCreator(picasso, URI_1, 0).stableKey(null).into(mockImageViewTarget()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getKey()).isEqualTo(URI_KEY_1); } @Test public void notPurgeable() { new RequestCreator(picasso, URI_1, 0).into(mockImageViewTarget()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getRequest().purgeable).isFalse(); } @Test public void purgeable() { new RequestCreator(picasso, URI_1, 0).purgeable().into(mockImageViewTarget()); verify(picasso).enqueueAndSubmit(actionCaptor.capture()); assertThat(actionCaptor.getValue().getRequest().purgeable).isTrue(); } }
/** * Copyright 2007-2008 University Of Southern California * * <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * <p>http://www.apache.org/licenses/LICENSE-2.0 * * <p>Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.XMLWriter; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.classes.Notifications; import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; /** * The Transformation Catalog object that represents the entries in the DAX transformation section. 
* * @author gmehta * @version $Revision$ */ public class Executable extends CatalogType { /** ARCH Types */ public static enum ARCH { X86, x86, X86_64, x86_64, PPC, ppc, PPC_64, ppc_64, IA64, ia64, SPARCV7, sparcv7, SPARCV9, sparcv9, ppc64le } /** OS Types */ public static enum OS { LINUX, linux, SUNOS, sunos, AIX, aix, MACOSX, macosx, WINDOWS, windows } /** Namespace of the executable */ protected String mNamespace; /** Name of the executable */ protected String mName; /** Version of the executable */ protected String mVersion; /** Architecture the executable is compiled for */ protected ARCH mArch; /** Os the executable is compiled for */ protected OS mOs; /** Os release the executable is compiled for */ protected String mOsRelease; /** OS version the executable is compiled for */ protected String mOsVersion; /** Glibc the executable is compiled for */ protected String mGlibc; /** Flag to mark if the executable is installed or can be staged. */ protected boolean mInstalled = true; /** List of Notification objects */ protected List<Invoke> mInvokes; /** Other executables this executable requires */ protected Set<Executable> mRequires; /** * Create a new executable * * @param name name */ public Executable(String name) { this("", name, ""); } /** * Copy Constructor * * @param e executable to copy from */ public Executable(Executable e) { super(e); this.mNamespace = e.mNamespace; this.mName = e.mName; this.mVersion = e.mVersion; this.mArch = e.mArch; this.mOs = e.mOs; this.mOsRelease = e.mOsRelease; this.mOsVersion = e.mOsVersion; this.mGlibc = e.mGlibc; this.mInstalled = e.mInstalled; this.mInvokes = new LinkedList<Invoke>(e.mInvokes); } /** * Create a new Executable * * @param namespace the namespace * @param name the name * @param version the version */ public Executable(String namespace, String name, String version) { super(); mNamespace = (namespace == null) ? "" : namespace; mName = (name == null) ? "" : name; mVersion = (version == null) ? 
"" : version; mInvokes = new LinkedList<Invoke>(); mRequires = new HashSet<Executable>(); } /** * Get the name of the executable * * @return String */ public String getName() { return mName; } /** * Get the namespace of the executable * * @return namespace */ public String getNamespace() { return mNamespace; } /** * Get the version of the executable * * @return version */ public String getVersion() { return mVersion; } /** * Return the list of Notification objects * * @return List of Invoke objects */ public List<Invoke> getInvoke() { return mInvokes; } /** * Return the list of Notification objects (same as getInvoke) * * @return List of Invoke objects */ public List<Invoke> getNotification() { return getInvoke(); } /** * Add a Notification for this Executable same as addNotification * * @param when when to invoke * @param what what executable to invoke including the arguments * @return Executable */ public Executable addInvoke(Invoke.WHEN when, String what) { Invoke i = new Invoke(when, what); mInvokes.add(i); return this; } /** * Add a Notification for this Executable same as addInvoke * * @param when when to invoke * @param what what executable to invoke including the arguments * @return Executable */ public Executable addNotification(Invoke.WHEN when, String what) { return addInvoke(when, what); } /** * Add a Notification for this Executable Same as add Notification * * @param invoke the invoke object containing the notification * @return Executable */ public Executable addInvoke(Invoke invoke) { mInvokes.add(invoke.clone()); return this; } /** * Add a Notification for this Executable Same as addInvoke * * @param invoke the invoke object containing the notification * @return Executable */ public Executable addNotification(Invoke invoke) { return addInvoke(invoke); } /** * Add a List of Notifications for this Executable Same as addNotifications * * @param invokes list of notification objects * @return Executable */ public Executable addInvokes(List<Invoke> 
invokes) { for (Invoke invoke : invokes) { this.addInvoke(invoke); } return this; } /** * Add a List of Notifications for this Executable. Same as addInvokes * * @param invokes list of notification objects * @return Executable */ public Executable addNotifications(List<Invoke> invokes) { return addInvokes(invokes); } /** * Set the architecture the executable is compiled for * * @param arch the architecture * @return the Executable object that was modified */ public Executable setArchitecture(ARCH arch) { mArch = arch; return this; } /** * Set the OS the executable is compiled for * * @param os the OS * @return the Executable object that was modified */ public Executable setOS(OS os) { mOs = os; return this; } /** * Set the osrelease the executable is compiled for * * @param osrelease the os release * @return the Executable object that was modified */ public Executable setOSRelease(String osrelease) { mOsRelease = osrelease; return this; } /** * Set the osversion the executable is compiled for * * @param osversion os version * @return the Executable object that was modified */ public Executable setOSVersion(String osversion) { mOsVersion = osversion; return this; } /** * Set the glibc this executable is compiled for * * @param glibc glibc version * @return the Executable object that was modified */ public Executable setGlibc(String glibc) { mGlibc = glibc; return this; } /** * set the installed flag on the executable. Default is installed * * @return the Executable object that was modified */ public Executable setInstalled() { mInstalled = true; return this; } /** * Unset the installed flag on the executable. Default is installed. 
* * @return the Executable object that was modified */ public Executable unsetInstalled() { mInstalled = false; return this; } /** * Set the installed flag on the executable.Default is installed * * @param installed the installed flag * @return the Executable object that was modified */ public Executable setInstalled(boolean installed) { mInstalled = installed; return this; } /** * Check if the executable is of type installed. * * @return Boolean */ public boolean getInstalled() { return mInstalled; } /** * Get the architecture the Executable is compiled for * * @return Architecture */ public ARCH getArchitecture() { return mArch; } /** * Get the OS the Executable is compiled for * * @return the OS */ public OS getOS() { return mOs; } /** * Get the OS release set for this executable. Returns empty string if not set * * @return String */ public String getOsRelease() { return (mOsRelease == null) ? "" : mOsRelease; } /** * Get the OS version set for this executable. * * @return String */ public String getOsVersion() { return (mOsVersion == null) ? "" : mOsVersion; } /** * Get the Glibc version if any set for this file. Returns empty string if not set * * @return String */ public String getGlibc() { return (mGlibc == null) ? 
"" : mGlibc; } /** * Return boolean indicating whether executable or not * * @return boolean */ public boolean isExecutable() { return true; } /** * Get the set of executables that this executable requires * * @return Set of Executable this main executable requires */ public Set<Executable> getRequirements() { return this.mRequires; } /** * Add another executable as a requirement to this executable * * @param e dependent executable * @return instance to the Executable that was modified */ public Executable addRequirement(Executable e) { this.mRequires.add(e); return this; } /** * Compares whether an object is equal to this instance of Executable or not * * @param obj object to compare against * @return boolean */ @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Executable other = (Executable) obj; if ((this.mNamespace == null) ? (other.mNamespace != null) : !this.mNamespace.equals(other.mNamespace)) { return false; } if ((this.mName == null) ? (other.mName != null) : !this.mName.equals(other.mName)) { return false; } if ((this.mVersion == null) ? (other.mVersion != null) : !this.mVersion.equals(other.mVersion)) { return false; } if (this.mArch != other.mArch) { return false; } if (this.mOs != other.mOs) { return false; } if ((this.mOsRelease == null) ? (other.mOsRelease != null) : !this.mOsRelease.equals(other.mOsRelease)) { return false; } if ((this.mOsVersion == null) ? (other.mOsVersion != null) : !this.mOsVersion.equals(other.mOsVersion)) { return false; } if ((this.mGlibc == null) ? (other.mGlibc != null) : !this.mGlibc.equals(other.mGlibc)) { return false; } if (this.mInstalled != other.mInstalled) { return false; } return true; } @Override public int hashCode() { int hash = 7; hash = 53 * hash + (this.mNamespace != null ? this.mNamespace.hashCode() : 0); hash = 53 * hash + (this.mName != null ? this.mName.hashCode() : 0); hash = 53 * hash + (this.mVersion != null ? 
this.mVersion.hashCode() : 0); hash = 53 * hash + (this.mArch != null ? this.mArch.hashCode() : 0); hash = 53 * hash + (this.mOs != null ? this.mOs.hashCode() : 0); hash = 53 * hash + (this.mOsRelease != null ? this.mOsRelease.hashCode() : 0); hash = 53 * hash + (this.mOsVersion != null ? this.mOsVersion.hashCode() : 0); hash = 53 * hash + (this.mGlibc != null ? this.mGlibc.hashCode() : 0); hash = 53 * hash + (this.mInstalled ? 1 : 0); return hash; } @Override public String toString() { return mNamespace + "::" + mName + ":" + mVersion; } @Override public void toXML(XMLWriter writer) { toXML(writer, 0); } @Override public void toXML(XMLWriter writer, int indent) { if (mProfiles.isEmpty() && mPFNs.isEmpty() && mMetadata.isEmpty()) { mLogger.log( "The executable element for " + mName + " must have atleast 1 profile, 1 pfn or 1 metadata entry. Skipping empty executable element", LogManager.WARNING_MESSAGE_LEVEL); } else { writer.startElement("executable", indent); if (mNamespace != null && !mNamespace.isEmpty()) { writer.writeAttribute("namespace", mNamespace); } writer.writeAttribute("name", mName); if (mVersion != null && !mVersion.isEmpty()) { writer.writeAttribute("version", mVersion); } if (mInstalled) { writer.writeAttribute("installed", "true"); } else { writer.writeAttribute("installed", "false"); } if (mArch != null) { writer.writeAttribute("arch", mArch.toString().toLowerCase()); } if (mOs != null) { writer.writeAttribute("os", mOs.toString().toLowerCase()); } if (mOsRelease != null && !mOsRelease.isEmpty()) { writer.writeAttribute("osrelease", mOsRelease); } if (mOsVersion != null && !mOsVersion.isEmpty()) { writer.writeAttribute("osversion", mOsVersion); } if (mGlibc != null && !mGlibc.isEmpty()) { writer.writeAttribute("glibc", mGlibc); } super.toXML(writer, indent); for (Invoke i : mInvokes) { i.toXML(writer, indent + 1); } writer.endElement(indent); } } /** * Converts the executable into transformation catalog entries * * @return transformation catalog 
entries */ public List<TransformationCatalogEntry> toTransformationCatalogEntries() { List<TransformationCatalogEntry> tceList = new ArrayList<>(); for (PFN pfn : this.getPhysicalFiles()) { TransformationCatalogEntry tce = new TransformationCatalogEntry( this.getNamespace(), this.getName(), this.getVersion()); SysInfo sysinfo = new SysInfo(); sysinfo.setArchitecture( SysInfo.Architecture.valueOf(this.getArchitecture().toString().toLowerCase())); sysinfo.setOS(SysInfo.OS.valueOf(this.getOS().toString().toLowerCase())); sysinfo.setOSRelease(this.getOsRelease()); sysinfo.setOSVersion(this.getOsVersion()); sysinfo.setGlibc(this.getGlibc()); tce.setSysInfo(sysinfo); tce.setType(this.getInstalled() ? TCType.INSTALLED : TCType.STAGEABLE); tce.setResourceId(pfn.getSite()); tce.setPhysicalTransformation(pfn.getURL()); for (Executable e : this.mRequires) { tce.addRequirement(e); } Notifications notifications = new Notifications(); for (Invoke invoke : this.getInvoke()) { notifications.add(new Invoke(invoke)); } tce.addNotifications(notifications); for (edu.isi.pegasus.planner.dax.Profile profile : this.getProfiles()) { tce.addProfile( new edu.isi.pegasus.planner.classes.Profile( profile.getNameSpace(), profile.getKey(), profile.getValue())); } for (MetaData md : this.getMetaData()) { // convert to metadata profile object for planner to use tce.addProfile( new edu.isi.pegasus.planner.classes.Profile( edu.isi.pegasus.planner.classes.Profile.METADATA, md.getKey(), md.getValue())); } tceList.add(tce); } return tceList; } }
/** * Copyright 2015 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spectator.sandbox; import com.netflix.spectator.api.Clock; import com.netflix.spectator.api.Id; import com.netflix.spectator.api.Measurement; import com.netflix.spectator.api.Meter; import com.netflix.spectator.api.Registry; import com.netflix.spectator.api.Spectator; import com.netflix.spectator.api.Statistic; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; /** * Experiment for supporting a distribution summary type that accepts floating point values instead * of just long values. */ public class DoubleDistributionSummary implements Meter { private static final ConcurrentHashMap<Id, DoubleDistributionSummary> INSTANCES = new ConcurrentHashMap<>(); // https://github.com/Netflix/spectator/issues/43 private static final long RESET_FREQ = 60000L; /** * Get or create a double distribution summary with the specified id. * * @param id * Identifier for the metric being registered. * @return * Distribution summary corresponding to the id. */ public static DoubleDistributionSummary get(Id id) { return get(Spectator.globalRegistry(), id); } /** * Get or create a double distribution summary with the specified id. * * @param registry * Registry to use. * @param id * Identifier for the metric being registered. * @return * Distribution summary corresponding to the id. 
*/ static DoubleDistributionSummary get(Registry registry, Id id) { DoubleDistributionSummary instance = INSTANCES.get(id); if (instance == null) { final Clock c = registry.clock(); DoubleDistributionSummary tmp = new DoubleDistributionSummary(c, id, RESET_FREQ); instance = INSTANCES.putIfAbsent(id, tmp); if (instance == null) { instance = tmp; registry.register(tmp); } } return instance; } private static final long ZERO = Double.doubleToLongBits(0.0); private final Clock clock; private final Id id; private final long resetFreq; private final AtomicLong lastResetTime; private final AtomicLong count; private final AtomicLong totalAmount; private final AtomicLong totalOfSquares; private final AtomicLong max; private final Id countId; private final Id totalAmountId; private final Id totalOfSquaresId; private final Id maxId; /** * Create a new instance. */ DoubleDistributionSummary(Clock clock, Id id, long resetFreq) { this.clock = clock; this.id = id; this.resetFreq = resetFreq; lastResetTime = new AtomicLong(clock.wallTime()); count = new AtomicLong(0L); totalAmount = new AtomicLong(ZERO); totalOfSquares = new AtomicLong(ZERO); max = new AtomicLong(ZERO); countId = id.withTag(Statistic.count); totalAmountId = id.withTag(Statistic.totalAmount); totalOfSquaresId = id.withTag(Statistic.totalOfSquares); maxId = id.withTag(Statistic.max); } private void add(AtomicLong num, double amount) { long v; double d; long next; do { v = num.get(); d = Double.longBitsToDouble(v); next = Double.doubleToLongBits(d + amount); } while (!num.compareAndSet(v, next)); } private void max(AtomicLong num, double amount) { long n = Double.doubleToLongBits(amount); long v; double d; do { v = num.get(); d = Double.longBitsToDouble(v); } while (amount > d && !num.compareAndSet(v, n)); } private double toRateLong(AtomicLong num, long deltaMillis, boolean reset) { final long v = reset ? 
num.getAndSet(0L) : num.get(); final double delta = deltaMillis / 1000.0; return v / delta; } private double toRateDouble(AtomicLong num, long deltaMillis, boolean reset) { final long v = reset ? num.getAndSet(ZERO) : num.get(); final double delta = deltaMillis / 1000.0; return Double.longBitsToDouble(v) / delta; } private double toDouble(AtomicLong num, boolean reset) { final long v = reset ? num.getAndSet(ZERO) : num.get(); return Double.longBitsToDouble(v); } @Override public Id id() { return id; } @Override public boolean hasExpired() { return false; } @Override public Iterable<Measurement> measure() { final long now = clock.wallTime(); final long prev = lastResetTime.get(); final long delta = now - prev; final boolean reset = delta > resetFreq; if (reset) { lastResetTime.set(now); } final List<Measurement> ms = new ArrayList<>(3); if (delta > 1000L) { ms.add(new Measurement(countId, now, toRateLong(count, delta, reset))); ms.add(new Measurement(totalAmountId, now, toRateDouble(totalAmount, delta, reset))); ms.add(new Measurement(totalOfSquaresId, now, toRateDouble(totalOfSquares, delta, reset))); ms.add(new Measurement(maxId, now, toDouble(max, reset))); } return ms; } /** * Updates the statistics kept by the summary with the specified amount. * * @param amount * Amount for an event being measured. For example, if the size in bytes of responses * from a server. If the amount is less than 0 the value will be dropped. */ public void record(double amount) { if (amount >= 0.0) { add(totalAmount, amount); add(totalOfSquares, amount * amount); max(max, amount); count.incrementAndGet(); } } /** The number of times that record has been called since this timer was created. */ public long count() { return count.get(); } /** The total amount of all recorded events since this summary was created. */ public double totalAmount() { return Double.longBitsToDouble(totalAmount.get()); } }
package com.oracle.ptsdemo.healthcare.wsclient.osc.salesparty.generated;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlType;
import javax.xml.datatype.XMLGregorianCalendar;

/**
 * Java class for the {@code OrganizationContactRole} complex type of the
 * {@code relationshipService} namespace.
 *
 * <p>NOTE(review): JAXB-generated binding class — regenerate from the service
 * WSDL/XSD rather than hand-editing. Field annotations and {@code propOrder}
 * drive (un)marshalling and must stay in sync with the schema.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "OrganizationContactRole", namespace = "http://xmlns.oracle.com/apps/cdm/foundation/parties/relationshipService/", propOrder = {
    "orgContactRoleId",
    "origSystemReference",
    "createdBy",
    "roleType",
    "orgContactId",
    "creationDate",
    "roleLevel",
    "primaryFlag",
    "lastUpdateDate",
    "lastUpdatedBy",
    "lastUpdateLogin",
    "primaryContactPerRoleType",
    "requestId",
    "status",
    "objectVersionNumber",
    "createdByModule",
    "originalSystemReference"
})
public class OrganizationContactRole {

    @XmlElement(name = "OrgContactRoleId")
    protected Long orgContactRoleId;
    @XmlElement(name = "OrigSystemReference")
    protected String origSystemReference;
    @XmlElement(name = "CreatedBy")
    protected String createdBy;
    @XmlElement(name = "RoleType")
    protected String roleType;
    @XmlElement(name = "OrgContactId")
    protected Long orgContactId;
    @XmlElement(name = "CreationDate")
    protected XMLGregorianCalendar creationDate;
    // nillable element — wrapped in JAXBElement so "absent" and "xsi:nil" differ
    @XmlElementRef(name = "RoleLevel", namespace = "http://xmlns.oracle.com/apps/cdm/foundation/parties/relationshipService/", type = JAXBElement.class)
    protected JAXBElement<String> roleLevel;
    @XmlElement(name = "PrimaryFlag")
    protected Boolean primaryFlag;
    @XmlElement(name = "LastUpdateDate")
    protected XMLGregorianCalendar lastUpdateDate;
    @XmlElement(name = "LastUpdatedBy")
    protected String lastUpdatedBy;
    @XmlElementRef(name = "LastUpdateLogin", namespace = "http://xmlns.oracle.com/apps/cdm/foundation/parties/relationshipService/", type = JAXBElement.class)
    protected JAXBElement<String> lastUpdateLogin;
    @XmlElement(name = "PrimaryContactPerRoleType")
    protected String primaryContactPerRoleType;
    @XmlElementRef(name = "RequestId", namespace = "http://xmlns.oracle.com/apps/cdm/foundation/parties/relationshipService/", type = JAXBElement.class)
    protected JAXBElement<Long> requestId;
    @XmlElement(name = "Status")
    protected String status;
    @XmlElement(name = "ObjectVersionNumber")
    protected Integer objectVersionNumber;
    @XmlElement(name = "CreatedByModule")
    protected String createdByModule;
    @XmlElement(name = "OriginalSystemReference")
    protected List<OriginalSystemReference> originalSystemReference;

    /** Gets the OrgContactRoleId value; may be null if absent in the XML. */
    public Long getOrgContactRoleId() {
        return orgContactRoleId;
    }

    /** Sets the OrgContactRoleId value. */
    public void setOrgContactRoleId(Long value) {
        this.orgContactRoleId = value;
    }

    /** Gets the OrigSystemReference value; may be null. */
    public String getOrigSystemReference() {
        return origSystemReference;
    }

    /** Sets the OrigSystemReference value. */
    public void setOrigSystemReference(String value) {
        this.origSystemReference = value;
    }

    /** Gets the CreatedBy value; may be null. */
    public String getCreatedBy() {
        return createdBy;
    }

    /** Sets the CreatedBy value. */
    public void setCreatedBy(String value) {
        this.createdBy = value;
    }

    /** Gets the RoleType value; may be null. */
    public String getRoleType() {
        return roleType;
    }

    /** Sets the RoleType value. */
    public void setRoleType(String value) {
        this.roleType = value;
    }

    /** Gets the OrgContactId value; may be null. */
    public Long getOrgContactId() {
        return orgContactId;
    }

    /** Sets the OrgContactId value. */
    public void setOrgContactId(Long value) {
        this.orgContactId = value;
    }

    /** Gets the CreationDate value; may be null. */
    public XMLGregorianCalendar getCreationDate() {
        return creationDate;
    }

    /** Sets the CreationDate value. */
    public void setCreationDate(XMLGregorianCalendar value) {
        this.creationDate = value;
    }

    /** Gets the RoleLevel element wrapper; may be null if the element was absent. */
    public JAXBElement<String> getRoleLevel() {
        return roleLevel;
    }

    /** Sets the RoleLevel element wrapper. */
    public void setRoleLevel(JAXBElement<String> value) {
        this.roleLevel = ((JAXBElement<String>) value);
    }

    /** Gets the PrimaryFlag value; may be null. */
    public Boolean isPrimaryFlag() {
        return primaryFlag;
    }

    /** Sets the PrimaryFlag value. */
    public void setPrimaryFlag(Boolean value) {
        this.primaryFlag = value;
    }

    /** Gets the LastUpdateDate value; may be null. */
    public XMLGregorianCalendar getLastUpdateDate() {
        return lastUpdateDate;
    }

    /** Sets the LastUpdateDate value. */
    public void setLastUpdateDate(XMLGregorianCalendar value) {
        this.lastUpdateDate = value;
    }

    /** Gets the LastUpdatedBy value; may be null. */
    public String getLastUpdatedBy() {
        return lastUpdatedBy;
    }

    /** Sets the LastUpdatedBy value. */
    public void setLastUpdatedBy(String value) {
        this.lastUpdatedBy = value;
    }

    /** Gets the LastUpdateLogin element wrapper; may be null if the element was absent. */
    public JAXBElement<String> getLastUpdateLogin() {
        return lastUpdateLogin;
    }

    /** Sets the LastUpdateLogin element wrapper. */
    public void setLastUpdateLogin(JAXBElement<String> value) {
        this.lastUpdateLogin = ((JAXBElement<String>) value);
    }

    /** Gets the PrimaryContactPerRoleType value; may be null. */
    public String getPrimaryContactPerRoleType() {
        return primaryContactPerRoleType;
    }

    /** Sets the PrimaryContactPerRoleType value. */
    public void setPrimaryContactPerRoleType(String value) {
        this.primaryContactPerRoleType = value;
    }

    /** Gets the RequestId element wrapper; may be null if the element was absent. */
    public JAXBElement<Long> getRequestId() {
        return requestId;
    }

    /** Sets the RequestId element wrapper. */
    public void setRequestId(JAXBElement<Long> value) {
        this.requestId = ((JAXBElement<Long>) value);
    }

    /** Gets the Status value; may be null. */
    public String getStatus() {
        return status;
    }

    /** Sets the Status value. */
    public void setStatus(String value) {
        this.status = value;
    }

    /** Gets the ObjectVersionNumber value; may be null. */
    public Integer getObjectVersionNumber() {
        return objectVersionNumber;
    }

    /** Sets the ObjectVersionNumber value. */
    public void setObjectVersionNumber(Integer value) {
        this.objectVersionNumber = value;
    }

    /** Gets the CreatedByModule value; may be null. */
    public String getCreatedByModule() {
        return createdByModule;
    }

    /** Sets the CreatedByModule value. */
    public void setCreatedByModule(String value) {
        this.createdByModule = value;
    }

    /**
     * Gets the OriginalSystemReference list.
     *
     * <p>Returns the live backing list (lazily created), not a snapshot —
     * mutations are visible to the JAXB object, which is why there is no
     * corresponding setter. Example: {@code getOriginalSystemReference().add(newItem);}
     */
    public List<OriginalSystemReference> getOriginalSystemReference() {
        if (originalSystemReference == null) {
            originalSystemReference = new ArrayList<OriginalSystemReference>();
        }
        return this.originalSystemReference;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.aws.swf;

import java.util.Map;

import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflow;
import com.amazonaws.services.simpleworkflow.flow.ActivitySchedulingOptions;
import com.amazonaws.services.simpleworkflow.flow.DataConverter;
import com.amazonaws.services.simpleworkflow.flow.WorkflowTypeRegistrationOptions;
import com.amazonaws.services.simpleworkflow.flow.worker.ActivityTypeExecutionOptions;
import com.amazonaws.services.simpleworkflow.flow.worker.ActivityTypeRegistrationOptions;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;

/**
 * Endpoint configuration for the Camel AWS Simple Workflow (SWF) component.
 *
 * <p>The {@code @UriPath}/{@code @UriParam} annotations bind these fields to
 * URI path/query options; the setter javadoc below is surfaced as the option
 * description in the generated component documentation.
 */
@UriParams
public class SWFConfiguration implements Cloneable {

    @UriPath(enums = "activity,workflow")
    @Metadata(required = true)
    private String type;
    @UriParam
    private AmazonSimpleWorkflow amazonSWClient;
    @UriParam(label = "security", secret = true)
    private String accessKey;
    @UriParam(label = "security", secret = true)
    private String secretKey;
    @UriParam(label = "producer,workflow", defaultValue = "START",
            enums = "SIGNAL,CANCEL,TERMINATE,GET_STATE,START,DESCRIBE,GET_HISTORY")
    private String operation = "START";
    @UriParam(label = "common")
    private String region;
    @UriParam
    private String domainName;
    @UriParam(label = "consumer,activity")
    private String activityList;
    @UriParam(label = "consumer,workflow")
    private String workflowList;
    @UriParam
    private String eventName;
    @UriParam
    private String version;
    @UriParam(label = "producer,workflow")
    private String signalName;
    @UriParam(label = "producer,workflow")
    private String childPolicy;
    @UriParam(label = "producer,workflow")
    private String terminationReason;
    @UriParam(label = "producer,workflow")
    private String stateResultType;
    @UriParam(label = "producer,workflow")
    private String terminationDetails;
    @UriParam(label = "producer,workflow", defaultValue = "3600")
    private String executionStartToCloseTimeout = "3600";
    @UriParam(label = "producer,workflow", defaultValue = "600")
    private String taskStartToCloseTimeout = "600";
    @UriParam
    private DataConverter dataConverter;
    @UriParam(label = "producer,activity")
    private ActivitySchedulingOptions activitySchedulingOptions;
    @UriParam(label = "consumer,activity")
    private ActivityTypeExecutionOptions activityTypeExecutionOptions;
    @UriParam(label = "consumer,activity")
    private ActivityTypeRegistrationOptions activityTypeRegistrationOptions;
    @UriParam(label = "consumer,workflow")
    private WorkflowTypeRegistrationOptions workflowTypeRegistrationOptions;
    @UriParam(label = "consumer,activity", defaultValue = "100")
    private int activityThreadPoolSize = 100; // aws-sdk default
    @UriParam(label = "advanced", prefix = "clientConfiguration.", multiValue = true)
    private Map<String, Object> clientConfigurationParameters;
    @UriParam(label = "advanced", prefix = "sWClient.", multiValue = true)
    private Map<String, Object> sWClientParameters;
    @UriParam(label = "advanced", prefix = "startWorkflowOptions.", multiValue = true)
    private Map<String, Object> startWorkflowOptionsParameters;

    public String getAccessKey() {
        return accessKey;
    }

    /**
     * Amazon AWS Access Key.
     */
    public void setAccessKey(String accessKey) {
        this.accessKey = accessKey;
    }

    public String getSecretKey() {
        return secretKey;
    }

    /**
     * Amazon AWS Secret Key.
     */
    public void setSecretKey(String secretKey) {
        this.secretKey = secretKey;
    }

    public String getRegion() {
        return region;
    }

    /**
     * Amazon AWS Region. When using this parameter, the configuration will expect the capitalized name of the region
     * (for example AP_EAST_1) You'll need to use the name Regions.EU_WEST_1.name()
     */
    public void setRegion(String region) {
        this.region = region;
    }

    public String getDomainName() {
        return domainName;
    }

    /**
     * The workflow domain to use.
     */
    public void setDomainName(String domainName) {
        this.domainName = domainName;
    }

    public String getActivityList() {
        return activityList;
    }

    /**
     * The list name to consume activities from.
     */
    public void setActivityList(String activityList) {
        this.activityList = activityList;
    }

    public String getWorkflowList() {
        return workflowList;
    }

    /**
     * The list name to consume workflows from.
     */
    public void setWorkflowList(String workflowList) {
        this.workflowList = workflowList;
    }

    public String getEventName() {
        return eventName;
    }

    /**
     * The workflow or activity event name to use.
     */
    public void setEventName(String eventName) {
        this.eventName = eventName;
    }

    public String getVersion() {
        return version;
    }

    /**
     * The workflow or activity event version to use.
     */
    public void setVersion(String version) {
        this.version = version;
    }

    public String getType() {
        return type;
    }

    /**
     * Activity or workflow
     */
    public void setType(String type) {
        this.type = type;
    }

    public Map<String, Object> getClientConfigurationParameters() {
        return clientConfigurationParameters;
    }

    /**
     * To configure the ClientConfiguration using the key/values from the Map.
     */
    public void setClientConfigurationParameters(Map<String, Object> clientConfigurationParameters) {
        this.clientConfigurationParameters = clientConfigurationParameters;
    }

    public Map<String, Object> getSWClientParameters() {
        return sWClientParameters;
    }

    /**
     * To configure the AmazonSimpleWorkflowClient using the key/values from the Map.
     */
    public void setSWClientParameters(Map<String, Object> sWClientParameters) {
        this.sWClientParameters = sWClientParameters;
    }

    public AmazonSimpleWorkflow getAmazonSWClient() {
        return amazonSWClient;
    }

    /**
     * To use the given AmazonSimpleWorkflowClient as client
     */
    public void setAmazonSWClient(AmazonSimpleWorkflow amazonSWClient) {
        this.amazonSWClient = amazonSWClient;
    }

    public Map<String, Object> getStartWorkflowOptionsParameters() {
        return startWorkflowOptionsParameters;
    }

    /**
     * To configure the StartWorkflowOptions using the key/values from the Map.
     *
     * @param startWorkflowOptionsParameters key/values applied to the StartWorkflowOptions
     */
    public void setStartWorkflowOptionsParameters(Map<String, Object> startWorkflowOptionsParameters) {
        this.startWorkflowOptionsParameters = startWorkflowOptionsParameters;
    }

    public String getOperation() {
        return operation;
    }

    /**
     * Workflow operation
     */
    public void setOperation(String operation) {
        this.operation = operation;
    }

    public String getSignalName() {
        return signalName;
    }

    /**
     * The name of the signal to send to the workflow.
     */
    public void setSignalName(String signalName) {
        this.signalName = signalName;
    }

    public String getChildPolicy() {
        return childPolicy;
    }

    /**
     * The policy to use on child workflows when terminating a workflow.
     */
    public void setChildPolicy(String childPolicy) {
        this.childPolicy = childPolicy;
    }

    public String getTerminationReason() {
        return terminationReason;
    }

    /**
     * The reason for terminating a workflow.
     */
    public void setTerminationReason(String terminationReason) {
        this.terminationReason = terminationReason;
    }

    public String getStateResultType() {
        return stateResultType;
    }

    /**
     * The type of the result when a workflow state is queried.
     */
    public void setStateResultType(String stateResultType) {
        this.stateResultType = stateResultType;
    }

    public String getTerminationDetails() {
        return terminationDetails;
    }

    /**
     * Details for terminating a workflow.
     */
    public void setTerminationDetails(String terminationDetails) {
        this.terminationDetails = terminationDetails;
    }

    public ActivityTypeExecutionOptions getActivityTypeExecutionOptions() {
        return activityTypeExecutionOptions;
    }

    /**
     * Activity execution options
     */
    public void setActivityTypeExecutionOptions(ActivityTypeExecutionOptions activityTypeExecutionOptions) {
        this.activityTypeExecutionOptions = activityTypeExecutionOptions;
    }

    public ActivityTypeRegistrationOptions getActivityTypeRegistrationOptions() {
        return activityTypeRegistrationOptions;
    }

    /**
     * Activity registration options
     */
    public void setActivityTypeRegistrationOptions(ActivityTypeRegistrationOptions activityTypeRegistrationOptions) {
        this.activityTypeRegistrationOptions = activityTypeRegistrationOptions;
    }

    public DataConverter getDataConverter() {
        return dataConverter;
    }

    /**
     * An instance of com.amazonaws.services.simpleworkflow.flow.DataConverter to use for serializing/deserializing the
     * data.
     */
    public void setDataConverter(DataConverter dataConverter) {
        this.dataConverter = dataConverter;
    }

    public WorkflowTypeRegistrationOptions getWorkflowTypeRegistrationOptions() {
        return workflowTypeRegistrationOptions;
    }

    /**
     * Workflow registration options
     */
    public void setWorkflowTypeRegistrationOptions(WorkflowTypeRegistrationOptions workflowTypeRegistrationOptions) {
        this.workflowTypeRegistrationOptions = workflowTypeRegistrationOptions;
    }

    public ActivitySchedulingOptions getActivitySchedulingOptions() {
        return activitySchedulingOptions;
    }

    /**
     * Activity scheduling options
     */
    public void setActivitySchedulingOptions(ActivitySchedulingOptions activitySchedulingOptions) {
        this.activitySchedulingOptions = activitySchedulingOptions;
    }

    public int getActivityThreadPoolSize() {
        return activityThreadPoolSize;
    }

    /**
     * Maximum number of threads in work pool for activity.
     */
    public void setActivityThreadPoolSize(int activityThreadPoolSize) {
        this.activityThreadPoolSize = activityThreadPoolSize;
    }

    public String getExecutionStartToCloseTimeout() {
        return executionStartToCloseTimeout;
    }

    /**
     * Set the execution start to close timeout.
     */
    public void setExecutionStartToCloseTimeout(String executionStartToCloseTimeout) {
        this.executionStartToCloseTimeout = executionStartToCloseTimeout;
    }

    public String getTaskStartToCloseTimeout() {
        return taskStartToCloseTimeout;
    }

    /**
     * Set the task start to close timeout.
     */
    public void setTaskStartToCloseTimeout(String taskStartToCloseTimeout) {
        this.taskStartToCloseTimeout = taskStartToCloseTimeout;
    }

    // *************************************************
    //
    // *************************************************

    // Shallow clone is sufficient here: all fields are either immutable or
    // intentionally shared references (client, option objects, maps).
    public SWFConfiguration copy() {
        try {
            return (SWFConfiguration) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }
}
/* * Copyright (c) 2009-2012 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.jme3.material; import com.jme3.asset.AssetKey; import com.jme3.asset.AssetManager; import com.jme3.asset.CloneableSmartAsset; import com.jme3.export.*; import com.jme3.light.*; import com.jme3.material.RenderState.BlendMode; import com.jme3.material.RenderState.FaceCullMode; import com.jme3.material.TechniqueDef.LightMode; import com.jme3.material.TechniqueDef.ShadowMode; import com.jme3.math.*; import com.jme3.renderer.Caps; import com.jme3.renderer.RenderManager; import com.jme3.renderer.Renderer; import com.jme3.renderer.queue.RenderQueue.Bucket; import com.jme3.scene.Geometry; import com.jme3.shader.Shader; import com.jme3.shader.Uniform; import com.jme3.shader.UniformBindingManager; import com.jme3.shader.VarType; import com.jme3.texture.Image; import com.jme3.texture.Texture; import com.jme3.texture.image.ColorSpace; import com.jme3.util.ListMap; import com.jme3.util.SafeArrayList; import java.io.IOException; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; /** * <code>Material</code> describes the rendering style for a given * {@link Geometry}. * <p>A material is essentially a list of {@link MatParam parameters}, * those parameters map to uniforms which are defined in a shader. * Setting the parameters can modify the behavior of a * shader. 
 * <p/>
 *
 * @author Kirill Vainer
 */
public class Material implements CloneableSmartAsset, Cloneable, Savable {

    // Version #2: Fixed issue with RenderState.apply*** flags not getting exported
    public static final int SAVABLE_VERSION = 2;
    private static final Logger logger = Logger.getLogger(Material.class.getName());
    // Asset key this material was loaded from; null when created programmatically.
    private AssetKey key;
    private String name;
    private MaterialDef def;
    // Parameter values set on this instance; keys are parameter names from the j3md definition.
    private ListMap<String, MatParam> paramValues = new ListMap<String, MatParam>();
    // Currently active technique; null until selectTechnique()/render() chooses one.
    private Technique technique;
    private HashMap<String, Technique> techniques = new HashMap<String, Technique>();
    // User-supplied extra render state, lazily created by getAdditionalRenderState().
    private RenderState additionalState = null;
    // Scratch object reused each frame to merge technique + additional render state.
    private RenderState mergedRenderState = new RenderState();
    private boolean transparent = false;
    private boolean receivesShadows = false;
    // Cached sort key; -1 means "not yet computed" (recomputed lazily in getSortId()).
    private int sortingId = -1;

    /**
     * Creates a material from the given definition, copying any default
     * parameter values declared in the definition.
     *
     * @param def the material definition, must not be null
     */
    public Material(MaterialDef def) {
        if (def == null) {
            throw new NullPointerException("Material definition cannot be null");
        }
        this.def = def;

        // Load default values from definition (if any)
        for (MatParam param : def.getMaterialParams()) {
            if (param.getValue() != null) {
                setParam(param.getName(), param.getVarType(), param.getValue());
            }
        }
    }

    /**
     * Loads the material definition via the asset manager and creates a
     * material from it.
     */
    public Material(AssetManager contentMan, String defName) {
        this((MaterialDef) contentMan.loadAsset(new AssetKey(defName)));
    }

    /**
     * Do not use this constructor. Serialization purposes only.
     */
    public Material() {
    }

    /**
     * Returns the asset key name of the asset from which this material was loaded.
     *
     * <p>This value will be <code>null</code> unless this material was loaded
     * from a .j3m file.
     *
     * @return Asset key name of the j3m file
     */
    public String getAssetName() {
        return key != null ? key.getName() : null;
    }

    /**
     * @return the name of the material (not the same as the asset name), the returned value can be null
     */
    public String getName() {
        return name;
    }

    /**
     * This method sets the name of the material.
     * The name is not the same as the asset name.
     * It can be null and there is no guarantee of its uniqueness.
     *
     * @param name the name of the material
     */
    public void setName(String name) {
        this.name = name;
    }

    /** Sets the asset key (called by the asset system when loading). */
    public void setKey(AssetKey key) {
        this.key = key;
    }

    /** @return the asset key this material was loaded from, or null. */
    public AssetKey getKey() {
        return key;
    }

    /**
     * Returns the sorting ID or sorting index for this material.
     *
     * <p>The sorting ID is used internally by the system to sort rendering
     * of geometries. It sorted to reduce shader switches, if the shaders
     * are equal, then it is sorted by textures.
     *
     * @return The sorting ID used for sorting geometries for rendering.
     */
    public int getSortId() {
        // Computed lazily; requires an active technique. High 16 bits come from
        // the technique, low 16 bits from a hash of the bound texture IDs.
        if (sortingId == -1 && technique != null) {
            sortingId = technique.getSortId() << 16;
            int texturesSortId = 17;
            for (int i = 0; i < paramValues.size(); i++) {
                MatParam param = paramValues.getValue(i);
                if (!param.getVarType().isTextureType()) {
                    continue;
                }
                Texture texture = (Texture) param.getValue();
                if (texture == null) {
                    continue;
                }
                Image image = texture.getImage();
                if (image == null) {
                    continue;
                }
                // Texture not yet uploaded to the GPU (id == -1) hashes as 0.
                int textureId = image.getId();
                if (textureId == -1) {
                    textureId = 0;
                }
                texturesSortId = texturesSortId * 23 + textureId;
            }
            sortingId |= texturesSortId & 0xFFFF;
        }
        return sortingId;
    }

    /**
     * Clones this material. The result is returned.
     * The clone gets deep copies of the additional render state and of every
     * parameter value; technique state is reset and re-selected on next render.
     */
    @Override
    public Material clone() {
        try {
            Material mat = (Material) super.clone();

            if (additionalState != null) {
                mat.additionalState = additionalState.clone();
            }
            // Techniques are per-instance: force the clone to re-select.
            mat.technique = null;
            mat.techniques = new HashMap<String, Technique>();

            mat.paramValues = new ListMap<String, MatParam>();
            for (int i = 0; i < paramValues.size(); i++) {
                Map.Entry<String, MatParam> entry = paramValues.getEntry(i);
                mat.paramValues.put(entry.getKey(), entry.getValue().clone());
            }

            mat.sortingId = -1;

            return mat;
        } catch (CloneNotSupportedException ex) {
            // Cannot happen: this class implements Cloneable.
            throw new AssertionError(ex);
        }
    }

    /**
     * Compares two materials and returns true if they are equal.
     * This method compares definition, parameters, additional render states.
 * Since materials are mutable objects, implementing equals() properly is not possible,
 * hence the name contentEquals().
 *
 * @param otherObj the material to compare to this material
 * @return true if the materials are equal.
 */
public boolean contentEquals(Object otherObj) {
    if (!(otherObj instanceof Material)) {
        return false;
    }

    Material other = (Material) otherObj;

    // Early exit if the material are the same object
    if (this == other) {
        return true;
    }

    // Check material definition (identity comparison: defs are shared assets)
    if (this.getMaterialDef() != other.getMaterialDef()) {
        return false;
    }

    // Early exit if the size of the params is different
    if (this.paramValues.size() != other.paramValues.size()) {
        return false;
    }

    // Checking technique
    if (this.technique != null || other.technique != null) {
        // Techniques are considered equal if their names are the same
        // E.g. if user chose custom technique for one material but
        // uses default technique for other material, the materials
        // are not equal.
        String thisDefName = this.technique != null
                ? this.technique.getDef().getName()
                : TechniqueDef.DEFAULT_TECHNIQUE_NAME;
        String otherDefName = other.technique != null
                ? other.technique.getDef().getName()
                : TechniqueDef.DEFAULT_TECHNIQUE_NAME;
        if (!thisDefName.equals(otherDefName)) {
            return false;
        }
    }

    // Comparing parameters
    try {
        for (int i = 0; i < paramValues.size(); i++) {
            String paramKey = paramValues.getKey(i);
            MatParam thisParam = this.getParam(paramKey);
            MatParam otherParam = other.getParam(paramKey);

            // This param does not exist in compared mat
            if (otherParam == null) {
                return false;
            }

            if (!otherParam.equals(thisParam)) {
                return false;
            }
        }
    } catch (Exception e) {
        // parameters changed in another thread, just return false for equals
        logger.log(Level.WARNING, "Material contentEquals parameter exception: {0}", e.toString());
        return false;
    }

    // Comparing additional render states
    if (additionalState == null) {
        if (other.additionalState != null) {
            return false;
        }
    } else {
        if (!additionalState.equals(other.additionalState)) {
            return false;
        }
    }

    return true;
}

/**
 * Works like {@link Object#hashCode() } except it may change together with the material as the material is mutable by definition.
 * Consistent with {@link #contentEquals(java.lang.Object)}: equal contents
 * produce equal hashes.
 */
public int contentHashCode() {
    int hash = 7;
    hash = 29 * hash + (this.def != null ? this.def.hashCode() : 0);
    hash = 29 * hash + (this.paramValues != null ? this.paramValues.hashCode() : 0);
    hash = 29 * hash + (this.technique != null ? this.technique.getDef().getName().hashCode() : 0);
    hash = 29 * hash + (this.additionalState != null ? this.additionalState.contentHashCode() : 0);
    return hash;
}

/**
 * Returns the currently active technique.
 * <p>
 * The technique is selected automatically by the {@link RenderManager}
 * based on system capabilities. Users may select their own
 * technique by using
 * {@link #selectTechnique(java.lang.String, com.jme3.renderer.RenderManager) }.
 *
 * @return the currently active technique.
 *
 * @see #selectTechnique(java.lang.String, com.jme3.renderer.RenderManager)
 */
public Technique getActiveTechnique() {
    return technique;
}

/**
 * Check if the transparent value marker is set on this material.
* @return True if the transparent value marker is set on this material. * @see #setTransparent(boolean) */ public boolean isTransparent() { return transparent; } /** * Set the transparent value marker. * * <p>This value is merely a marker, by itself it does nothing. * Generally model loaders will use this marker to indicate further * up that the material is transparent and therefore any geometries * using it should be put into the {@link Bucket#Transparent transparent * bucket}. * * @param transparent the transparent value marker. */ public void setTransparent(boolean transparent) { this.transparent = transparent; } /** * Check if the material should receive shadows or not. * * @return True if the material should receive shadows. * * @see Material#setReceivesShadows(boolean) */ public boolean isReceivesShadows() { return receivesShadows; } /** * Set if the material should receive shadows or not. * * <p>This value is merely a marker, by itself it does nothing. * Generally model loaders will use this marker to indicate * the material should receive shadows and therefore any * geometries using it should have the {@link ShadowMode#Receive} set * on them. * * @param receivesShadows if the material should receive shadows or not. */ public void setReceivesShadows(boolean receivesShadows) { this.receivesShadows = receivesShadows; } /** * Acquire the additional {@link RenderState render state} to apply * for this material. * * <p>The first call to this method will create an additional render * state which can be modified by the user to apply any render * states in addition to the ones used by the renderer. Only render * states which are modified in the additional render state will be applied. * * @return The additional render state. 
*/ public RenderState getAdditionalRenderState() { if (additionalState == null) { additionalState = RenderState.ADDITIONAL.clone(); } return additionalState; } /** * Get the material definition (j3md file info) that <code>this</code> * material is implementing. * * @return the material definition this material implements. */ public MaterialDef getMaterialDef() { return def; } /** * Returns the parameter set on this material with the given name, * returns <code>null</code> if the parameter is not set. * * @param name The parameter name to look up. * @return The MatParam if set, or null if not set. */ public MatParam getParam(String name) { return paramValues.get(name); } /** * Returns the texture parameter set on this material with the given name, * returns <code>null</code> if the parameter is not set. * * @param name The parameter name to look up. * @return The MatParamTexture if set, or null if not set. */ public MatParamTexture getTextureParam(String name) { MatParam param = paramValues.get(name); if (param instanceof MatParamTexture) { return (MatParamTexture) param; } return null; } /** * Returns a collection of all parameters set on this material. * * @return a collection of all parameters set on this material. * * @see #setParam(java.lang.String, com.jme3.shader.VarType, java.lang.Object) */ public Collection<MatParam> getParams() { return paramValues.values(); } /** * Returns the ListMap of all parameters set on this material. * * @return a ListMap of all parameters set on this material. * * @see #setParam(java.lang.String, com.jme3.shader.VarType, java.lang.Object) */ public ListMap<String, MatParam> getParamsMap() { return paramValues; } /** * Check if setting the parameter given the type and name is allowed. 
* @param type The type that the "set" function is designed to set * @param name The name of the parameter */ private void checkSetParam(VarType type, String name) { MatParam paramDef = def.getMaterialParam(name); if (paramDef == null) { throw new IllegalArgumentException("Material parameter is not defined: " + name); } if (type != null && paramDef.getVarType() != type) { logger.log(Level.WARNING, "Material parameter being set: {0} with " + "type {1} doesn''t match definition types {2}", new Object[]{name, type.name(), paramDef.getVarType()}); } } /** * Pass a parameter to the material shader. * * @param name the name of the parameter defined in the material definition (j3md) * @param type the type of the parameter {@link VarType} * @param value the value of the parameter */ public void setParam(String name, VarType type, Object value) { checkSetParam(type, name); if (type.isTextureType()) { setTextureParam(name, type, (Texture)value); } else { MatParam val = getParam(name); if (val == null) { MatParam paramDef = def.getMaterialParam(name); paramValues.put(name, new MatParam(type, name, value)); } else { val.setValue(value); } if (technique != null) { technique.notifyParamChanged(name, type, value); } } } /** * Clear a parameter from this material. The parameter must exist * @param name the name of the parameter to clear */ public void clearParam(String name) { checkSetParam(null, name); MatParam matParam = getParam(name); if (matParam == null) { return; } paramValues.remove(name); if (matParam instanceof MatParamTexture) { sortingId = -1; } if (technique != null) { technique.notifyParamChanged(name, null, null); } } /** * Set a texture parameter. * * @param name The name of the parameter * @param type The variable type {@link VarType} * @param value The texture value of the parameter. 
* * @throws IllegalArgumentException is value is null */ public void setTextureParam(String name, VarType type, Texture value) { if (value == null) { throw new IllegalArgumentException(); } checkSetParam(type, name); MatParamTexture val = getTextureParam(name); if (val == null) { MatParamTexture paramDef = (MatParamTexture) def.getMaterialParam(name); if (paramDef.getColorSpace() != null && paramDef.getColorSpace() != value.getImage().getColorSpace()) { value.getImage().setColorSpace(paramDef.getColorSpace()); logger.log(Level.FINE, "Material parameter {0} needs a {1} texture, " + "texture {2} was switched to {3} color space.", new Object[]{name, paramDef.getColorSpace().toString(), value.getName(), value.getImage().getColorSpace().name()}); } else if (paramDef.getColorSpace() == null && value.getName() != null && value.getImage().getColorSpace() == ColorSpace.Linear) { logger.log(Level.WARNING, "The texture {0} has linear color space, but the material " + "parameter {2} specifies no color space requirement, this may " + "lead to unexpected behavior.\nCheck if the image " + "was not set to another material parameter with a linear " + "color space, or that you did not set the ColorSpace to " + "Linear using texture.getImage.setColorSpace().", new Object[]{value.getName(), value.getImage().getColorSpace().name(), name}); } paramValues.put(name, new MatParamTexture(type, name, value, null)); } else { val.setTextureValue(value); } if (technique != null) { technique.notifyParamChanged(name, type, value); } // need to recompute sort ID sortingId = -1; } /** * Pass a texture to the material shader. 
 *
 * @param name the name of the texture defined in the material definition
 * (j3md) (for example Texture for Lighting.j3md)
 * @param value the Texture object previously loaded by the asset manager
 */
public void setTexture(String name, Texture value) {
    if (value == null) {
        // clear it
        clearParam(name);
        return;
    }

    // Map the texture's runtime type to the corresponding shader VarType.
    VarType paramType = null;
    switch (value.getType()) {
        case TwoDimensional:
            paramType = VarType.Texture2D;
            break;
        case TwoDimensionalArray:
            paramType = VarType.TextureArray;
            break;
        case ThreeDimensional:
            paramType = VarType.Texture3D;
            break;
        case CubeMap:
            paramType = VarType.TextureCubeMap;
            break;
        default:
            throw new UnsupportedOperationException("Unknown texture type: " + value.getType());
    }

    setTextureParam(name, paramType, value);
}

/**
 * Pass a Matrix4f to the material shader.
 *
 * @param name the name of the matrix defined in the material definition (j3md)
 * @param value the Matrix4f object
 */
public void setMatrix4(String name, Matrix4f value) {
    setParam(name, VarType.Matrix4, value);
}

/**
 * Pass a boolean to the material shader.
 *
 * @param name the name of the boolean defined in the material definition (j3md)
 * @param value the boolean value
 */
public void setBoolean(String name, boolean value) {
    setParam(name, VarType.Boolean, value);
}

/**
 * Pass a float to the material shader.
 *
 * @param name the name of the float defined in the material definition (j3md)
 * @param value the float value
 */
public void setFloat(String name, float value) {
    setParam(name, VarType.Float, value);
}

/**
 * Pass a float to the material shader. This version avoids auto-boxing
 * if the value is already a Float.
 *
 * @param name the name of the float defined in the material definition (j3md)
 * @param value the float value
 */
public void setFloat(String name, Float value) {
    setParam(name, VarType.Float, value);
}

/**
 * Pass an int to the material shader.
 *
 * @param name the name of the int defined in the material definition (j3md)
 * @param value the int value
 */
public void setInt(String name, int value) {
    setParam(name, VarType.Int, value);
}

/**
 * Pass a Color to the material shader.
 *
 * @param name the name of the color defined in the material definition (j3md)
 * @param value the ColorRGBA value
 */
public void setColor(String name, ColorRGBA value) {
    setParam(name, VarType.Vector4, value);
}

/**
 * Pass a Vector2f to the material shader.
 *
 * @param name the name of the Vector2f defined in the material definition (j3md)
 * @param value the Vector2f value
 */
public void setVector2(String name, Vector2f value) {
    setParam(name, VarType.Vector2, value);
}

/**
 * Pass a Vector3f to the material shader.
 *
 * @param name the name of the Vector3f defined in the material definition (j3md)
 * @param value the Vector3f value
 */
public void setVector3(String name, Vector3f value) {
    setParam(name, VarType.Vector3, value);
}

/**
 * Pass a Vector4f to the material shader.
 *
 * @param name the name of the Vector4f defined in the material definition (j3md)
 * @param value the Vector4f value
 */
public void setVector4(String name, Vector4f value) {
    setParam(name, VarType.Vector4, value);
}

/**
 * Select the technique to use for rendering this material.
 * <p>
 * Any candidate technique for selection (either default or named)
 * must be verified to be compatible with the system, for that, the
 * <code>renderManager</code> is queried for capabilities.
 *
 * @param name The name of the technique to select, pass
 * {@link TechniqueDef#DEFAULT_TECHNIQUE_NAME} to select one of the default
 * techniques.
 * @param renderManager The {@link RenderManager render manager}
 * to query for capabilities.
 *
 * @throws IllegalArgumentException If no technique exists with the given
 * name.
 * @throws UnsupportedOperationException If no candidate technique supports
 * the system capabilities.
 */
public void selectTechnique(String name, RenderManager renderManager) {
    // check if already created
    Technique tech = techniques.get(name);
    // When choosing technique, we choose one that
    // supports all the caps.
    if (tech == null) {
        EnumSet<Caps> rendererCaps = renderManager.getRenderer().getCaps();
        List<TechniqueDef> techDefs = def.getTechniqueDefs(name);
        if (techDefs == null || techDefs.isEmpty()) {
            throw new IllegalArgumentException(
                    String.format("The requested technique %s is not available on material %s", name, def.getName()));
        }

        TechniqueDef lastTech = null;
        for (TechniqueDef techDef : techDefs) {
            if (rendererCaps.containsAll(techDef.getRequiredCaps())) {
                // use the first one that supports all the caps
                tech = new Technique(this, techDef);
                techniques.put(name, tech);
                // Prefer a technique matching the render manager's light mode;
                // keep scanning otherwise in case a better match follows.
                if (tech.getDef().getLightMode() == renderManager.getPreferredLightMode()
                        || tech.getDef().getLightMode() == LightMode.Disable) {
                    break;
                }
            }
            lastTech = techDef;
        }
        if (tech == null) {
            throw new UnsupportedOperationException(
                    String.format("No technique '%s' on material "
                            + "'%s' is supported by the video hardware. "
                            + "The capabilities %s are required.",
                            name, def.getName(), lastTech.getRequiredCaps()));
        }
    } else if (technique == tech) {
        // attempting to switch to an already
        // active technique.
        return;
    }

    technique = tech;
    tech.notifyTechniqueSwitched();

    // shader was changed
    sortingId = -1;
}

/**
 * Applies the given parameter overrides to the shader's uniforms, binding
 * texture overrides to consecutive texture units starting at {@code unit}.
 *
 * @return the next free texture unit after all texture overrides were bound
 */
private int applyOverrides(Renderer renderer, Shader shader, List<MatParamOverride> overrides, int unit) {
    //for (MatParamOverride override : overrides) {
    // Index-based loop (not for-each) — presumably to avoid iterator garbage
    // on the per-frame hot path; TODO confirm.
    for (int i = 0; i < overrides.size(); i++) {
        MatParamOverride override = overrides.get(i);
        VarType type = override.getVarType();

        // Only apply overrides that match a defined parameter of the same type.
        MatParam paramDef = def.getMaterialParam(override.getName());

        if (paramDef == null || paramDef.getVarType() != type || !override.isEnabled()) {
            continue;
        }

        Uniform uniform = shader.getUniform(override.getPrefixedName());

        if (override.getValue() != null) {
            if (type.isTextureType()) {
                renderer.setTexture(unit, (Texture) override.getValue());
                uniform.setValue(VarType.Int, unit);
                unit++;
            } else {
                uniform.setValue(type, override.getValue());
            }
        } else {
            uniform.clearValue();
        }
    }
    return unit;
}

/**
 * Uploads world overrides, forced overrides and then the material's own
 * parameters into the shader. Overrides are applied first so that the
 * material's parameters do not clobber them (uniforms already set by the
 * current material are skipped).
 *
 * @return the next free texture unit
 */
private int updateShaderMaterialParameters(Renderer renderer, Shader shader,
        SafeArrayList<MatParamOverride> worldOverrides, SafeArrayList<MatParamOverride> forcedOverrides) {
    int unit = 0;
    if (worldOverrides != null) {
        unit = applyOverrides(renderer, shader, worldOverrides, unit);
    }
    if (forcedOverrides != null) {
        unit = applyOverrides(renderer, shader, forcedOverrides, unit);
    }

    for (int i = 0; i < paramValues.size(); i++) {
        MatParam param = paramValues.getValue(i);
        VarType type = param.getVarType();
        Uniform uniform = shader.getUniform(param.getPrefixedName());

        // Overrides win: skip uniforms already set for this material this frame.
        if (uniform.isSetByCurrentMaterial()) {
            continue;
        }

        if (type.isTextureType()) {
            renderer.setTexture(unit, (Texture) param.getValue());
            uniform.setValue(VarType.Int, unit);
            unit++;
        } else {
            uniform.setValue(type, param.getValue());
        }
    }

    //TODO HACKY HACK remove this when texture unit is handled by the uniform.
    return unit;
}

/**
 * Applies the effective render state: the render manager's forced state if
 * present, otherwise the technique's (or default) state merged with this
 * material's additional render state.
 */
private void updateRenderState(RenderManager renderManager, Renderer renderer, TechniqueDef techniqueDef) {
    if (renderManager.getForcedRenderState() != null) {
        renderer.applyRenderState(renderManager.getForcedRenderState());
    } else {
        if (techniqueDef.getRenderState() != null) {
            renderer.applyRenderState(techniqueDef.getRenderState().copyMergedTo(additionalState, mergedRenderState));
        } else {
            renderer.applyRenderState(RenderState.DEFAULT.copyMergedTo(additionalState, mergedRenderState));
        }
    }
}

/**
 * Preloads this material for the given render manager.
 * <p>
 * Preloading the material can ensure that when the material is first
 * used for rendering, there won't be any delay since the material has
 * been already been setup for rendering.
 *
 * @param renderManager The render manager to preload for
 */
public void preload(RenderManager renderManager) {
    if (technique == null) {
        selectTechnique(TechniqueDef.DEFAULT_TECHNIQUE_NAME, renderManager);
    }
    TechniqueDef techniqueDef = technique.getDef();
    Renderer renderer = renderManager.getRenderer();
    EnumSet<Caps> rendererCaps = renderer.getCaps();
    if (techniqueDef.isNoRender()) {
        return;
    }

    Shader shader = technique.makeCurrent(renderManager, null, null, null, rendererCaps);
    updateShaderMaterialParameters(renderer, shader, null, null);
    renderManager.getRenderer().setShader(shader);
}

/** Marks every uniform of the shader as "not set by the current material". */
private void clearUniformsSetByCurrent(Shader shader) {
    ListMap<String, Uniform> uniforms = shader.getUniformMap();
    int size = uniforms.size();
    for (int i = 0; i < size; i++) {
        Uniform u = uniforms.getValue(i);
        u.clearSetByCurrentMaterial();
    }
}

/** Clears stale values of uniforms this material did not set this frame. */
private void resetUniformsNotSetByCurrent(Shader shader) {
    ListMap<String, Uniform> uniforms = shader.getUniformMap();
    int size = uniforms.size();
    for (int i = 0; i < size; i++) {
        Uniform u = uniforms.getValue(i);
        if (!u.isSetByCurrentMaterial()) {
            if (u.getName().charAt(0) != 'g') {
                // Don't reset world globals!
                // The benefits gained from this are very minimal
                // and cause lots of matrix -> FloatBuffer conversions.
                u.clearValue();
            }
        }
    }
}

/**
 * Called by {@link RenderManager} to render the geometry by
 * using this material.
 * <p>
 * The material is rendered as follows:
 * <ul>
 * <li>Determine which technique to use to render the material -
 * either what the user selected via
 * {@link #selectTechnique(java.lang.String, com.jme3.renderer.RenderManager)
 * Material.selectTechnique()},
 * or the first default technique that the renderer supports
 * (based on the technique's {@link TechniqueDef#getRequiredCaps() requested rendering capabilities})<ul>
 * <li>If the technique has been changed since the last frame, then it is notified via
 * {@link Technique#makeCurrent(com.jme3.asset.AssetManager, boolean, java.util.EnumSet)
 * Technique.makeCurrent()}.
 * If the technique wants to use a shader to render the model, it should load it at this part -
 * the shader should have all the proper defines as declared in the technique definition,
 * including those that are bound to material parameters.
 * The technique can re-use the shader from the last frame if
 * no changes to the defines occurred.</li></ul>
 * <li>Set the {@link RenderState} to use for rendering. The render states are
 * applied in this order (later RenderStates override earlier RenderStates):<ol>
 * <li>{@link TechniqueDef#getRenderState() Technique Definition's RenderState}
 * - i.e. specific renderstate that is required for the shader.</li>
 * <li>{@link #getAdditionalRenderState() Material Instance Additional RenderState}
 * - i.e. ad-hoc renderstate set per model</li>
 * <li>{@link RenderManager#getForcedRenderState() RenderManager's Forced RenderState}
 * - i.e. renderstate requested by a {@link com.jme3.post.SceneProcessor} or
 * post-processing filter.</li></ol>
 * <li>If the technique {@link TechniqueDef#isUsingShaders() uses a shader}, then the uniforms of the shader must be updated.<ul>
 * <li>Uniforms bound to material parameters are updated based on the current material parameter values.</li>
 * <li>Uniforms bound to world parameters are updated from the RenderManager.
 * Internally {@link UniformBindingManager} is used for this task.</li>
 * <li>Uniforms bound to textures will cause the texture to be uploaded as necessary.
 * The uniform is set to the texture unit where the texture is bound.</li></ul>
 * <li>If the technique uses a shader, the model is then rendered according
 * to the lighting mode specified on the technique definition.<ul>
 * <li>{@link LightMode#SinglePass single pass light mode} fills the shader's light uniform arrays
 * with the first 4 lights and renders the model once.</li>
 * <li>{@link LightMode#MultiPass multi pass light mode} light mode renders the model multiple times,
 * for the first light it is rendered opaque, on subsequent lights it is
 * rendered with {@link BlendMode#AlphaAdditive alpha-additive} blending and depth writing disabled.</li>
 * </ul>
 * <li>For techniques that do not use shaders,
 * fixed function OpenGL is used to render the model (see {@link GL1Renderer} interface):<ul>
 * <li>OpenGL state ({@link FixedFuncBinding}) that is bound to material parameters is updated. </li>
 * <li>The texture set on the material is uploaded and bound.
 * Currently only 1 texture is supported for fixed function techniques.</li>
 * <li>If the technique uses lighting, then OpenGL lighting state is updated
 * based on the light list on the geometry, otherwise OpenGL lighting is disabled.</li>
 * <li>The mesh is uploaded and rendered.</li>
 * </ul>
 * </ul>
 *
 * @param geometry The geometry to render
 * @param lights Presorted and filtered light list to use for rendering
 * @param renderManager The render manager requesting the rendering
 */
public void render(Geometry geometry, LightList lights, RenderManager renderManager) {
    if (technique == null) {
        selectTechnique(TechniqueDef.DEFAULT_TECHNIQUE_NAME, renderManager);
    }

    TechniqueDef techniqueDef = technique.getDef();
    Renderer renderer = renderManager.getRenderer();
    EnumSet<Caps> rendererCaps = renderer.getCaps();

    if (techniqueDef.isNoRender()) {
        return;
    }

    // Apply render state
    updateRenderState(renderManager, renderer, techniqueDef);

    // Get world overrides
    SafeArrayList<MatParamOverride> overrides = geometry.getWorldMatParamOverrides();

    // Select shader to use
    Shader shader = technique.makeCurrent(renderManager, overrides, renderManager.getForcedMatParams(), lights, rendererCaps);

    // Begin tracking which uniforms were changed by material.
    clearUniformsSetByCurrent(shader);

    // Set uniform bindings
    renderManager.updateUniformBindings(shader);

    // Set material parameters
    //TODO Remove the unit when texture units are handled in the Uniform
    int unit = updateShaderMaterialParameters(renderer, shader, overrides, renderManager.getForcedMatParams());

    // Clear any uniforms not changed by material.
    resetUniformsNotSetByCurrent(shader);

    // Delegate rendering to the technique
    technique.render(renderManager, shader, geometry, lights, unit);
}

/**
 * Called by {@link RenderManager} to render the geometry by
 * using this material.
 *
 * Note that this version of the render method
 * does not perform light filtering.
 *
 * @param geom The geometry to render
 * @param rm The render manager requesting the rendering
 */
public void render(Geometry geom, RenderManager rm) {
    render(geom, geom.getWorldLightList(), rm);
}

/**
 * Serializes this material: definition asset name, additional render state,
 * transparency marker, name and all parameter values.
 */
public void write(JmeExporter ex) throws IOException {
    OutputCapsule oc = ex.getCapsule(this);
    oc.write(def.getAssetName(), "material_def", null);
    oc.write(additionalState, "render_state", null);
    oc.write(transparent, "is_transparent", false);
    oc.write(name, "name", null);
    oc.writeStringSavableMap(paramValues, "parameters", null);
}

@Override
public String toString() {
    return "Material[name=" + name
            + ", def=" + (def != null ? def.getName() : null)
            + ", tech=" + (technique != null && technique.getDef() != null ? technique.getDef().getName() : null)
            + "]";
}

/**
 * Deserializes this material, re-resolving the material definition through
 * the asset manager and applying backwards-compatibility fixups for old
 * format versions and old savable versions.
 */
public void read(JmeImporter im) throws IOException {
    InputCapsule ic = im.getCapsule(this);

    name = ic.readString("name", null);
    additionalState = (RenderState) ic.readSavable("render_state", null);
    transparent = ic.readBoolean("is_transparent", false);

    // Load the material def
    String defName = ic.readString("material_def", null);
    HashMap<String, MatParam> params = (HashMap<String, MatParam>) ic.readStringSavableMap("parameters", null);

    boolean enableVcolor = false;
    boolean separateTexCoord = false;
    boolean applyDefaultValues = false;
    boolean guessRenderStateApply = false;

    // Savable-version based compatibility flags (see SAVABLE_VERSION).
    int ver = ic.getSavableVersion(Material.class);
    if (ver < 1) {
        applyDefaultValues = true;
    }
    if (ver < 2) {
        guessRenderStateApply = true;
    }
    if (im.getFormatVersion() == 0) {
        // Enable compatibility with old models
        if (defName.equalsIgnoreCase("Common/MatDefs/Misc/VertexColor.j3md")) {
            // Using VertexColor, switch to Unshaded and set VertexColor=true
            enableVcolor = true;
            defName = "Common/MatDefs/Misc/Unshaded.j3md";
        } else if (defName.equalsIgnoreCase("Common/MatDefs/Misc/SimpleTextured.j3md")
                || defName.equalsIgnoreCase("Common/MatDefs/Misc/SolidColor.j3md")) {
            // Using SimpleTextured/SolidColor, just switch to Unshaded
            defName = "Common/MatDefs/Misc/Unshaded.j3md";
        } else if (defName.equalsIgnoreCase("Common/MatDefs/Misc/WireColor.j3md")) {
            // Using WireColor, set wireframe renderstate = true and use Unshaded
            getAdditionalRenderState().setWireframe(true);
            defName = "Common/MatDefs/Misc/Unshaded.j3md";
        } else if (defName.equalsIgnoreCase("Common/MatDefs/Misc/Unshaded.j3md")) {
            // Uses unshaded, ensure that the proper param is set
            MatParam value = params.get("SeperateTexCoord");
            if (value != null && ((Boolean) value.getValue()) == true) {
                params.remove("SeperateTexCoord");
                separateTexCoord = true;
            }
        }
        // Format version 0 implies a savable version before both fixups.
        assert applyDefaultValues && guessRenderStateApply;
    }

    def = (MaterialDef) im.getAssetManager().loadAsset(new AssetKey(defName));
    paramValues = new ListMap<String, MatParam>();

    // load the textures and update nextTexUnit
    for (Map.Entry<String, MatParam> entry : params.entrySet()) {
        MatParam param = entry.getValue();
        if (param instanceof MatParamTexture) {
            MatParamTexture texVal = (MatParamTexture) param;

            // the texture failed to load for this param
            // do not add to param values
            if (texVal.getTextureValue() == null || texVal.getTextureValue().getImage() == null) {
                continue;
            }
        }

        if (im.getFormatVersion() == 0 && param.getName().startsWith("m_")) {
            // Ancient version of jME3 prefixed parameter names with "m_"; strip it.
            param.setName(param.getName().substring(2));
        }

        if (def.getMaterialParam(param.getName()) == null) {
            logger.log(Level.WARNING, "The material parameter is not defined: {0}. Ignoring..", param.getName());
        } else {
            checkSetParam(param.getVarType(), param.getName());
            paramValues.put(param.getName(), param);
        }
    }

    if (applyDefaultValues) {
        // compatability with old versions where default vars were
        // not available
        for (MatParam param : def.getMaterialParams()) {
            if (param.getValue() != null && paramValues.get(param.getName()) == null) {
                setParam(param.getName(), param.getVarType(), param.getValue());
            }
        }
    }
    if (guessRenderStateApply && additionalState != null) {
        // Try to guess values of "apply" render state based on defaults
        // if value != default then set apply to true
        additionalState.applyPolyOffset = additionalState.offsetEnabled;
        additionalState.applyBlendMode = additionalState.blendMode != BlendMode.Off;
        additionalState.applyColorWrite = !additionalState.colorWrite;
        additionalState.applyCullMode = additionalState.cullMode != FaceCullMode.Back;
        additionalState.applyDepthTest = !additionalState.depthTest;
        additionalState.applyDepthWrite = !additionalState.depthWrite;
        additionalState.applyStencilTest = additionalState.stencilTest;
        additionalState.applyWireFrame = additionalState.wireframe;
    }
    if (enableVcolor) {
        setBoolean("VertexColor", true);
    }
    if (separateTexCoord) {
        setBoolean("SeparateTexCoord", true);
    }
}
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.1.9-03/31/2009 04:14 PM(snajper)-fcs // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2009.05.19 at 10:15:07 AM CEST // package org.openehealth.ipf.commons.ihe.xds.core.stub.ebrs21.query; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;choice> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}RegistryObjectQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}RegistryEntryQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}AssociationQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}AuditableEventQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}ClassificationQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}ClassificationNodeQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}ClassificationSchemeQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}RegistryPackageQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}ExtrinsicObjectQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}OrganizationQuery"/> * &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}ServiceQuery"/> * &lt;/choice> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> 
* </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "registryObjectQuery", "registryEntryQuery", "associationQuery", "auditableEventQuery", "classificationQuery", "classificationNodeQuery", "classificationSchemeQuery", "registryPackageQuery", "extrinsicObjectQuery", "organizationQuery", "serviceQuery" }) @XmlRootElement(name = "FilterQuery") public class FilterQuery { @XmlElement(name = "RegistryObjectQuery") protected RegistryObjectQueryType registryObjectQuery; @XmlElement(name = "RegistryEntryQuery") protected RegistryEntryQueryType registryEntryQuery; @XmlElement(name = "AssociationQuery") protected AssociationQueryType associationQuery; @XmlElement(name = "AuditableEventQuery") protected AuditableEventQueryType auditableEventQuery; @XmlElement(name = "ClassificationQuery") protected ClassificationQueryType classificationQuery; @XmlElement(name = "ClassificationNodeQuery") protected ClassificationNodeQueryType classificationNodeQuery; @XmlElement(name = "ClassificationSchemeQuery") protected ClassificationSchemeQueryType classificationSchemeQuery; @XmlElement(name = "RegistryPackageQuery") protected RegistryPackageQueryType registryPackageQuery; @XmlElement(name = "ExtrinsicObjectQuery") protected ExtrinsicObjectQueryType extrinsicObjectQuery; @XmlElement(name = "OrganizationQuery") protected OrganizationQueryType organizationQuery; @XmlElement(name = "ServiceQuery") protected ServiceQueryType serviceQuery; /** * Gets the value of the registryObjectQuery property. * * @return * possible object is * {@link RegistryObjectQueryType } * */ public RegistryObjectQueryType getRegistryObjectQuery() { return registryObjectQuery; } /** * Sets the value of the registryObjectQuery property. * * @param value * allowed object is * {@link RegistryObjectQueryType } * */ public void setRegistryObjectQuery(RegistryObjectQueryType value) { this.registryObjectQuery = value; } /** * Gets the value of the registryEntryQuery property. 
* * @return * possible object is * {@link RegistryEntryQueryType } * */ public RegistryEntryQueryType getRegistryEntryQuery() { return registryEntryQuery; } /** * Sets the value of the registryEntryQuery property. * * @param value * allowed object is * {@link RegistryEntryQueryType } * */ public void setRegistryEntryQuery(RegistryEntryQueryType value) { this.registryEntryQuery = value; } /** * Gets the value of the associationQuery property. * * @return * possible object is * {@link AssociationQueryType } * */ public AssociationQueryType getAssociationQuery() { return associationQuery; } /** * Sets the value of the associationQuery property. * * @param value * allowed object is * {@link AssociationQueryType } * */ public void setAssociationQuery(AssociationQueryType value) { this.associationQuery = value; } /** * Gets the value of the auditableEventQuery property. * * @return * possible object is * {@link AuditableEventQueryType } * */ public AuditableEventQueryType getAuditableEventQuery() { return auditableEventQuery; } /** * Sets the value of the auditableEventQuery property. * * @param value * allowed object is * {@link AuditableEventQueryType } * */ public void setAuditableEventQuery(AuditableEventQueryType value) { this.auditableEventQuery = value; } /** * Gets the value of the classificationQuery property. * * @return * possible object is * {@link ClassificationQueryType } * */ public ClassificationQueryType getClassificationQuery() { return classificationQuery; } /** * Sets the value of the classificationQuery property. * * @param value * allowed object is * {@link ClassificationQueryType } * */ public void setClassificationQuery(ClassificationQueryType value) { this.classificationQuery = value; } /** * Gets the value of the classificationNodeQuery property. 
* * @return * possible object is * {@link ClassificationNodeQueryType } * */ public ClassificationNodeQueryType getClassificationNodeQuery() { return classificationNodeQuery; } /** * Sets the value of the classificationNodeQuery property. * * @param value * allowed object is * {@link ClassificationNodeQueryType } * */ public void setClassificationNodeQuery(ClassificationNodeQueryType value) { this.classificationNodeQuery = value; } /** * Gets the value of the classificationSchemeQuery property. * * @return * possible object is * {@link ClassificationSchemeQueryType } * */ public ClassificationSchemeQueryType getClassificationSchemeQuery() { return classificationSchemeQuery; } /** * Sets the value of the classificationSchemeQuery property. * * @param value * allowed object is * {@link ClassificationSchemeQueryType } * */ public void setClassificationSchemeQuery(ClassificationSchemeQueryType value) { this.classificationSchemeQuery = value; } /** * Gets the value of the registryPackageQuery property. * * @return * possible object is * {@link RegistryPackageQueryType } * */ public RegistryPackageQueryType getRegistryPackageQuery() { return registryPackageQuery; } /** * Sets the value of the registryPackageQuery property. * * @param value * allowed object is * {@link RegistryPackageQueryType } * */ public void setRegistryPackageQuery(RegistryPackageQueryType value) { this.registryPackageQuery = value; } /** * Gets the value of the extrinsicObjectQuery property. * * @return * possible object is * {@link ExtrinsicObjectQueryType } * */ public ExtrinsicObjectQueryType getExtrinsicObjectQuery() { return extrinsicObjectQuery; } /** * Sets the value of the extrinsicObjectQuery property. * * @param value * allowed object is * {@link ExtrinsicObjectQueryType } * */ public void setExtrinsicObjectQuery(ExtrinsicObjectQueryType value) { this.extrinsicObjectQuery = value; } /** * Gets the value of the organizationQuery property. 
* * @return * possible object is * {@link OrganizationQueryType } * */ public OrganizationQueryType getOrganizationQuery() { return organizationQuery; } /** * Sets the value of the organizationQuery property. * * @param value * allowed object is * {@link OrganizationQueryType } * */ public void setOrganizationQuery(OrganizationQueryType value) { this.organizationQuery = value; } /** * Gets the value of the serviceQuery property. * * @return * possible object is * {@link ServiceQueryType } * */ public ServiceQueryType getServiceQuery() { return serviceQuery; } /** * Sets the value of the serviceQuery property. * * @param value * allowed object is * {@link ServiceQueryType } * */ public void setServiceQuery(ServiceQueryType value) { this.serviceQuery = value; } }
/* * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * /* @test * @bug 8014377 * @summary Test for interference when two sockets are bound to the same * port but joined to different multicast groups * @build Promiscuous NetworkConfiguration * @run main Promiscuous * @run main/othervm -Djava.net.preferIPv4Stack=true Promiscuous */ import java.nio.ByteBuffer; import java.nio.channels.*; import java.net.*; import static java.net.StandardProtocolFamily.*; import java.util.*; import java.io.IOException; public class Promiscuous { static final Random rand = new Random(); static final ProtocolFamily UNSPEC = new ProtocolFamily() { public String name() { return "UNSPEC"; } }; /** * Sends a datagram to the given multicast group */ static int sendDatagram(NetworkInterface nif, InetAddress group, int port) throws IOException { ProtocolFamily family = (group instanceof Inet6Address) ? 
StandardProtocolFamily.INET6 : StandardProtocolFamily.INET; DatagramChannel dc = DatagramChannel.open(family) .setOption(StandardSocketOptions.IP_MULTICAST_IF, nif); int id = rand.nextInt(); byte[] msg = Integer.toString(id).getBytes("UTF-8"); ByteBuffer buf = ByteBuffer.wrap(msg); System.out.format("Send message -> group %s (id=0x%x)\n", group.getHostAddress(), id); dc.send(buf, new InetSocketAddress(group, port)); dc.close(); return id; } /** * Wait (with timeout) for datagram. The {@code datagramExepcted} * parameter indicates whether a datagram is expected, and if * {@true} then {@code id} is the identifier in the payload. */ static void receiveDatagram(DatagramChannel dc, String name, boolean datagramExepcted, int id) throws IOException { System.out.println("Checking if received by " + name); Selector sel = Selector.open(); dc.configureBlocking(false); dc.register(sel, SelectionKey.OP_READ); ByteBuffer buf = ByteBuffer.allocateDirect(100); try { for (;;) { System.out.println("Waiting to receive message"); sel.select(5*1000); SocketAddress sa = dc.receive(buf); // no datagram received if (sa == null) { if (datagramExepcted) { throw new RuntimeException("Expected message not recieved"); } System.out.println("No message received (correct)"); return; } // datagram received InetAddress sender = ((InetSocketAddress)sa).getAddress(); buf.flip(); byte[] bytes = new byte[buf.remaining()]; buf.get(bytes); int receivedId = Integer.parseInt(new String(bytes)); System.out.format("Received message from %s (id=0x%x)\n", sender, receivedId); if (!datagramExepcted) { if (receivedId == id) throw new RuntimeException("Message not expected"); System.out.println("Message ignored (has wrong id)"); } else { if (receivedId == id) { System.out.println("Message expected"); return; } System.out.println("Message ignored (wrong sender)"); } sel.selectedKeys().clear(); buf.rewind(); } } finally { sel.close(); } } static void test(ProtocolFamily family, NetworkInterface nif, InetAddress 
group1, InetAddress group2) throws IOException { System.out.format("%nTest family=%s%n", family.name()); DatagramChannel dc1 = (family == UNSPEC) ? DatagramChannel.open() : DatagramChannel.open(family); DatagramChannel dc2 = (family == UNSPEC) ? DatagramChannel.open() : DatagramChannel.open(family); try { dc1.setOption(StandardSocketOptions.SO_REUSEADDR, true); dc2.setOption(StandardSocketOptions.SO_REUSEADDR, true); dc1.bind(new InetSocketAddress(0)); int port = dc1.socket().getLocalPort(); dc2.bind(new InetSocketAddress(port)); System.out.format("dc1 joining [%s]:%d @ %s\n", group1.getHostAddress(), port, nif.getName()); System.out.format("dc2 joining [%s]:%d @ %s\n", group2.getHostAddress(), port, nif.getName()); dc1.join(group1, nif); dc2.join(group2, nif); int id = sendDatagram(nif, group1, port); receiveDatagram(dc1, "dc1", true, id); receiveDatagram(dc2, "dc2", false, id); id = sendDatagram(nif, group2, port); receiveDatagram(dc1, "dc1", false, id); receiveDatagram(dc2, "dc2", true, id); } finally { dc1.close(); dc2.close(); } } public static void main(String[] args) throws IOException { String os = System.getProperty("os.name"); // Requires IP_MULTICAST_ALL on Linux (new since 2.6.31) so skip // on older kernels. 
Note that we skip on <= version 3 to keep the // parsing simple if (os.equals("Linux")) { String osversion = System.getProperty("os.version"); String[] vers = osversion.split("\\.", 0); int major = Integer.parseInt(vers[0]); if (major < 3) { System.out.format("Kernel version is %s, test skipped%n", osversion); return; } } // get local network configuration to use NetworkConfiguration config = NetworkConfiguration.probe(); // multicast groups used for the test InetAddress ip4Group1 = InetAddress.getByName("225.4.5.6"); InetAddress ip4Group2 = InetAddress.getByName("225.4.6.6"); for (NetworkInterface nif: config.ip4Interfaces()) { InetAddress source = config.ip4Addresses(nif).iterator().next(); test(INET, nif, ip4Group1, ip4Group2); // Solaris and Linux allow IPv6 sockets join IPv4 multicast groups if (os.equals("SunOS") || os.equals("Linux")) test(UNSPEC, nif, ip4Group1, ip4Group2); } } }
/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import static com.facebook.buck.util.BuckConstant.BIN_DIR; import static com.facebook.buck.util.BuckConstant.GEN_DIR; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import com.facebook.buck.dalvik.ZipSplitter; import com.facebook.buck.java.JavaLibraryRule; import com.facebook.buck.java.Keystore; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargetFactory; import com.facebook.buck.model.BuildTargetPattern; import com.facebook.buck.rules.BuildContext; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.DependencyGraph; import com.facebook.buck.rules.FakeAbstractBuildRuleBuilderParams; import com.facebook.buck.rules.FileSourcePath; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.shell.BashStep; import com.facebook.buck.step.ExecutionContext; import com.facebook.buck.step.Step; import com.facebook.buck.step.fs.MakeCleanDirectoryStep; import com.facebook.buck.step.fs.MkdirAndSymlinkFileStep; import com.facebook.buck.testutil.MoreAsserts; import com.facebook.buck.testutil.RuleMap; import 
com.facebook.buck.util.DirectoryTraversal; import com.facebook.buck.util.DirectoryTraverser; import com.facebook.buck.util.Paths; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.base.Predicates; import com.google.common.base.Strings; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import org.junit.Test; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.util.List; import java.util.Set; public class AndroidBinaryRuleTest { /** * Directory where native libraries are expected to put their output. */ final String nativeOutDir = "buck-out/bin/__native_zips__fbandroid_with_dash_debug_fbsign__/"; @Test public void testAndroidBinaryNoDx() { BuildRuleResolver ruleResolver = new BuildRuleResolver(); // Two android_library deps, neither with an assets directory. JavaLibraryRule libraryOne = createAndroidLibraryRule( "//java/src/com/facebook/base:libraryOne", ruleResolver, null, /* resDirectory */ null, /* assetDirectory */ null /* nativeLibsDirectory */); JavaLibraryRule libraryTwo = createAndroidLibraryRule( "//java/src/com/facebook/base:libraryTwo", ruleResolver, null, /* resDirectory */ null, /* assetDirectory */ null /* nativeLibsDirectory */); // One android_binary rule that depends on the two android_library rules. 
BuildTarget binaryBuildTarget = BuildTargetFactory.newInstance( "//java/src/com/facebook/base:apk"); AndroidBinaryRule androidBinary = ruleResolver.buildAndAddToIndex( AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(binaryBuildTarget) .addClasspathDep(libraryOne.getBuildTarget()) .addClasspathDep(libraryTwo.getBuildTarget()) .addBuildRuleToExcludeFromDex( BuildTargetFactory.newInstance("//java/src/com/facebook/base:libraryTwo")) .setManifest("java/src/com/facebook/base/AndroidManifest.xml") .setTarget("Google Inc.:Google APIs:16") .setKeystore(addKeystoreRule(ruleResolver)) .setPackageType("debug")); DependencyGraph graph = RuleMap.createGraphFromBuildRules(ruleResolver); AndroidTransitiveDependencies transitiveDependencies = androidBinary.findTransitiveDependencies(graph); AndroidDexTransitiveDependencies dexTransitiveDependencies = androidBinary.findDexTransitiveDependencies(graph); ImmutableList.Builder<Step> commands = ImmutableList.builder(); BuildContext context = createMock(BuildContext.class); replay(context); androidBinary.addProguardCommands( context, dexTransitiveDependencies.classpathEntriesToDex, transitiveDependencies.proguardConfigs, commands, ImmutableSet.<String>of()); verify(context); MakeCleanDirectoryStep expectedClean = new MakeCleanDirectoryStep("buck-out/gen/java/src/com/facebook/base/.proguard/apk"); GenProGuardConfigStep expectedGenProguard = new GenProGuardConfigStep( "buck-out/bin/java/src/com/facebook/base/__manifest_apk__/AndroidManifest.xml", ImmutableSet.<String>of(), "buck-out/gen/java/src/com/facebook/base/.proguard/apk/proguard.txt"); ProGuardObfuscateStep expectedObfuscation = new ProGuardObfuscateStep( "buck-out/gen/java/src/com/facebook/base/.proguard/apk/proguard.txt", ImmutableSet.<String>of(), false, ImmutableMap.of( "buck-out/gen/java/src/com/facebook/base/lib__libraryOne__output/libraryOne.jar", 
"buck-out/gen/java/src/com/facebook/base/.proguard/apk/buck-out/gen/java/src/com/" + "facebook/base/lib__libraryOne__output/libraryOne-obfuscated.jar"), ImmutableSet.of("buck-out/gen/java/src/com/facebook/base/lib__libraryTwo__output/libraryTwo.jar"), "buck-out/gen/java/src/com/facebook/base/.proguard/apk"); assertEquals( ImmutableList.of(expectedClean, expectedGenProguard, expectedObfuscation), commands.build()); } /** * Tests an android_binary with zero dependent android_library rules that contains an assets * directory. */ @Test public void testCreateAllAssetsDirectoryWithZeroAssetsDirectories() throws IOException { BuildRuleResolver ruleResolver = new BuildRuleResolver(); // Two android_library deps, neither with an assets directory. JavaLibraryRule libraryOne = createAndroidLibraryRule( "//java/src/com/facebook/base:libraryOne", ruleResolver, null, /* resDirectory */ null, /* assetDirectory */ null /* nativeLibsDirectory */); JavaLibraryRule libraryTwo = createAndroidLibraryRule( "//java/src/com/facebook/base:libraryTwo", ruleResolver, null, /* resDirectory */ null, /* assetDirectory */ null /* nativeLibsDirectory */); // One android_binary rule that depends on the two android_library rules. BuildTarget binaryBuildTarget = BuildTargetFactory.newInstance( "//java/src/com/facebook/base:apk"); AndroidBinaryRule androidBinary = ruleResolver.buildAndAddToIndex( AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(binaryBuildTarget) .addClasspathDep(libraryOne.getBuildTarget()) .addClasspathDep(libraryTwo.getBuildTarget()) .setManifest("java/src/com/facebook/base/AndroidManifest.xml") .setTarget("Google Inc.:Google APIs:16") .setKeystore(addKeystoreRule(ruleResolver)) .setPackageType("debug")); // Build up the parameters needed to invoke createAllAssetsDirectory(). 
Set<String> assetsDirectories = ImmutableSet.of(); ImmutableList.Builder<Step> commands = ImmutableList.builder(); DirectoryTraverser traverser = new DirectoryTraverser() { @Override public void traverse(DirectoryTraversal traversal) { throw new RuntimeException("Unexpected: no assets directories to traverse!"); } }; // Invoke createAllAssetsDirectory(), the method under test. Optional<String> allAssetsDirectory = androidBinary.createAllAssetsDirectory( assetsDirectories, ImmutableMap.<String, File>of(), commands, traverser); // Verify that no assets/ directory is used. assertFalse("There should not be an assets/ directory to pass to aapt.", allAssetsDirectory.isPresent()); assertTrue("There should not be any commands to build up an assets/ directory.", commands.build().isEmpty()); } /** * Tests an android_binary with one dependent android_library rule that contains an assets * directory. */ @Test public void testCreateAllAssetsDirectoryWithOneAssetsDirectory() throws IOException { BuildRuleResolver ruleResolver = new BuildRuleResolver(); // Two android_library deps, one of which has an assets directory. JavaLibraryRule libraryOne = createAndroidLibraryRule( "//java/src/com/facebook/base:libraryOne", ruleResolver, null, /* resDirectory */ null, /* assetDirectory */ null /* nativeLibsDirectory */); JavaLibraryRule libraryTwo = createAndroidLibraryRule( "//java/src/com/facebook/base:libraryTwo", ruleResolver, null, /* resDirectory */ "java/src/com/facebook/base/assets2", null /* nativeLibsDirectory */); AndroidResourceRule resourceOne = (AndroidResourceRule) ruleResolver .get(BuildTargetFactory.newInstance("//java/src/com/facebook/base:libraryTwo_resources")); // One android_binary rule that depends on the two android_library rules. 
BuildTarget binaryBuildTarget = BuildTargetFactory.newInstance( "//java/src/com/facebook/base:apk"); AndroidBinaryRule androidBinary = ruleResolver.buildAndAddToIndex( AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(binaryBuildTarget) .addClasspathDep(libraryOne.getBuildTarget()) .addClasspathDep(libraryTwo.getBuildTarget()) .setManifest("java/src/com/facebook/base/AndroidManifest.xml") .setTarget("Google Inc.:Google APIs:16") .setKeystore(addKeystoreRule(ruleResolver)) .setPackageType("debug")); // Build up the parameters needed to invoke createAllAssetsDirectory(). Set<String> assetsDirectories = ImmutableSet.of(resourceOne.getAssets()); ImmutableList.Builder<Step> commands = ImmutableList.builder(); DirectoryTraverser traverser = new DirectoryTraverser() { @Override public void traverse(DirectoryTraversal traversal) throws IOException { String rootPath = Paths.normalizePathSeparator(traversal.getRoot().getPath()); if ("java/src/com/facebook/base/assets2".equals(rootPath)) { traversal.visit( new File("java/src/com/facebook/base/assets2", "fonts/Theinhardt-Medium.otf"), "fonts/Theinhardt-Medium.otf"); traversal.visit( new File("java/src/com/facebook/base/assets2", "fonts/Theinhardt-Regular.otf"), "fonts/Theinhardt-Regular.otf"); } else { throw new RuntimeException("Unexpected path: " + rootPath); } } }; // Invoke createAllAssetsDirectory(), the method under test. Optional<String> allAssetsDirectory = androidBinary.createAllAssetsDirectory( assetsDirectories, ImmutableMap.<String, File>of(), commands, traverser); // Verify that the existing assets/ directory will be passed to aapt. 
assertTrue(allAssetsDirectory.isPresent()); assertEquals( "Even though there is only one assets directory, the one in " + BIN_DIR + " should be used.", androidBinary.getPathToAllAssetsDirectory(), allAssetsDirectory.get()); } /** * Tests an android_binary with multiple dependent android_library rules, each with its own assets * directory. */ @Test public void testCreateAllAssetsDirectoryWithMultipleAssetsDirectories() throws IOException { BuildRuleResolver ruleResolver = new BuildRuleResolver(); // Two android_library deps, each with an assets directory. JavaLibraryRule libraryOne = createAndroidLibraryRule( "//java/src/com/facebook/base:libraryOne", ruleResolver, null, /* resDirectory */ "java/src/com/facebook/base/assets1", null /* nativeLibsDirectory */); JavaLibraryRule libraryTwo = createAndroidLibraryRule( "//java/src/com/facebook/base:libraryTwo", ruleResolver, null, /* resDirectory */ "java/src/com/facebook/base/assets2", null /* nativeLibsDirectory */); // One android_binary rule that depends on the two android_library rules. 
BuildTarget binaryBuildTarget = BuildTargetFactory.newInstance( "//java/src/com/facebook/base:apk"); AndroidBinaryRule androidBinary = ruleResolver.buildAndAddToIndex( AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(binaryBuildTarget) .addClasspathDep(libraryOne.getBuildTarget()) .addClasspathDep(libraryTwo.getBuildTarget()) .setManifest("java/src/com/facebook/base/AndroidManifest.xml") .setTarget("Google Inc.:Google APIs:16") .setKeystore(addKeystoreRule(ruleResolver)) .setPackageType("debug")); AndroidResourceRule resourceOne = (AndroidResourceRule) ruleResolver.get( BuildTargetFactory.newInstance("//java/src/com/facebook/base:libraryOne_resources")); AndroidResourceRule resourceTwo = (AndroidResourceRule) ruleResolver.get( BuildTargetFactory.newInstance("//java/src/com/facebook/base:libraryTwo_resources")); // Build up the parameters needed to invoke createAllAssetsDirectory(). Set<String> assetsDirectories = ImmutableSet.of( resourceOne.getAssets(), resourceTwo.getAssets()); ImmutableList.Builder<Step> commands = ImmutableList.builder(); DirectoryTraverser traverser = new DirectoryTraverser() { @Override public void traverse(DirectoryTraversal traversal) throws IOException { String rootPath = Paths.normalizePathSeparator(traversal.getRoot().getPath()); if ("java/src/com/facebook/base/assets1".equals(rootPath)) { traversal.visit( new File("java/src/com/facebook/base/assets1", "guava-10.0.1-fork.dex.1.jar"), "guava-10.0.1-fork.dex.1.jar"); } else if ("java/src/com/facebook/base/assets2".equals(rootPath)) { traversal.visit( new File("java/src/com/facebook/base/assets2", "fonts/Theinhardt-Medium.otf"), "fonts/Theinhardt-Medium.otf"); traversal.visit( new File("java/src/com/facebook/base/assets2", "fonts/Theinhardt-Regular.otf"), "fonts/Theinhardt-Regular.otf"); } else { throw new RuntimeException("Unexpected path: " + rootPath); } } }; // Invoke createAllAssetsDirectory(), the method under test. 
Optional<String> allAssetsDirectory = androidBinary.createAllAssetsDirectory( assetsDirectories, ImmutableMap.<String, File>of(), commands, traverser); // Verify that an assets/ directory will be created and passed to aapt. assertTrue(allAssetsDirectory.isPresent()); assertEquals(BIN_DIR + "/java/src/com/facebook/base/__assets_apk__", allAssetsDirectory.get()); List<? extends Step> expectedCommands = ImmutableList.of( new MakeCleanDirectoryStep(BIN_DIR + "/java/src/com/facebook/base/__assets_apk__"), new MkdirAndSymlinkFileStep( "java/src/com/facebook/base/assets1/guava-10.0.1-fork.dex.1.jar", BIN_DIR + "/java/src/com/facebook/base/__assets_apk__/guava-10.0.1-fork.dex.1.jar"), new MkdirAndSymlinkFileStep( "java/src/com/facebook/base/assets2/fonts/Theinhardt-Medium.otf", BIN_DIR + "/java/src/com/facebook/base/__assets_apk__/fonts/Theinhardt-Medium.otf"), new MkdirAndSymlinkFileStep( "java/src/com/facebook/base/assets2/fonts/Theinhardt-Regular.otf", BIN_DIR + "/java/src/com/facebook/base/__assets_apk__/fonts/Theinhardt-Regular.otf")); MoreAsserts.assertListEquals(expectedCommands, commands.build()); } private JavaLibraryRule createAndroidLibraryRule(String buildTarget, BuildRuleResolver ruleResolver, String resDirectory, String assetDirectory, String nativeLibsDirectory) { BuildTarget libraryOnebuildTarget = BuildTargetFactory.newInstance(buildTarget); AndroidLibraryRule.Builder androidLibraryRuleBuilder = AndroidLibraryRule .newAndroidLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .addSrc(buildTarget.split(":")[1] + ".java") .setBuildTarget(libraryOnebuildTarget); if (!Strings.isNullOrEmpty(resDirectory) || !Strings.isNullOrEmpty(assetDirectory)) { BuildTarget resourceOnebuildTarget = BuildTargetFactory.newInstance(buildTarget); AndroidResourceRule androidResourceRule = ruleResolver.buildAndAddToIndex( AndroidResourceRule.newAndroidResourceRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setAssetsDirectory(assetDirectory) .setRes(resDirectory) 
.setBuildTarget(resourceOnebuildTarget)); androidLibraryRuleBuilder.addDep(androidResourceRule.getBuildTarget()); } if (!Strings.isNullOrEmpty(resDirectory) || !Strings.isNullOrEmpty(assetDirectory)) { BuildTarget resourceOnebuildTarget = BuildTargetFactory.newInstance(buildTarget + "_resources"); AndroidResourceRule androidResourceRule = ruleResolver.buildAndAddToIndex( AndroidResourceRule.newAndroidResourceRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setAssetsDirectory(assetDirectory) .setRes(resDirectory) .setBuildTarget(resourceOnebuildTarget)); androidLibraryRuleBuilder.addDep(androidResourceRule.getBuildTarget()); } if (!Strings.isNullOrEmpty(nativeLibsDirectory)) { BuildTarget nativeLibOnebuildTarget = BuildTargetFactory.newInstance(buildTarget + "_native_libs"); BuildRule nativeLibsRule = ruleResolver.buildAndAddToIndex( PrebuiltNativeLibrary.newPrebuiltNativeLibrary( new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(nativeLibOnebuildTarget) .setNativeLibsDirectory(nativeLibsDirectory)); androidLibraryRuleBuilder.addDep(nativeLibsRule.getBuildTarget()); } JavaLibraryRule androidLibraryRule = ruleResolver.buildAndAddToIndex( androidLibraryRuleBuilder); return androidLibraryRule; } @Test public void testGetInputsToCompareToOutput() { BuildRuleResolver ruleResolver = new BuildRuleResolver(); AndroidBinaryRule.Builder androidBinaryRuleBuilder = AndroidBinaryRule .newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//java/src/com/facebook:app")) .setManifest("java/src/com/facebook/AndroidManifest.xml") .setTarget("Google Inc.:Google APIs:16") .setKeystore(addKeystoreRule(ruleResolver)); BuildContext context = createMock(BuildContext.class); replay(context); MoreAsserts.assertListEquals( "getInputsToCompareToOutput() should include manifest.", ImmutableList.of("java/src/com/facebook/AndroidManifest.xml"), ruleResolver.buildAndAddToIndex(androidBinaryRuleBuilder) 
.getInputsToCompareToOutput()); SourcePath proguardConfig = new FileSourcePath("java/src/com/facebook/proguard.cfg"); androidBinaryRuleBuilder.setProguardConfig(Optional.of(proguardConfig)); MoreAsserts.assertListEquals( "getInputsToCompareToOutput() should include Proguard config, if present.", ImmutableList.of( "java/src/com/facebook/AndroidManifest.xml", "java/src/com/facebook/proguard.cfg"), ruleResolver.buildAndAddToIndex(androidBinaryRuleBuilder) .getInputsToCompareToOutput()); verify(context); } @Test public void testGetUnsignedApkPath() { BuildRuleResolver ruleResolver = new BuildRuleResolver(); AndroidBinaryRule ruleInRootDirectory = ruleResolver.buildAndAddToIndex( AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//:fb4a")) .setManifest("AndroidManifest.xml") .setKeystore(addKeystoreRule(ruleResolver)) .setTarget("Google Inc.:Google APIs:16")); assertEquals(GEN_DIR + "/fb4a.apk", ruleInRootDirectory.getApkPath()); AndroidBinaryRule ruleInNonRootDirectory = ruleResolver.buildAndAddToIndex( AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//java/com/example:fb4a")) .setManifest("AndroidManifest.xml") .setKeystore(addKeystoreRule(ruleResolver)) .setTarget("Google Inc.:Google APIs:16")); assertEquals(GEN_DIR + "/java/com/example/fb4a.apk", ruleInNonRootDirectory.getApkPath()); } @Test public void testGetProguardOutputFromInputClasspath() { BuildRuleResolver ruleResolver = new BuildRuleResolver(); AndroidBinaryRule rule = ruleResolver.buildAndAddToIndex( AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//:fbandroid_with_dash_debug_fbsign")) .setManifest("AndroidManifest.xml") .setKeystore(addKeystoreRule(ruleResolver)) .setTarget("Google Inc.:Google APIs:16")); String proguardDir = 
rule.getProguardOutputFromInputClasspath( BIN_DIR + "/first-party/orca/lib-base/lib__lib-base__classes"); assertEquals(GEN_DIR + "/.proguard/fbandroid_with_dash_debug_fbsign/" + BIN_DIR + "/first-party/orca/lib-base/lib__lib-base__classes-obfuscated.jar", proguardDir); } private void assertCommandsInOrder(List<Step> steps, List<Class<?>> expectedCommands) { Iterable<Class<?>> filteredObservedCommands = FluentIterable .from(steps) .transform(new Function<Step, Class<?>>() { @Override public Class<?> apply(Step command) { return command.getClass(); } }) .filter(Predicates.in(Sets.newHashSet(expectedCommands))); MoreAsserts.assertIterablesEquals(expectedCommands, filteredObservedCommands); } @Test public void testDexingCommand() { BuildRuleResolver ruleResolver = new BuildRuleResolver(); AndroidBinaryRule splitDexRule = ruleResolver.buildAndAddToIndex( AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//:fbandroid_with_dash_debug_fbsign")) .setManifest("AndroidManifest.xml") .setKeystore(addKeystoreRule(ruleResolver)) .setTarget("Google Inc.:Google APIs:16") .setDexSplitMode(new DexSplitMode( /* shouldSplitDex */ true, ZipSplitter.DexSplitStrategy.MAXIMIZE_PRIMARY_DEX_SIZE, DexStore.JAR, /* useLinearAllocSplitDex */ false))); Set<String> classpath = Sets.newHashSet(); ImmutableSet.Builder<String> secondaryDexDirectories = ImmutableSet.builder(); ImmutableList.Builder<Step> commandsBuilder = ImmutableList.builder(); String primaryDexPath = BIN_DIR + "/.dex/classes.dex"; splitDexRule.addDexingCommands(classpath, secondaryDexDirectories, commandsBuilder, primaryDexPath, /* sourcePathResolver */ new Function<SourcePath, Path>() { @Override public Path apply(SourcePath input) { throw new UnsupportedOperationException("This resolver should not be used."); } }); assertEquals("Expected 2 new assets paths (one for metadata.txt and the other for the " + "secondary zips)", 2, 
secondaryDexDirectories.build().size()); List<Step> steps = commandsBuilder.build(); assertCommandsInOrder(steps, ImmutableList.<Class<?>>of(SplitZipStep.class, SmartDexingStep.class)); } @Test public void testCopyNativeLibraryCommandWithoutCpuFilter() { createAndroidBinaryRuleAndTestCopyNativeLibraryCommand( ImmutableSet.<String>of() /* cpuFilters */, "/path/to/source", "/path/to/destination/", ImmutableList.of( "bash -c cp -R /path/to/source/* /path/to/destination/")); } @Test public void testCopyNativeLibraryCommand() { createAndroidBinaryRuleAndTestCopyNativeLibraryCommand( ImmutableSet.of("armv7"), "/path/to/source", "/path/to/destination/", ImmutableList.of( "bash -c " + "[ -d /path/to/source/armeabi-v7a ] && " + "cp -R /path/to/source/armeabi-v7a /path/to/destination/ || " + "exit 0")); } @Test public void testCopyNativeLibraryCommandWithMultipleCpuFilters() { createAndroidBinaryRuleAndTestCopyNativeLibraryCommand( ImmutableSet.of("arm", "x86"), "/path/to/source", "/path/to/destination/", ImmutableList.of( "bash -c [ -d /path/to/source/armeabi ] && " + "cp -R /path/to/source/armeabi /path/to/destination/ || exit 0", "bash -c [ -d /path/to/source/x86 ] && " + "cp -R /path/to/source/x86 /path/to/destination/ || exit 0")); } private void createAndroidBinaryRuleAndTestCopyNativeLibraryCommand( ImmutableSet<String> cpuFilters, String sourceDir, String destinationDir, ImmutableList<String> expectedShellCommands) { BuildRuleResolver ruleResolver = new BuildRuleResolver(); AndroidBinaryRule.Builder builder = AndroidBinaryRule.newAndroidBinaryRuleBuilder( new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//:fbandroid_with_dash_debug_fbsign")) .setManifest("AndroidManifest.xml") .setKeystore(addKeystoreRule(ruleResolver)) .setTarget("Google Inc:Google APIs:16"); for (String filter: cpuFilters) { builder.addCpuFilter(filter); } ImmutableList.Builder<Step> commands = ImmutableList.builder(); AndroidBinaryRule buildRule = 
ruleResolver.buildAndAddToIndex(builder);
    buildRule.copyNativeLibrary(sourceDir, destinationDir, commands);

    ImmutableList<Step> steps = commands.build();

    // BUGFIX: arguments were reversed (actual, expected); JUnit's assertEquals takes the
    // expected value first, and the per-command assertion below already follows that order.
    assertEquals(expectedShellCommands.size(), steps.size());

    ExecutionContext context = createMock(ExecutionContext.class);
    replay(context);

    for (int i = 0; i < steps.size(); ++i) {
      Iterable<String> observedArgs = ((BashStep) steps.get(i)).getShellCommand(context);
      String observedCommand = Joiner.on(' ').join(observedArgs);
      assertEquals(expectedShellCommands.get(i), observedCommand);
    }

    verify(context);
  }

  /**
   * Registers a debug keystore rule (visible to all packages) in the resolver and returns its
   * target, for use as the required keystore of test android_binary rules.
   */
  private BuildTarget addKeystoreRule(BuildRuleResolver ruleResolver) {
    BuildTarget keystoreTarget = BuildTargetFactory.newInstance("//keystore:debug");
    ruleResolver.buildAndAddToIndex(
        Keystore.newKeystoreBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(keystoreTarget)
            .setStore("keystore/debug.keystore")
            .setProperties("keystore/debug.keystore.properties")
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));
    return keystoreTarget;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.metron.stellar.common; import com.google.common.collect.ImmutableMap; import org.apache.commons.lang3.tuple.Pair; import org.apache.metron.stellar.dsl.ParseException; import org.apache.metron.stellar.dsl.Token; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.util.HashMap; import java.util.Map; import static org.apache.metron.stellar.common.utils.StellarProcessorUtils.run; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @SuppressWarnings("unchecked") public class StellarArithmeticTest { @Rule public final ExpectedException exception = ExpectedException.none(); @Test public void addingLongsShouldYieldLong() throws Exception { final long timestamp = 1452013350000L; String query = "TO_EPOCH_TIMESTAMP('2016-01-05 17:02:30', 'yyyy-MM-dd HH:mm:ss', 'UTC') + 2"; assertEquals(timestamp + 2, run(query, new HashMap<>())); } @Test public void addingIntegersShouldYieldAnInteger() throws Exception { String query = "1 + 2"; assertEquals(3, run(query, new HashMap<>())); } @Test public void 
addingDoublesShouldYieldADouble() throws Exception { String query = "1.0 + 2.0"; assertEquals(3.0, run(query, new HashMap<>())); } @Test public void addingDoubleAndIntegerWhereSubjectIsDoubleShouldYieldADouble() throws Exception { String query = "2.1 + 1"; assertEquals(3.1, run(query, new HashMap<>())); } @Test public void addingDoubleAndIntegerWhereSubjectIsIntegerShouldYieldADouble() throws Exception { String query = "1 + 2.1"; assertEquals(3.1, run(query, new HashMap<>())); } @Test public void testArithmetic() { assertEquals(3, run("1 + 2", new HashMap<>())); assertEquals(3.2, run("1.2 + 2", new HashMap<>())); assertEquals(1.2e-3 + 2, run("1.2e-3 + 2", new HashMap<>())); assertEquals(1.2f + 3.7, run("1.2f + 3.7", new HashMap<>())); assertEquals(12L * (1.2f + 7), run("12L*(1.2f + 7)", new HashMap<>())); assertEquals(12.2f * (1.2f + 7L), run("TO_FLOAT(12.2) * (1.2f + 7L)", new HashMap<>())); } @Test public void testNumericOperations() { { String query = "TO_INTEGER(1 + 2*2 + 3 - 4 - 0.5)"; assertEquals(3, (Integer) run(query, new HashMap<>()), 1e-6); } { String query = "1 + 2*2 + 3 - 4 - 0.5"; assertEquals(3.5, (Double) run(query, new HashMap<>()), 1e-6); } { String query = "2*one*(1 + 2*2 + 3 - 4)"; assertEquals(8, run(query, ImmutableMap.of("one", 1))); } { String query = "2*(1 + 2 + 3 - 4)"; assertEquals(4, (Integer) run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.000001)), 1e-6); } { String query = "1 + 2 + 3 - 4 - 2"; assertEquals(0, (Integer) run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.000001)), 1e-6); } { String query = "1 + 2 + 3 + 4"; assertEquals(10, (Integer) run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.000001)), 1e-6); } { String query = "(one + 2)*3"; assertEquals(9, (Integer) run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.000001)), 1e-6); } { String query = "TO_INTEGER((one + 2)*3.5)"; assertEquals(10, (Integer) run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.000001)), 1e-6); } { String query 
= "1 + 2*3"; assertEquals(7, (Integer) run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.000001)), 1e-6); } { String query = "TO_LONG(foo)"; Assert.assertNull(run(query, ImmutableMap.of("foo", "not a number"))); } { String query = "TO_LONG(foo)"; assertEquals(232321L, run(query, ImmutableMap.of("foo", "00232321"))); } { String query = "TO_LONG(foo)"; assertEquals(Long.MAX_VALUE, run(query, ImmutableMap.of("foo", Long.toString(Long.MAX_VALUE)))); } } @Test public void verifyExpectedReturnTypes() throws Exception { Token<Integer> integer = mock(Token.class); when(integer.getValue()).thenReturn(1); Token<Long> lng = mock(Token.class); when(lng.getValue()).thenReturn(1L); Token<Double> dbl = mock(Token.class); when(dbl.getValue()).thenReturn(1.0D); Token<Float> flt = mock(Token.class); when(flt.getValue()).thenReturn(1.0F); Map<Pair<String, String>, Class<? extends Number>> expectedReturnTypeMappings = new HashMap<Pair<String, String>, Class<? extends Number>>() {{ put(Pair.of("TO_FLOAT(3.0)", "TO_LONG(1)"), Float.class); put(Pair.of("TO_FLOAT(3)", "3.0"), Double.class); put(Pair.of("TO_FLOAT(3)", "TO_FLOAT(3)"), Float.class); put(Pair.of("TO_FLOAT(3)", "3"), Float.class); put(Pair.of("TO_LONG(1)", "TO_LONG(1)"), Long.class); put(Pair.of("TO_LONG(1)", "3.0"), Double.class); put(Pair.of("TO_LONG(1)", "TO_FLOAT(3)"), Float.class); put(Pair.of("TO_LONG(1)", "3"), Long.class); put(Pair.of("3.0", "TO_LONG(1)"), Double.class); put(Pair.of("3.0", "3.0"), Double.class); put(Pair.of("3.0", "TO_FLOAT(3)"), Double.class); put(Pair.of("3.0", "3"), Double.class); put(Pair.of("3", "TO_LONG(1)"), Long.class); put(Pair.of("3", "3.0"), Double.class); put(Pair.of("3", "TO_FLOAT(3)"), Float.class); put(Pair.of("3", "3"), Integer.class); }}; expectedReturnTypeMappings.forEach((pair, expectedClass) -> { assertTrue(run(pair.getLeft() + " * " + pair.getRight(), ImmutableMap.of()).getClass() == expectedClass); assertTrue(run(pair.getLeft() + " + " + pair.getRight(), 
ImmutableMap.of()).getClass() == expectedClass); assertTrue(run(pair.getLeft() + " - " + pair.getRight(), ImmutableMap.of()).getClass() == expectedClass); assertTrue(run(pair.getLeft() + " / " + pair.getRight(), ImmutableMap.of()).getClass() == expectedClass); }); } @Test public void happyPathFloatArithmetic() throws Exception { Object run = run(".0f * 1", ImmutableMap.of()); assertEquals(.0f * 1, run); assertEquals(Float.class, run.getClass()); Object run1 = run("0.f / 1F", ImmutableMap.of()); assertEquals(0.f / 1F, run1); assertEquals(Float.class, run1.getClass()); Object run2 = run(".0F + 1.0f", ImmutableMap.of()); assertEquals(.0F + 1.0f, run2); assertEquals(Float.class, run2.getClass()); Object run3 = run("0.0f - 0.1f", ImmutableMap.of()); assertEquals(0.0f - 0.1f, run3); assertEquals(Float.class, run2.getClass()); } @SuppressWarnings("PointlessArithmeticExpression") @Test public void happyPathLongArithmetic() throws Exception { assertEquals(0L * 1L, run("0L * 1L", ImmutableMap.of())); assertEquals(0l / 1L, run("0l / 1L", ImmutableMap.of())); assertEquals(1L - 1l, run("1L - 1l", ImmutableMap.of())); assertEquals(2147483648L + 1L, run("2147483648L + 1L", ImmutableMap.of())); } @SuppressWarnings("NumericOverflow") @Test public void checkInterestingCases() throws Exception { assertEquals((((((1L) + .5d)))) * 6.f, run("(((((1L) + .5d)))) * 6.f", ImmutableMap.of())); assertEquals((((((1L) + .5d)))) * 6.f / 0.f, run("(((((1L) + .5d)))) * 6.f / 0.f", ImmutableMap.of())); assertEquals(Double.class, run("(((((1L) + .5d)))) * 6.f / 0.f", ImmutableMap.of()).getClass()); } @Test public void makeSureStellarProperlyEvaluatesLiteralsToExpectedTypes() throws Exception { { assertEquals(Float.class, run("6.f", ImmutableMap.of()).getClass()); assertEquals(Float.class, run(".0f", ImmutableMap.of()).getClass()); assertEquals(Float.class, run("6.0F", ImmutableMap.of()).getClass()); assertEquals(Float.class, run("6f", ImmutableMap.of()).getClass()); assertEquals(Float.class, 
run("6e-6f", ImmutableMap.of()).getClass()); assertEquals(Float.class, run("6e+6f", ImmutableMap.of()).getClass()); assertEquals(Float.class, run("6e6f", ImmutableMap.of()).getClass()); assertEquals(Float.class, run("TO_FLOAT(1231)", ImmutableMap.of()).getClass()); assertEquals(Float.class, run("TO_FLOAT(12.31)", ImmutableMap.of()).getClass()); assertEquals(Float.class, run("TO_FLOAT(12.31f)", ImmutableMap.of()).getClass()); assertEquals(Float.class, run("TO_FLOAT(12L)", ImmutableMap.of()).getClass()); } { assertEquals(Double.class, run("6.d", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("6.D", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("6.0d", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("6D", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("6e5D", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("6e-5D", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("6e+5D", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("TO_DOUBLE(1231)", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("TO_DOUBLE(12.31)", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("TO_DOUBLE(12.31f)", ImmutableMap.of()).getClass()); assertEquals(Double.class, run("TO_DOUBLE(12L)", ImmutableMap.of()).getClass()); } { assertEquals(Integer.class, run("6", ImmutableMap.of()).getClass()); assertEquals(Integer.class, run("60000000", ImmutableMap.of()).getClass()); assertEquals(Integer.class, run("-0", ImmutableMap.of()).getClass()); assertEquals(Integer.class, run("-60000000", ImmutableMap.of()).getClass()); assertEquals(Integer.class, run("TO_INTEGER(1231)", ImmutableMap.of()).getClass()); assertEquals(Integer.class, run("TO_INTEGER(12.31)", ImmutableMap.of()).getClass()); assertEquals(Integer.class, run("TO_INTEGER(12.31f)", ImmutableMap.of()).getClass()); assertEquals(Integer.class, run("TO_INTEGER(12L)", ImmutableMap.of()).getClass()); } { assertEquals(Long.class, 
run("12345678910l", ImmutableMap.of()).getClass()); assertEquals(Long.class, run("0l", ImmutableMap.of()).getClass()); assertEquals(Long.class, run("-0l", ImmutableMap.of()).getClass()); assertEquals(Long.class, run("-60000000L", ImmutableMap.of()).getClass()); assertEquals(Long.class, run("-60000000L", ImmutableMap.of()).getClass()); assertEquals(Long.class, run("TO_LONG(1231)", ImmutableMap.of()).getClass()); assertEquals(Long.class, run("TO_LONG(12.31)", ImmutableMap.of()).getClass()); assertEquals(Long.class, run("TO_LONG(12.31f)", ImmutableMap.of()).getClass()); assertEquals(Long.class, run("TO_LONG(12L)", ImmutableMap.of()).getClass()); } } @Test public void parseExceptionMultipleLeadingZerosOnInteger() throws Exception { exception.expect(ParseException.class); run("000000", ImmutableMap.of()); } @Test public void parseExceptionMultipleLeadingZerosOnLong() throws Exception { exception.expect(ParseException.class); run("000000l", ImmutableMap.of()); } @Test public void parseExceptionMultipleLeadingZerosOnDouble() throws Exception { exception.expect(ParseException.class); run("000000d", ImmutableMap.of()); } @Test public void parseExceptionMultipleLeadingZerosOnFloat() throws Exception { exception.expect(ParseException.class); run("000000f", ImmutableMap.of()); } @Test public void parseExceptionMultipleLeadingNegativeSignsFloat() throws Exception { exception.expect(ParseException.class); run("--000000f", ImmutableMap.of()); } @Test public void parseExceptionMultipleLeadingNegativeSignsDouble() throws Exception { exception.expect(ParseException.class); run("--000000D", ImmutableMap.of()); } @Test public void parseExceptionMultipleLeadingNegativeSignsLong() throws Exception { exception.expect(ParseException.class); run("--000000L", ImmutableMap.of()); } @Test(expected = ParseException.class) public void unableToDivideByZeroWithIntegers() throws Exception { run("0/0", ImmutableMap.of()); } @Test(expected = ParseException.class) public void 
unableToDivideByZeroWithLongs() throws Exception { run("0L/0L", ImmutableMap.of()); } @Test public void ableToDivideByZero() throws Exception { assertEquals(0F/0F, run("0F/0F", ImmutableMap.of())); assertEquals(0D/0D, run("0D/0D", ImmutableMap.of())); assertEquals(0D/0F, run("0D/0F", ImmutableMap.of())); assertEquals(0F/0D, run("0F/0D", ImmutableMap.of())); assertEquals(0F/0, run("0F/0", ImmutableMap.of())); assertEquals(0D/0, run("0D/0", ImmutableMap.of())); assertEquals(0/0D, run("0/0D", ImmutableMap.of())); assertEquals(0/0F, run("0/0F", ImmutableMap.of())); } }
package org.codehaus.mojo.build; /** * The MIT License * * Copyright (c) 2015 Learning Commons, University of Calgary * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and * associated documentation files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, publish, distribute, * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT * NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.text.MessageFormat; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import org.apache.maven.execution.MavenSession; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; import org.apache.maven.scm.ScmException; import org.apache.maven.scm.ScmFile; import org.apache.maven.scm.ScmFileSet; import org.apache.maven.scm.ScmResult; import org.apache.maven.scm.command.info.InfoItem; import org.apache.maven.scm.command.info.InfoScmResult; import org.apache.maven.scm.command.status.StatusScmResult; import org.apache.maven.scm.command.update.UpdateScmResult; import org.apache.maven.scm.command.update.UpdateScmResultWithRevision; import org.apache.maven.scm.log.ScmLogDispatcher; import org.apache.maven.scm.log.ScmLogger; import org.apache.maven.scm.manager.ScmManager; import org.apache.maven.scm.provider.ScmProvider; import org.apache.maven.scm.provider.git.gitexe.command.branch.GitBranchCommand; import org.apache.maven.scm.provider.git.repository.GitScmProviderRepository; import org.apache.maven.scm.provider.hg.HgScmProvider; import org.apache.maven.scm.provider.hg.HgUtils; import org.apache.maven.scm.repository.ScmRepository; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.StringUtils; import static java.lang.Boolean.parseBoolean; import static java.lang.Integer.parseInt; /** * This mojo is designed to give you a build number. 
So when you might make 100 builds of version 1.0-SNAPSHOT, you can * differentiate between them all. * <p> * The build number is based on the revision number retrieved from SCM. It is known to work with Subversion, GIT, and * Mercurial. * <p> * This mojo can also check to make sure that you have checked everything into SCM, before issuing the build number. * That behaviour can be suppressed, and then the latest local build number is used. * <p> * Build numbers are not automatically reflected in your artifact's filename, but can be added to the metadata. You can * access the build number in your pom with ${buildNumber}. You can also access ${timestamp} and the SCM branch of the * build (if applicable) in ${SCMBranch} * <p> * Note that there are several <code><strong>doFoo</strong></code> parameters. These parameters (doCheck, doUpdate, etc) * are the first thing evaluated. If there is no matching expression, we get the default-value. If there is (ie * <code>-Dmaven.buildNumber.doUpdate=false</code>), we get that value. So if the XML contains * <tt>&lt;doCheck&gt;true&lt;/doCheck&gt;</tt>, then normally that's the final value of the param in question. However, * this mojo reverses that behaviour, such that the command line parameters get the last say. * * @author <a href="mailto:woodj@ucalgary.ca">Julian Wood</a> * @version $Id$ */ @Mojo( name = "create", defaultPhase = LifecyclePhase.INITIALIZE, requiresProject = true, threadSafe = true ) public class CreateMojo extends AbstractScmMojo { private static final String DEFAULT_BRANCH_NAME = "UNKNOWN_BRANCH"; /** * You can rename the buildNumber property name to another property name if desired. * * @since 1.0-beta-1 */ @Parameter( property = "maven.buildNumber.buildNumberPropertyName", defaultValue = "buildNumber" ) private String buildNumberPropertyName; /** * You can rename the timestamp property name to another property name if desired. 
* * @since 1.0-beta-1 */ @Parameter( property = "maven.buildNumber.timestampPropertyName", defaultValue = "timestamp" ) private String timestampPropertyName; /** * If this is made true, we check for modified files, and if there are any, we fail the build. Note that this used * to be inverted (skipCheck), but needed to be changed to allow releases to work. This corresponds to 'svn status'. * * @since 1.0-beta-1 */ @Parameter( property = "maven.buildNumber.doCheck", defaultValue = "false" ) private boolean doCheck; /** * If this is made true, then the revision will be updated to the latest in the repo, otherwise it will remain what * it is locally. Note that this used to be inverted (skipUpdate), but needed to be changed to allow releases to * work. This corresponds to 'svn update'. * * @since 1.0-beta-1 */ @Parameter( property = "maven.buildNumber.doUpdate", defaultValue = "false" ) private boolean doUpdate; /** * Specify a message as specified by java.text.MessageFormat. This triggers "items" configuration to be read * * @since 1.0-beta-1 */ @Parameter( property = "maven.buildNumber.format" ) private String format; /** * Properties file to be created when "format" is not null and item has "buildNumber". See Usage for details * * @since 1.0-beta-2 */ @Parameter( defaultValue = "${basedir}/buildNumber.properties" ) private File buildNumberPropertiesFileLocation; /** * Specify the corresponding items for the format message, as specified by java.text.MessageFormat. Special item * values are "scmVersion", "timestamp" and "buildNumber[digits]", where [digits] are optional digits added to the * end of the number to select a property. * * @since 1.0-beta-1 */ @Parameter private List<?> items; /** * The locale used for date and time formatting. The locale name should be in the format defined in * {@link Locale#toString()}. The default locale is the platform default returned by {@link Locale#getDefault()}. 
* * @since 1.0-beta-2 */ @Parameter( property = "maven.buildNumber.locale" ) private String locale; /** * whether to retrieve the revision for the last commit, or the last revision of the repository. * * @since 1.0-beta-2 */ @Parameter( property = "maven.buildNumber.useLastCommittedRevision", defaultValue = "false" ) private boolean useLastCommittedRevision; /** * Apply this java.text.MessageFormat to the timestamp only (as opposed to the <code>format</code> parameter). * * @since 1.0-beta-2 */ @Parameter( property = "maven.buildNumber.timestampFormat" ) private String timestampFormat; /** * Selects alternative SCM provider implementations. Each map key denotes the original provider type as given in the * SCM URL like "cvs" or "svn", the map value specifies the provider type of the desired implementation to use * instead. In other words, this map configures a substitution mapping for SCM providers. * * @since 1.0-beta-3 */ @Parameter private Map<String, String> providerImplementations; /** * If set to true, will get the scm revision once for all modules of a multi-module project instead of fetching once * for each module. * * @since 1.0-beta-3 */ @Parameter( property = "maven.buildNumber.getRevisionOnlyOnce", defaultValue = "false" ) private boolean getRevisionOnlyOnce; /** * You can rename the buildScmBranch property name to another property name if desired. * * @since 1.0-beta-4 */ @Parameter( property = "maven.buildNumber.scmBranchPropertyName", defaultValue = "scmBranch" ) private String scmBranchPropertyName; // ////////////////////////////////////// internal maven components /////////////////////////////////// /** * Contains the full list of projects in the reactor. 
* * @since 1.0-beta-3 */ @Parameter( defaultValue = "${reactorProjects}", readonly = true, required = true ) private List<MavenProject> reactorProjects; @Parameter( defaultValue = "${session}", readonly = true, required = true ) private MavenSession session; // ////////////////////////////////////// internal variables /////////////////////////////////// private ScmLogDispatcher logger; private String revision; private boolean useScm; public void execute() throws MojoExecutionException, MojoFailureException { if ( skip ) { getLog().info( "Skipping execution." ); return; } if ( providerImplementations != null ) { for ( Entry<String, String> entry : providerImplementations.entrySet() ) { String providerType = entry.getKey(); String providerImplementation = entry.getValue(); getLog().info( "Change the default '" + providerType + "' provider implementation to '" + providerImplementation + "'." ); scmManager.setScmProviderImplementation( providerType, providerImplementation ); } } Date now = Calendar.getInstance().getTime(); if ( format != null ) { if ( items == null ) { throw new MojoExecutionException( " if you set a format, you must provide at least one item, please check documentation " ); } // needs to be an array // look for special values Object[] itemAry = new Object[items.size()]; for ( int i = 0; i < items.size(); i++ ) { Object item = items.get( i ); if ( item instanceof String ) { String s = (String) item; if ( s.equals( "timestamp" ) ) { itemAry[i] = now; } else if ( s.startsWith( "scmVersion" ) ) { useScm = true; itemAry[i] = getRevision(); } else if ( s.startsWith( "buildNumber" ) ) { // check for properties file File propertiesFile = this.buildNumberPropertiesFileLocation; // create if not exists if ( !propertiesFile.exists() ) { try { if ( !propertiesFile.getParentFile().exists() ) { propertiesFile.getParentFile().mkdirs(); } propertiesFile.createNewFile(); } catch ( IOException e ) { throw new MojoExecutionException( "Couldn't create properties file: " 
+ propertiesFile, e ); } } Properties properties = new Properties(); String buildNumberString = null; FileInputStream inputStream = null; FileOutputStream outputStream = null; try { // get the number for the buildNumber specified inputStream = new FileInputStream( propertiesFile ); properties.load( inputStream ); buildNumberString = properties.getProperty( s ); if ( buildNumberString == null ) { buildNumberString = "0"; } int buildNumber = parseInt( buildNumberString ); // store the increment properties.setProperty( s, String.valueOf( ++buildNumber ) ); outputStream = new FileOutputStream( propertiesFile ); properties.store( outputStream, "maven.buildNumber.plugin properties file" ); // use in the message (format) itemAry[i] = new Integer( buildNumber ); } catch ( NumberFormatException e ) { throw new MojoExecutionException( "Couldn't parse buildNumber in properties file to an Integer: " + buildNumberString ); } catch ( IOException e ) { throw new MojoExecutionException( "Couldn't load properties file: " + propertiesFile, e ); } finally { IOUtil.close( inputStream ); IOUtil.close( outputStream ); } } else { itemAry[i] = item; } } else { itemAry[i] = item; } } revision = format( itemAry ); } else { // Check if the plugin has already run. revision = project.getProperties().getProperty( this.buildNumberPropertyName ); if ( this.getRevisionOnlyOnce && revision != null ) { getLog().debug( "Revision available from previous execution" ); return; } if ( doCheck ) { // we fail if there are local mods checkForLocalModifications(); } else { getLog().debug( "Checking for local modifications: skipped." ); } if ( session.getSettings().isOffline() ) { getLog().info( "maven is executed in offline mode, Updating project files from SCM: skipped." 
); } else { if ( doUpdate ) { // we update your local repo // even after you commit, your revision stays the same until you update, thus this // action List<ScmFile> changedFiles = update(); for ( ScmFile file : changedFiles ) { getLog().debug( "Updated: " + file ); } if ( changedFiles.size() == 0 ) { getLog().debug( "No files needed updating." ); } } else { getLog().debug( "Updating project files from SCM: skipped." ); } } revision = getRevision(); } if ( project != null ) { String timestamp = String.valueOf( now.getTime() ); if ( timestampFormat != null ) { timestamp = MessageFormat.format( timestampFormat, new Object[] { now } ); } getLog().info( MessageFormat.format( "Storing buildNumber: {0} at timestamp: {1}", new Object[] { revision, timestamp } ) ); if ( revision != null ) { project.getProperties().put( buildNumberPropertyName, revision ); } project.getProperties().put( timestampPropertyName, timestamp ); String scmBranch = getScmBranch(); getLog().info( "Storing buildScmBranch: " + scmBranch ); project.getProperties().put( scmBranchPropertyName, scmBranch ); // Add the revision and timestamp properties to each project in the reactor if ( getRevisionOnlyOnce && reactorProjects != null ) { Iterator<MavenProject> projIter = reactorProjects.iterator(); while ( projIter.hasNext() ) { MavenProject nextProj = (MavenProject) projIter.next(); if ( revision != null ) { nextProj.getProperties().put( this.buildNumberPropertyName, revision ); } nextProj.getProperties().put( this.timestampPropertyName, timestamp ); nextProj.getProperties().put( this.scmBranchPropertyName, scmBranch ); } } } } /** * Formats the given argument using the configured format template and locale. 
* * @param arguments arguments to be formatted @ @return formatted result */ private String format( Object[] arguments ) { Locale l = Locale.getDefault(); if ( locale != null ) { String[] parts = locale.split( "_", 3 ); if ( parts.length <= 1 ) { l = new Locale( locale ); } else if ( parts.length == 2 ) { l = new Locale( parts[0], parts[1] ); } else { l = new Locale( parts[0], parts[1], parts[2] ); } } return new MessageFormat( format, l ).format( arguments ); } private void checkForLocalModifications() throws MojoExecutionException { getLog().debug( "Verifying there are no local modifications ..." ); List<ScmFile> changedFiles; try { changedFiles = getStatus(); } catch ( ScmException e ) { throw new MojoExecutionException( "An error has occurred while checking scm status.", e ); } if ( !changedFiles.isEmpty() ) { StringBuilder message = new StringBuilder(); String ls = System.getProperty( "line.separator" ); for ( ScmFile file : changedFiles ) { message.append( file.toString() ); message.append( ls ); } throw new MojoExecutionException( "Cannot create the build number because you have local modifications : \n" + message ); } } public List<ScmFile> update() throws MojoExecutionException { try { ScmRepository repository = getScmRepository(); ScmProvider scmProvider = scmManager.getProviderByRepository( repository ); UpdateScmResult result = scmProvider.update( repository, new ScmFileSet( scmDirectory ) ); if ( result == null ) { return Collections.emptyList(); } checkResult( result ); if ( result instanceof UpdateScmResultWithRevision ) { String revision = ( (UpdateScmResultWithRevision) result ).getRevision(); getLog().info( "Got a revision during update: " + revision ); this.revision = revision; } return result.getUpdatedFiles(); } catch ( ScmException e ) { throw new MojoExecutionException( "Couldn't update project. 
" + e.getMessage(), e ); } } public List<ScmFile> getStatus() throws ScmException { ScmRepository repository = getScmRepository(); ScmProvider scmProvider = scmManager.getProviderByRepository( repository ); StatusScmResult result = scmProvider.status( repository, new ScmFileSet( scmDirectory ) ); if ( result == null ) { return Collections.emptyList(); } checkResult( result ); return result.getChangedFiles(); } /** * Get the branch info for this revision from the repository. For svn, it is in svn info. * * @return * @throws MojoExecutionException * @throws MojoExecutionException */ public String getScmBranch() throws MojoExecutionException { try { ScmRepository repository = getScmRepository(); ScmProvider provider = scmManager.getProviderByRepository( repository ); /* git branch can be obtained directly by a command */ if ( GitScmProviderRepository.PROTOCOL_GIT.equals( provider.getScmType() ) ) { ScmFileSet fileSet = new ScmFileSet( scmDirectory ); return GitBranchCommand.getCurrentBranch( getLogger(), (GitScmProviderRepository) repository.getProviderRepository(), fileSet ); } else if ( provider instanceof HgScmProvider ) { /* hg branch can be obtained directly by a command */ HgOutputConsumer consumer = new HgOutputConsumer( getLogger() ); ScmResult result = HgUtils.execute( consumer, logger, scmDirectory, new String[] { "id", "-b" } ); checkResult( result ); if (StringUtils.isNotEmpty(consumer.getOutput())) { return consumer.getOutput(); } } } catch ( ScmException e ) { getLog().warn( "Cannot get the branch information from the git repository: \n" + e.getLocalizedMessage() ); } return getScmBranchFromUrl(); } private String getScmBranchFromUrl() throws MojoExecutionException { String scmUrl = null; try { ScmRepository repository = getScmRepository(); InfoScmResult scmResult = info( repository, new ScmFileSet( scmDirectory ) ); if ( scmResult == null || !scmResult.isSuccess() ) { getLog().debug( "Cannot get the branch information from the scm repository : " + ( 
scmResult == null ? "" : scmResult.getCommandOutput() ) ); return DEFAULT_BRANCH_NAME; } if ( scmResult.getInfoItems().isEmpty() ) { if ( !StringUtils.isEmpty( revisionOnScmFailure ) ) { setDoCheck( false ); setDoUpdate( false ); return DEFAULT_BRANCH_NAME; } } if ( !scmResult.getInfoItems().isEmpty() ) { InfoItem info = scmResult.getInfoItems().get( 0 ); scmUrl = info.getURL(); } } catch ( ScmException e ) { if ( !StringUtils.isEmpty( revisionOnScmFailure ) ) { getLog().warn( "Cannot get the branch information from the scm repository, proceeding with " + DEFAULT_BRANCH_NAME + " : \n" + e.getLocalizedMessage() ); setDoCheck( false ); setDoUpdate( false ); return DEFAULT_BRANCH_NAME; } throw new MojoExecutionException( "Cannot get the branch information from the scm repository : \n" + e.getLocalizedMessage(), e ); } return filterBranchFromScmUrl( scmUrl ); } protected String filterBranchFromScmUrl( String scmUrl ) { String scmBranch = "UNKNOWN"; if ( StringUtils.contains( scmUrl, "/trunk" ) ) { scmBranch = "trunk"; } else if ( StringUtils.contains( scmUrl, "/branches" ) || StringUtils.contains( scmUrl, "/tags" ) ) { scmBranch = scmUrl.replaceFirst( ".*((branches|tags)/[^/]*).*", "$1" ); } return scmBranch; } /** * Get the revision info from the repository. 
For svn, it is svn info * * @return * @throws MojoExecutionException */ public String getRevision() throws MojoExecutionException { if ( format != null && !useScm ) { return revision; } useScm = false; try { return this.getScmRevision(); } catch ( ScmException e ) { if ( !StringUtils.isEmpty( revisionOnScmFailure ) ) { getLog().warn( "Cannot get the revision information from the scm repository, proceeding with " + "revision of " + revisionOnScmFailure + " : \n" + e.getLocalizedMessage() ); setDoCheck( false ); setDoUpdate( false ); return revisionOnScmFailure; } throw new MojoExecutionException( "Cannot get the revision information from the scm repository : \n" + e.getLocalizedMessage(), e ); } } /** * @return * @todo normally this would be handled in AbstractScmProvider */ private ScmLogger getLogger() { if ( logger == null ) { logger = new ScmLogDispatcher(); } return logger; } // //////////////////////////////////////////////////////////////////////////////////////////// // setters to help with test public void setScmManager( ScmManager scmManager ) { this.scmManager = scmManager; } public void setUrlScm( String urlScm ) { this.scmConnectionUrl = urlScm; } public void setUsername( String username ) { this.username = username; } public void setPassword( String password ) { this.password = password; } public void setDoCheck( boolean doCheck ) { this.doCheck = getBooleanProperty( "maven.buildNumber.doCheck", doCheck ); } public void setDoUpdate( boolean doUpdate ) { this.doUpdate = getBooleanProperty( "maven.buildNumber.doUpdate", doUpdate ); } private boolean getBooleanProperty( String key, boolean defaultValue ) { String systemProperty = System.getProperty( key ); if (systemProperty == null) { return defaultValue; } else { return parseBoolean( systemProperty ); } } void setFormat( String format ) { this.format = format; } void setLocale( String locale ) { this.locale = locale; } void setItems( List<?> items ) { this.items = items; } public void 
setBuildNumberPropertiesFileLocation( File buildNumberPropertiesFileLocation ) { this.buildNumberPropertiesFileLocation = buildNumberPropertiesFileLocation; } public void setScmDirectory( File scmDirectory ) { this.scmDirectory = scmDirectory; } public void setRevisionOnScmFailure( String revisionOnScmFailure ) { this.revisionOnScmFailure = revisionOnScmFailure; } public void setShortRevisionLength( int shortRevision ) { this.shortRevisionLength = shortRevision; } }
package org.keycloak.services.resources.admin; import org.jboss.logging.Logger; import org.jboss.resteasy.annotations.cache.NoCache; import org.jboss.resteasy.spi.BadRequestException; import org.jboss.resteasy.spi.NotFoundException; import org.jboss.resteasy.spi.ResteasyProviderFactory; import org.keycloak.events.admin.OperationType; import org.keycloak.models.ClientModel; import org.keycloak.models.ClientSessionModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.ModelDuplicateException; import org.keycloak.models.RealmModel; import org.keycloak.models.UserCredentialModel; import org.keycloak.models.UserModel; import org.keycloak.models.UserSessionModel; import org.keycloak.models.utils.KeycloakModelUtils; import org.keycloak.models.utils.ModelToRepresentation; import org.keycloak.models.utils.RepresentationToModel; import org.keycloak.representations.adapters.action.GlobalRequestResult; import org.keycloak.representations.idm.ClientRepresentation; import org.keycloak.representations.idm.CredentialRepresentation; import org.keycloak.representations.idm.UserRepresentation; import org.keycloak.representations.idm.UserSessionRepresentation; import org.keycloak.services.clientregistration.ClientRegistrationTokenUtils; import org.keycloak.services.managers.ClientManager; import org.keycloak.services.managers.RealmManager; import org.keycloak.services.managers.ResourceAdminManager; import org.keycloak.services.resources.KeycloakApplication; import org.keycloak.services.ErrorResponse; import org.keycloak.util.JsonSerialization; import org.keycloak.common.util.Time; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import 
java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import static java.lang.Boolean.TRUE; /** * Base resource class for managing one particular client of a realm. * * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a> * @version $Revision: 1 $ */ public class ClientResource { protected static final Logger logger = Logger.getLogger(ClientResource.class); protected RealmModel realm; private RealmAuth auth; private AdminEventBuilder adminEvent; protected ClientModel client; protected KeycloakSession session; @Context protected UriInfo uriInfo; @Context protected KeycloakApplication keycloak; protected KeycloakApplication getKeycloakApplication() { return keycloak; } public ClientResource(RealmModel realm, RealmAuth auth, ClientModel clientModel, KeycloakSession session, AdminEventBuilder adminEvent) { this.realm = realm; this.auth = auth; this.client = clientModel; this.session = session; this.adminEvent = adminEvent; auth.init(RealmAuth.Resource.CLIENT); } @Path("protocol-mappers") public ProtocolMappersResource getProtocolMappers() { ProtocolMappersResource mappers = new ProtocolMappersResource(client, auth, adminEvent); ResteasyProviderFactory.getInstance().injectProperties(mappers); return mappers; } /** * Update the client * @param rep * @return */ @PUT @Consumes(MediaType.APPLICATION_JSON) public Response update(final ClientRepresentation rep) { auth.requireManage(); try { if (TRUE.equals(rep.isServiceAccountsEnabled()) && !client.isServiceAccountsEnabled()) { new ClientManager(new RealmManager(session)).enableServiceAccount(client);; } RepresentationToModel.updateClient(rep, client); adminEvent.operation(OperationType.UPDATE).resourcePath(uriInfo).representation(rep).success(); return Response.noContent().build(); } catch (ModelDuplicateException e) { return ErrorResponse.exists("Client " + rep.getClientId() + " already exists"); } } /** * Get representation of 
the client * * @return */ @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public ClientRepresentation getClient() { auth.requireView(); return ModelToRepresentation.toRepresentation(client); } /** * Get representation of certificate resource * * @param attributePrefix * @return */ @Path("certificates/{attr}") public ClientAttributeCertificateResource getCertficateResource(@PathParam("attr") String attributePrefix) { return new ClientAttributeCertificateResource(realm, auth, client, session, attributePrefix, adminEvent); } /** * Get keycloak.json file * * Returns a keycloak.json file to be used to configure the adapter of the specified client. * * @return * @throws IOException */ @GET @NoCache @Path("installation/json") @Produces(MediaType.APPLICATION_JSON) public String getInstallation() throws IOException { auth.requireView(); ClientManager clientManager = new ClientManager(new RealmManager(session)); Object rep = clientManager.toInstallationRepresentation(realm, client, getKeycloakApplication().getBaseUri(uriInfo)); // TODO Temporary solution to pretty-print return JsonSerialization.mapper.writerWithDefaultPrettyPrinter().writeValueAsString(rep); } /** * Get adapter configuration XML for JBoss / Wildfly Keycloak subsystem * * Returns XML that can be included in the JBoss / Wildfly Keycloak subsystem to configure the adapter of that client. 
* * @return * @throws IOException */ @GET @NoCache @Path("installation/jboss") @Produces(MediaType.TEXT_PLAIN) public String getJBossInstallation() throws IOException { auth.requireView(); ClientManager clientManager = new ClientManager(new RealmManager(session)); return clientManager.toJBossSubsystemConfig(realm, client, getKeycloakApplication().getBaseUri(uriInfo)); } /** * Delete the client * */ @DELETE @NoCache public void deleteClient() { auth.requireManage(); new ClientManager(new RealmManager(session)).removeClient(realm, client); adminEvent.operation(OperationType.DELETE).resourcePath(uriInfo).success(); } /** * Generate a new secret for the client * * @return */ @Path("client-secret") @POST @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public CredentialRepresentation regenerateSecret() { auth.requireManage(); logger.debug("regenerateSecret"); UserCredentialModel cred = KeycloakModelUtils.generateSecret(client); CredentialRepresentation rep = ModelToRepresentation.toRepresentation(cred); adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).representation(rep).success(); return rep; } /** * Generate a new registration access token for the client * * @return */ @Path("registration-access-token") @POST @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public ClientRepresentation regenerateRegistrationAccessToken() { auth.requireManage(); String token = ClientRegistrationTokenUtils.updateRegistrationAccessToken(realm, uriInfo, client); ClientRepresentation rep = ModelToRepresentation.toRepresentation(client); rep.setRegistrationAccessToken(token); adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).representation(rep).success(); return rep; } /** * Get the client secret * * @return */ @Path("client-secret") @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public CredentialRepresentation getClientSecret() { auth.requireView(); logger.debug("getClientSecret"); UserCredentialModel 
model = UserCredentialModel.secret(client.getSecret()); if (model == null) throw new NotFoundException("Client does not have a secret"); return ModelToRepresentation.toRepresentation(model); } /** * Base path for managing the scope mappings for the client * * @return */ @Path("scope-mappings") public ScopeMappedResource getScopeMappedResource() { return new ScopeMappedResource(realm, auth, client, session, adminEvent); } @Path("roles") public RoleContainerResource getRoleContainerResource() { return new RoleContainerResource(uriInfo, realm, auth, client, adminEvent); } /** * Get allowed origins * * This is used for CORS requests. Access tokens will have * their allowedOrigins claim set to this value for tokens created for this client. * * @return */ @Path("allowed-origins") @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public Set<String> getAllowedOrigins() { auth.requireView(); return client.getWebOrigins(); } /** * Update allowed origins * * This is used for CORS requests. Access tokens will have * their allowedOrigins claim set to this value for tokens created for this client. * * @param allowedOrigins */ @Path("allowed-origins") @PUT @Consumes(MediaType.APPLICATION_JSON) public void updateAllowedOrigins(Set<String> allowedOrigins) { auth.requireManage(); client.setWebOrigins(allowedOrigins); adminEvent.operation(OperationType.UPDATE).resourcePath(uriInfo).representation(client).success(); } /** * Delete the specified origins from current allowed origins * * This is used for CORS requests. Access tokens will have * their allowedOrigins claim set to this value for tokens created for this client. 
* * @param allowedOrigins List of origins to delete */ @Path("allowed-origins") @DELETE @Consumes(MediaType.APPLICATION_JSON) public void deleteAllowedOrigins(Set<String> allowedOrigins) { auth.requireManage(); for (String origin : allowedOrigins) { client.removeWebOrigin(origin); } adminEvent.operation(OperationType.DELETE).resourcePath(uriInfo).success(); } /** * Get a user dedicated to the service account * * @return */ @Path("service-account-user") @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public UserRepresentation getServiceAccountUser() { auth.requireView(); UserModel user = session.users().getUserByServiceAccountClient(client); if (user == null) { if (client.isServiceAccountsEnabled()) { new ClientManager(new RealmManager(session)).enableServiceAccount(client); user = session.users().getUserByServiceAccountClient(client); } else { throw new BadRequestException("Service account not enabled for the client '" + client.getClientId() + "'"); } } return ModelToRepresentation.toRepresentation(user); } /** * Push the client's revocation policy to its admin URL * * If the client has an admin URL, push revocation policy to it. 
*/ @Path("push-revocation") @POST public GlobalRequestResult pushRevocation() { auth.requireManage(); adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).success(); return new ResourceAdminManager(session).pushClientRevocationPolicy(uriInfo.getRequestUri(), realm, client); } /** * Get application session count * * Returns a number of user sessions associated with this client * * { * "count": number * } * * @return */ @Path("session-count") @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public Map<String, Integer> getApplicationSessionCount() { auth.requireView(); Map<String, Integer> map = new HashMap<String, Integer>(); map.put("count", session.sessions().getActiveUserSessions(client.getRealm(), client)); return map; } /** * Get user sessions for client * * Returns a list of user sessions associated with this client * * @param firstResult Paging offset * @param maxResults Paging size * @return */ @Path("user-sessions") @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public List<UserSessionRepresentation> getUserSessions(@QueryParam("first") Integer firstResult, @QueryParam("max") Integer maxResults) { auth.requireView(); firstResult = firstResult != null ? firstResult : -1; maxResults = maxResults != null ? 
maxResults : -1; List<UserSessionRepresentation> sessions = new ArrayList<UserSessionRepresentation>(); for (UserSessionModel userSession : session.sessions().getUserSessions(client.getRealm(), client, firstResult, maxResults)) { UserSessionRepresentation rep = ModelToRepresentation.toRepresentation(userSession); sessions.add(rep); } return sessions; } /** * Get application offline session count * * Returns a number of offline user sessions associated with this client * * { * "count": number * } * * @return */ @Path("offline-session-count") @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public Map<String, Integer> getOfflineSessionCount() { auth.requireView(); Map<String, Integer> map = new HashMap<String, Integer>(); map.put("count", session.sessions().getOfflineSessionsCount(client.getRealm(), client)); return map; } /** * Get offline sessions for client * * Returns a list of offline user sessions associated with this client * * @param firstResult Paging offset * @param maxResults Paging size * @return */ @Path("offline-sessions") @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public List<UserSessionRepresentation> getOfflineUserSessions(@QueryParam("first") Integer firstResult, @QueryParam("max") Integer maxResults) { auth.requireView(); firstResult = firstResult != null ? firstResult : -1; maxResults = maxResults != null ? 
maxResults : -1; List<UserSessionRepresentation> sessions = new ArrayList<UserSessionRepresentation>(); List<UserSessionModel> userSessions = session.sessions().getOfflineUserSessions(client.getRealm(), client, firstResult, maxResults); for (UserSessionModel userSession : userSessions) { UserSessionRepresentation rep = ModelToRepresentation.toRepresentation(userSession); // Update lastSessionRefresh with the timestamp from clientSession for (ClientSessionModel clientSession : userSession.getClientSessions()) { if (client.getId().equals(clientSession.getClient().getId())) { rep.setLastAccess(Time.toMillis(clientSession.getTimestamp())); break; } } sessions.add(rep); } return sessions; } /** * Logout all sessions * * If the client has an admin URL, invalidate all sessions associated with that client directly. * */ @Path("logout-all") @POST public GlobalRequestResult logoutAll() { auth.requireManage(); adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).success(); return new ResourceAdminManager(session).logoutClient(uriInfo.getRequestUri(), realm, client); } /** * Logout the user by username * * If the client has an admin URL, invalidate the sessions for a particular user directly. 
* */ @Path("logout-user/{username}") @POST public void logout(final @PathParam("username") String username) { auth.requireManage(); UserModel user = session.users().getUserByUsername(username, realm); if (user == null) { throw new NotFoundException("User not found"); } adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).success(); new ResourceAdminManager(session).logoutUserFromClient(uriInfo.getRequestUri(), realm, client, user); } /** * Register a cluster node with the client * * Manually register cluster node to this client - usually it's not needed to call this directly as adapter should handle * by sending registration request to Keycloak * * @param formParams */ @Path("nodes") @POST @Consumes(MediaType.APPLICATION_JSON) public void registerNode(Map<String, String> formParams) { auth.requireManage(); String node = formParams.get("node"); if (node == null) { throw new BadRequestException("Node not found in params"); } if (logger.isDebugEnabled()) logger.debug("Register node: " + node); client.registerNode(node, Time.currentTime()); adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).success(); } /** * Unregister a cluster node from the client * * @param node */ @Path("nodes/{node}") @DELETE @NoCache public void unregisterNode(final @PathParam("node") String node) { auth.requireManage(); if (logger.isDebugEnabled()) logger.debug("Unregister node: " + node); Integer time = client.getRegisteredNodes().get(node); if (time == null) { throw new NotFoundException("Client does not have node "); } client.unregisterNode(node); adminEvent.operation(OperationType.DELETE).resourcePath(uriInfo).success(); } /** * Test if registered cluster nodes are available * * Tests availability by sending 'ping' request to all cluster nodes. 
* * @return */ @Path("test-nodes-available") @GET @NoCache public GlobalRequestResult testNodesAvailable() { auth.requireManage(); logger.debug("Test availability of cluster nodes"); adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).success(); return new ResourceAdminManager(session).testNodesAvailability(uriInfo.getRequestUri(), realm, client); } }
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.vcs.changes.actions.diff; import com.intellij.diff.DiffContentFactory; import com.intellij.diff.DiffContentFactoryEx; import com.intellij.diff.DiffRequestFactory; import com.intellij.diff.DiffRequestFactoryImpl; import com.intellij.diff.chains.DiffRequestProducer; import com.intellij.diff.chains.DiffRequestProducerException; import com.intellij.diff.contents.DiffContent; import com.intellij.diff.impl.DiffViewerWrapper; import com.intellij.diff.merge.MergeUtil; import com.intellij.diff.requests.DiffRequest; import com.intellij.diff.requests.ErrorDiffRequest; import com.intellij.diff.requests.SimpleDiffRequest; import com.intellij.diff.util.DiffUserDataKeys; import com.intellij.diff.util.DiffUserDataKeysEx; import com.intellij.diff.util.DiffUtil; import com.intellij.diff.util.Side; import consulo.logging.Logger; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import consulo.util.dataholder.Key; import com.intellij.openapi.util.Ref; import consulo.util.dataholder.UserDataHolder; import com.intellij.openapi.vcs.AbstractVcs; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vcs.VcsDataKeys; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.changes.*; import 
com.intellij.openapi.vcs.merge.MergeData; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.ThreeState; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.UIUtil; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; public class ChangeDiffRequestProducer implements DiffRequestProducer { private static final Logger LOG = Logger.getInstance(ChangeDiffRequestProducer.class); public static Key<Change> CHANGE_KEY = Key.create("DiffRequestPresentable.Change"); @Nullable private final Project myProject; @Nonnull private final Change myChange; @Nonnull private final Map<Key, Object> myChangeContext; private ChangeDiffRequestProducer(@Nullable Project project, @Nonnull Change change, @Nonnull Map<Key, Object> changeContext) { myChange = change; myProject = project; myChangeContext = changeContext; } @Nonnull public Change getChange() { return myChange; } @Nullable public Project getProject() { return myProject; } @Nonnull @Override public String getName() { return ChangesUtil.getFilePath(myChange).getPath(); } public static boolean isEquals(@Nonnull Change change1, @Nonnull Change change2) { for (ChangeDiffViewerWrapperProvider provider : ChangeDiffViewerWrapperProvider.EP_NAME.getExtensionList()) { ThreeState equals = provider.isEquals(change1, change2); if (equals == ThreeState.NO) return false; } for (ChangeDiffRequestProvider provider : ChangeDiffRequestProvider.EP_NAME.getExtensionList()) { ThreeState equals = provider.isEquals(change1, change2); if (equals == ThreeState.YES) return true; if (equals == ThreeState.NO) return false; } if (!Comparing.equal(change1.getClass(), change2.getClass())) return false; if (!isEquals(change1.getBeforeRevision(), change2.getBeforeRevision())) return false; if (!isEquals(change1.getAfterRevision(), 
change2.getAfterRevision())) return false; return true; } private static boolean isEquals(@Nullable ContentRevision revision1, @Nullable ContentRevision revision2) { if (Comparing.equal(revision1, revision2)) return true; if (revision1 instanceof CurrentContentRevision && revision2 instanceof CurrentContentRevision) { VirtualFile vFile1 = ((CurrentContentRevision)revision1).getVirtualFile(); VirtualFile vFile2 = ((CurrentContentRevision)revision2).getVirtualFile(); return Comparing.equal(vFile1, vFile2); } return false; } @Nullable public static ChangeDiffRequestProducer create(@Nullable Project project, @Nonnull Change change) { return create(project, change, Collections.<Key, Object>emptyMap()); } @Nullable public static ChangeDiffRequestProducer create(@Nullable Project project, @Nonnull Change change, @Nonnull Map<Key, Object> changeContext) { if (!canCreate(project, change)) return null; return new ChangeDiffRequestProducer(project, change, changeContext); } public static boolean canCreate(@Nullable Project project, @Nonnull Change change) { for (ChangeDiffViewerWrapperProvider provider : ChangeDiffViewerWrapperProvider.EP_NAME.getExtensionList()) { if (provider.canCreate(project, change)) return true; } for (ChangeDiffRequestProvider provider : ChangeDiffRequestProvider.EP_NAME.getExtensionList()) { if (provider.canCreate(project, change)) return true; } ContentRevision bRev = change.getBeforeRevision(); ContentRevision aRev = change.getAfterRevision(); if (bRev == null && aRev == null) return false; if (bRev != null && bRev.getFile().isDirectory()) return false; if (aRev != null && aRev.getFile().isDirectory()) return false; return true; } @Nonnull @Override public DiffRequest process(@Nonnull UserDataHolder context, @Nonnull ProgressIndicator indicator) throws DiffRequestProducerException, ProcessCanceledException { try { return loadCurrentContents(context, indicator); } catch (ProcessCanceledException e) { throw e; } catch (DiffRequestProducerException e) 
{ throw e; } catch (Exception e) { LOG.warn(e); throw new DiffRequestProducerException(e.getMessage()); } } @Nonnull protected DiffRequest loadCurrentContents(@Nonnull UserDataHolder context, @Nonnull ProgressIndicator indicator) throws DiffRequestProducerException { DiffRequestProducerException wrapperException = null; DiffRequestProducerException requestException = null; DiffViewerWrapper wrapper = null; try { for (ChangeDiffViewerWrapperProvider provider : ChangeDiffViewerWrapperProvider.EP_NAME.getExtensionList()) { if (provider.canCreate(myProject, myChange)) { wrapper = provider.process(this, context, indicator); break; } } } catch (DiffRequestProducerException e) { wrapperException = e; } DiffRequest request = null; try { for (ChangeDiffRequestProvider provider : ChangeDiffRequestProvider.EP_NAME.getExtensionList()) { if (provider.canCreate(myProject, myChange)) { request = provider.process(this, context, indicator); break; } } if (request == null) request = createRequest(myProject, myChange, context, indicator); } catch (DiffRequestProducerException e) { requestException = e; } if (requestException != null && wrapperException != null) { String message = requestException.getMessage() + "\n\n" + wrapperException.getMessage(); throw new DiffRequestProducerException(message); } if (requestException != null) { request = new ErrorDiffRequest(getRequestTitle(myChange), requestException); LOG.info("Request: " + requestException.getMessage()); } if (wrapperException != null) { LOG.info("Wrapper: " + wrapperException.getMessage()); } request.putUserData(CHANGE_KEY, myChange); request.putUserData(DiffViewerWrapper.KEY, wrapper); for (Map.Entry<Key, Object> entry : myChangeContext.entrySet()) { request.putUserData(entry.getKey(), entry.getValue()); } DiffUtil.putDataKey(request, VcsDataKeys.CURRENT_CHANGE, myChange); return request; } @Nonnull private DiffRequest createRequest(@Nullable Project project, @Nonnull Change change, @Nonnull UserDataHolder context, @Nonnull 
ProgressIndicator indicator) throws DiffRequestProducerException { if (ChangesUtil.isTextConflictingChange(change)) { // three side diff // FIXME: This part is ugly as a VCS merge subsystem itself. FilePath path = ChangesUtil.getFilePath(change); VirtualFile file = path.getVirtualFile(); if (file == null) { file = LocalFileSystem.getInstance().refreshAndFindFileByPath(path.getPath()); } if (file == null) throw new DiffRequestProducerException("Can't show merge conflict - file not found"); if (project == null) { throw new DiffRequestProducerException("Can't show merge conflict - project is unknown"); } final AbstractVcs vcs = ChangesUtil.getVcsForChange(change, project); if (vcs == null || vcs.getMergeProvider() == null) { throw new DiffRequestProducerException("Can't show merge conflict - operation nos supported"); } try { // FIXME: loadRevisions() can call runProcessWithProgressSynchronously() inside final Ref<Throwable> exceptionRef = new Ref<>(); final Ref<MergeData> mergeDataRef = new Ref<>(); final VirtualFile finalFile = file; UIUtil.invokeAndWaitIfNeeded(new Runnable() { @Override public void run() { try { mergeDataRef.set(vcs.getMergeProvider().loadRevisions(finalFile)); } catch (VcsException e) { exceptionRef.set(e); } } }); if (!exceptionRef.isNull()) { Throwable e = exceptionRef.get(); if (e instanceof VcsException) throw (VcsException)e; if (e instanceof Error) throw (Error)e; if (e instanceof RuntimeException) throw (RuntimeException)e; throw new RuntimeException(e); } MergeData mergeData = mergeDataRef.get(); ContentRevision bRev = change.getBeforeRevision(); ContentRevision aRev = change.getAfterRevision(); String beforeRevisionTitle = getRevisionTitle(bRev, "Your version"); String afterRevisionTitle = getRevisionTitle(aRev, "Server version"); String title = DiffRequestFactory.getInstance().getTitle(file); List<String> titles = ContainerUtil.list(beforeRevisionTitle, "Base Version", afterRevisionTitle); DiffContentFactory contentFactory = 
DiffContentFactory.getInstance(); List<DiffContent> contents = ContainerUtil.list( contentFactory.createFromBytes(project, mergeData.CURRENT, file), contentFactory.createFromBytes(project, mergeData.ORIGINAL, file), contentFactory.createFromBytes(project, mergeData.LAST, file) ); SimpleDiffRequest request = new SimpleDiffRequest(title, contents, titles); MergeUtil.putRevisionInfos(request, mergeData); return request; } catch (VcsException e) { LOG.info(e); throw new DiffRequestProducerException(e); } catch (IOException e) { LOG.info(e); throw new DiffRequestProducerException(e); } } else { ContentRevision bRev = change.getBeforeRevision(); ContentRevision aRev = change.getAfterRevision(); if (bRev == null && aRev == null) { LOG.warn("Both revision contents are empty"); throw new DiffRequestProducerException("Bad revisions contents"); } if (bRev != null) checkContentRevision(project, bRev, context, indicator); if (aRev != null) checkContentRevision(project, aRev, context, indicator); String title = getRequestTitle(change); indicator.setIndeterminate(true); DiffContent content1 = createContent(project, bRev, context, indicator); DiffContent content2 = createContent(project, aRev, context, indicator); final String userLeftRevisionTitle = (String)myChangeContext.get(DiffUserDataKeysEx.VCS_DIFF_LEFT_CONTENT_TITLE); String beforeRevisionTitle = userLeftRevisionTitle != null ? userLeftRevisionTitle : getRevisionTitle(bRev, "Base version"); final String userRightRevisionTitle = (String)myChangeContext.get(DiffUserDataKeysEx.VCS_DIFF_RIGHT_CONTENT_TITLE); String afterRevisionTitle = userRightRevisionTitle != null ? 
userRightRevisionTitle : getRevisionTitle(aRev, "Your version"); SimpleDiffRequest request = new SimpleDiffRequest(title, content1, content2, beforeRevisionTitle, afterRevisionTitle); boolean bRevCurrent = bRev instanceof CurrentContentRevision; boolean aRevCurrent = aRev instanceof CurrentContentRevision; if (bRevCurrent && !aRevCurrent) request.putUserData(DiffUserDataKeys.MASTER_SIDE, Side.LEFT); if (!bRevCurrent && aRevCurrent) request.putUserData(DiffUserDataKeys.MASTER_SIDE, Side.RIGHT); return request; } } @Nonnull public static String getRequestTitle(@Nonnull Change change) { ContentRevision bRev = change.getBeforeRevision(); ContentRevision aRev = change.getAfterRevision(); assert bRev != null || aRev != null; if (bRev != null && aRev != null) { FilePath bPath = bRev.getFile(); FilePath aPath = aRev.getFile(); if (bPath.equals(aPath)) { return DiffRequestFactoryImpl.getContentTitle(bPath); } else { return DiffRequestFactoryImpl.getTitle(bPath, aPath, " -> "); } } else if (bRev != null) { return DiffRequestFactoryImpl.getContentTitle(bRev.getFile()); } else { return DiffRequestFactoryImpl.getContentTitle(aRev.getFile()); } } @Nonnull public static String getRevisionTitle(@Nullable ContentRevision revision, @Nonnull String defaultValue) { if (revision == null) return defaultValue; String title = revision.getRevisionNumber().asString(); if (title == null || title.isEmpty()) return defaultValue; return title; } @Nonnull public static DiffContent createContent(@Nullable Project project, @Nullable ContentRevision revision, @Nonnull UserDataHolder context, @Nonnull ProgressIndicator indicator) throws DiffRequestProducerException { try { indicator.checkCanceled(); if (revision == null) return DiffContentFactory.getInstance().createEmpty(); FilePath filePath = revision.getFile(); DiffContentFactoryEx contentFactory = DiffContentFactoryEx.getInstanceEx(); if (revision instanceof CurrentContentRevision) { VirtualFile vFile = 
((CurrentContentRevision)revision).getVirtualFile(); if (vFile == null) throw new DiffRequestProducerException("Can't get current revision content"); return contentFactory.create(project, vFile); } if (revision instanceof BinaryContentRevision) { byte[] content = ((BinaryContentRevision)revision).getBinaryContent(); if (content == null) { throw new DiffRequestProducerException("Can't get binary revision content"); } return contentFactory.createFromBytes(project, content, filePath); } if (revision instanceof ByteBackedContentRevision) { byte[] revisionContent = ((ByteBackedContentRevision)revision).getContentAsBytes(); if (revisionContent == null) throw new DiffRequestProducerException("Can't get revision content"); return contentFactory.createFromBytes(project, revisionContent, filePath); } else { String revisionContent = revision.getContent(); if (revisionContent == null) throw new DiffRequestProducerException("Can't get revision content"); return contentFactory.create(project, revisionContent, filePath); } } catch (IOException e) { LOG.info(e); throw new DiffRequestProducerException(e); } catch (VcsException e) { LOG.info(e); throw new DiffRequestProducerException(e); } } public static void checkContentRevision(@javax.annotation.Nullable Project project, @Nonnull ContentRevision rev, @Nonnull UserDataHolder context, @Nonnull ProgressIndicator indicator) throws DiffRequestProducerException { if (rev.getFile().isDirectory()) { throw new DiffRequestProducerException("Can't show diff for directory"); } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ChangeDiffRequestProducer that = (ChangeDiffRequestProducer)o; return myChange.equals(that.myChange); } @Override public int hashCode() { return myChange.hashCode(); } }
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.compositor.layouts.phone.stack;

import android.content.Context;
import android.content.res.Resources;

import org.chromium.base.annotations.SuppressFBWarnings;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.compositor.layouts.ChromeAnimation;
import org.chromium.chrome.browser.compositor.layouts.Layout.Orientation;
import org.chromium.chrome.browser.compositor.layouts.components.LayoutTab;

/**
 * StackTab is used to keep track of a thumbnail's bitmap and position and to
 * draw itself onto the GL canvas at the desired Y Offset.
 * @VisibleForTesting
 */
@SuppressFBWarnings("MS_PKGPROTECT")
public class StackTab implements ChromeAnimation.Animatable<StackTab.Property> {
    /**
     * Properties that can be animated by using a
     * {@link org.chromium.chrome.browser.compositor.layouts.ChromeAnimation.Animatable}.
     */
    enum Property {
        SCALE,
        SCROLL_OFFSET,
        ALPHA,
        X_IN_STACK_INFLUENCE,
        X_IN_STACK_OFFSET,
        X_OUT_OF_STACK,
        Y_IN_STACK_INFLUENCE,
        Y_IN_STACK_OFFSET,
        Y_OUT_OF_STACK,
        DISCARD_AMOUNT
    }

    // Cached values from values/dimens.xml, populated by resetDimensionConstants().
    public static float sStackedTabVisibleSize; // stacked_tab_visible_size
    public static float sStackBufferWidth; // stack_buffer_width
    public static float sStackBufferHeight; // stack_buffer_height

    // Positioner selector: blend factors [0..1] between in-stack and out-of-stack placement.
    private float mXInStackInfluence = 1.0f;
    private float mYInStackInfluence = 1.0f;

    // In stack positioner
    private float mScrollOffset;
    private float mXInStackOffset;
    private float mYInStackOffset;

    // Out of stack positioner
    private float mXOutOfStack;
    private float mYOutOfStack;

    // Values that get animated
    private float mAlpha = 1.0f;
    private float mScale = 1.0f;
    private float mDiscardAmount; // This might alter position, rotation and alpha

    // Discard states
    private float mDiscardOriginX;
    private float mDiscardOriginY;
    private boolean mDiscardFromClick;

    // The index of the tab in the stack
    private int mIndex;

    // True if the tab is currently being removed (while animating).
    protected boolean mDying;

    // The visibility sorting value is used to determine the importance of the tab for
    // texture allocation. It is computed from the area and its position in the stack.
    // Larger values will have more priority for acquiring texture. Negative values "often"
    // means that the tab is not visible at all (but there is no guarantee and it's fine).
    private float mCachedVisibleArea; // Intermediate value
    private float mCachedIndexDistance; // Intermediate value
    private float mCacheStackVisibility = 1.0f; // Intermediate value
    private long mVisiblitySortingValue; // Sorting value based on visible area.
    private int mOrderSortingValue; // Sorting value based on distance to selection.

    private LayoutTab mLayoutTab;

    /**
     * @param tab The tab this instance is supposed to draw.
     */
    public StackTab(LayoutTab tab) {
        mLayoutTab = tab;
    }

    /**
     * @param index The new index in the stack layout.
     */
    public void setNewIndex(int index) {
        mIndex = index;
    }

    /**
     * @return The index in the stack layout.
     */
    public int getIndex() {
        return mIndex;
    }

    /**
     * @return The {@link LayoutTab} this instance is supposed to draw.
     */
    public LayoutTab getLayoutTab() {
        return mLayoutTab;
    }

    /**
     * Set the {@link LayoutTab} this instance is supposed to draw.
     */
    public void setLayoutTab(LayoutTab tab) {
        mLayoutTab = tab;
    }

    /**
     * @return The id of the tab, same as the id from the Tab in TabModel.
     */
    public int getId() {
        return mLayoutTab.getId();
    }

    /**
     * @param y The vertical translation to be applied after the placement in the stack.
     */
    public void setYInStackOffset(float y) {
        mYInStackOffset = y;
    }

    /**
     * @return The vertical translation applied after the placement in the stack.
     */
    public float getYInStackOffset() {
        return mYInStackOffset;
    }

    /**
     * @param x The horizontal translation to be applied after the placement in the stack.
     */
    public void setXInStackOffset(float x) {
        mXInStackOffset = x;
    }

    /**
     * @return The horizontal translation applied after the placement in the stack.
     */
    public float getXInStackOffset() {
        return mXInStackOffset;
    }

    /**
     * @param y The vertical absolute position when out of stack.
     */
    public void setYOutOfStack(float y) {
        mYOutOfStack = y;
    }

    /**
     * @return The vertical absolute position when out of stack.
     */
    public float getYOutOfStack() {
        return mYOutOfStack;
    }

    /**
     * @param x The horizontal absolute position when out of stack.
     */
    public void setXOutOfStack(float x) {
        mXOutOfStack = x;
    }

    /**
     * @return The horizontal absolute position when out of stack.
     */
    public float getXOutOfStack() {
        return mXOutOfStack;
    }

    /**
     * Set the transparency value for all of the tab (the contents,
     * border, etc...).  For components that allow specifying
     * their own alpha values, it will use the min of these two fields.
     *
     * @param f The transparency value for the tab.
     */
    public void setAlpha(float f) {
        mAlpha = f;
    }

    /**
     * @return The transparency value for all of the tab components.
     */
    public float getAlpha() {
        return mAlpha;
    }

    /**
     * @param xInStackInfluence The horizontal blend value between instack
     *                          and out of stack placement [0 .. 1].
     */
    public void setXInStackInfluence(float xInStackInfluence) {
        mXInStackInfluence = xInStackInfluence;
    }

    /**
     * @return The horizontal blend value between instack and out of stack placement [0 .. 1].
     */
    public float getXInStackInfluence() {
        return mXInStackInfluence;
    }

    /**
     * @param yInStackInfluence The vertical blend value between instack
     *                          and out of stack placement [0 .. 1].
     */
    public void setYInStackInfluence(float yInStackInfluence) {
        mYInStackInfluence = yInStackInfluence;
    }

    /**
     * @return The vertical blend value between instack and out of stack placement [0 .. 1].
     */
    public float getYInStackInfluence() {
        return mYInStackInfluence;
    }

    /**
     * @param scale The scale to apply to the tab, compared to the parent.
     */
    public void setScale(float scale) {
        mScale = scale;
    }

    /**
     * @return The scale to apply to the tab, compared to the parent.
     */
    public float getScale() {
        return mScale;
    }

    /**
     * @param offset The offset of the tab along the scrolling direction in scroll space.
     */
    public void setScrollOffset(float offset) {
        mScrollOffset = offset;
    }

    /**
     * @return The offset of the tab along the scrolling direction in scroll space.
     */
    public float getScrollOffset() {
        return mScrollOffset;
    }

    /**
     * @param amount The amount of discard displacement. 0 is no discard. Negative is discard
     *               on the left. Positive is discard on the right.
     */
    public void setDiscardAmount(float amount) {
        mDiscardAmount = amount;
    }

    /**
     * @param deltaAmount The amount of delta discard to be added to the current discard amount.
     */
    public void addToDiscardAmount(float deltaAmount) {
        mDiscardAmount += deltaAmount;
    }

    /**
     * @return The amount of discard displacement. 0 is no discard. Negative is discard
     *         on the left. Positive is discard on the right.
     */
    public float getDiscardAmount() {
        return mDiscardAmount;
    }

    /**
     * @param x The x coordinate in tab space of where the discard transforms should originate.
     */
    public void setDiscardOriginX(float x) {
        mDiscardOriginX = x;
    }

    /**
     * @param y The y coordinate in tab space of where the discard transforms should originate.
     */
    public void setDiscardOriginY(float y) {
        mDiscardOriginY = y;
    }

    /**
     * @return The x coordinate in tab space of where the discard transforms should originate.
     */
    public float getDiscardOriginX() {
        return mDiscardOriginX;
    }

    /**
     * @return The y coordinate in tab space of where the discard transforms should originate.
     */
    public float getDiscardOriginY() {
        return mDiscardOriginY;
    }

    /**
     * @param fromClick Whether or not this discard was from a click event.
     */
    public void setDiscardFromClick(boolean fromClick) {
        mDiscardFromClick = fromClick;
    }

    /**
     * @return Whether or not this discard was from a click event.
     */
    public boolean getDiscardFromClick() {
        return mDiscardFromClick;
    }

    /**
     * @param dying True if the Tab/ContentView will be destroyed, and we are still animating its
     *              visible representation.
     */
    public void setDying(boolean dying) {
        mDying = dying;
    }

    /**
     * @return True if the Tab/ContentView is destroyed, but we are still animating its
     *         visible representation.
     */
    public boolean isDying() {
        return mDying;
    }

    /**
     * The scroll space does not map linearly to the screen so it creates a nice slow down
     * effect at the top of the screen while scrolling.
     * Warps x so it matches y(x) = x - warpSize on the positive side and 0 on the negative side
     * with a smooth transition between [0, 2 * warpSize].
     * @see #screenToScroll(float, float)
     *
     * [-oo, 0] -> 0
     * [0, 2 * warpSize] -> warpSize * (((x - warpSize) / (2 * warpSize)) + 0.5) ^ 2.
     * [2 * warpSize, +oo] -> x - warpSize
     * @param x        The offset in scroll space.
     * @param warpSize The size in scroll space of the slow down effect.
     * @return         The offset on screen corresponding to the scroll space offset.
     */
    public static float scrollToScreen(float x, float warpSize) {
        if (x <= 0) return 0;
        if (x >= 2 * warpSize) return x - warpSize;
        x = (x - warpSize) / (2.0f * warpSize) + 0.5f;
        return x * x * warpSize;
    }

    /**
     * Unwarps x so it matches the above warp function.
     * @see #scrollToScreen(float, float)
     *
     * [-oo, 0] -> 0
     * [0, warpSize] -> 2 * sqrt(x * warpSize), i.e. 2 * warpSize * sqrt(x / warpSize).
     * [warpSize, +oo] -> x + warpSize
     * @param x        The screen space offset.
     * @param warpSize The size in scroll space of the slow down effect.
     * @return         The offset in scroll space corresponding to the offset on screen.
     */
    public static float screenToScroll(float x, float warpSize) {
        if (x <= 0) return 0;
        if (x >= warpSize) return x + warpSize;
        return (float) Math.sqrt(x * warpSize) * 2;
    }

    /**
     * @param orientation The orientation to choose to get the size.
     * @return            The size of the content along the provided orientation.
     */
    public float getSizeInScrollDirection(int orientation) {
        if (orientation == Orientation.PORTRAIT) {
            return mLayoutTab.getScaledContentHeight();
        } else {
            return mLayoutTab.getScaledContentWidth();
        }
    }

    /**
     * Helper function that gather the static constants from values/dimens.xml.
     * @param context The Android Context.
     */
    public static void resetDimensionConstants(Context context) {
        Resources res = context.getResources();
        final float pxToDp = 1.0f / res.getDisplayMetrics().density;
        sStackedTabVisibleSize =
                res.getDimensionPixelOffset(R.dimen.stacked_tab_visible_size) * pxToDp;
        sStackBufferWidth = res.getDimensionPixelOffset(R.dimen.stack_buffer_width) * pxToDp;
        sStackBufferHeight = res.getDimensionPixelOffset(R.dimen.stack_buffer_height) * pxToDp;
    }

    /**
     * Reset the offset to factory default.
     */
    public void resetOffset() {
        mXInStackInfluence = 1.0f;
        mYInStackInfluence = 1.0f;
        mScrollOffset = 0.0f;
        mXInStackOffset = 0.0f;
        mYInStackOffset = 0.0f;
        mXOutOfStack = 0.0f;
        mYOutOfStack = 0.0f;
        mDiscardOriginX = 0.f;
        mDiscardOriginY = 0.f;
        mDiscardFromClick = false;
    }

    /**
     * Updates the cached visible area value to be used to sort tabs by visibility.
     * @param referenceIndex The index that has the highest priority.
     */
    public void updateVisiblityValue(int referenceIndex) {
        mCachedVisibleArea = mLayoutTab.computeVisibleArea();
        mCachedIndexDistance = Math.abs(mIndex - referenceIndex);
        mOrderSortingValue = computeOrderSortingValue(mCachedIndexDistance, mCacheStackVisibility);
        mVisiblitySortingValue = computeVisibilitySortingValue(
                mCachedVisibleArea, mOrderSortingValue, mCacheStackVisibility);
    }

    /**
     * Updates the cached visible area value to be used to sort tabs by visibility.
     * @param stackVisibility Multiplier that represents how much the stack fills the screen.
     */
    public void updateStackVisiblityValue(float stackVisibility) {
        mCacheStackVisibility = stackVisibility;
        mOrderSortingValue = computeOrderSortingValue(mCachedIndexDistance, mCacheStackVisibility);
        mVisiblitySortingValue = computeVisibilitySortingValue(
                mCachedVisibleArea, mOrderSortingValue, mCacheStackVisibility);
    }

    /**
     * Computes the visibility sorting value based on the tab visible area, its distance to the
     * central index (via the order sorting value) and the overall visibility of the stack.
     * Subtracting orderSortingValue makes nearer-to-selection tabs rank higher for equal area;
     * see {@link #computeOrderSortingValue(float, float)} for how stack visibility scales it.
     */
    private static long computeVisibilitySortingValue(
            float area, float orderSortingValue, float stackVisibility) {
        return (long) (area * stackVisibility - orderSortingValue);
    }

    /**
     * @return The cached visible sorting value. Call updateVisiblityValue to update it.
     */
    public long getVisiblitySortingValue() {
        return mVisiblitySortingValue;
    }

    /**
     * Computes the ordering value only based on the distance of the tab to the center one.
     * Low values have higher priority.
     * The index factor needs to be smaller for stacks that have small visibility; dividing by
     * stackVisibility achieves that, and the 0.1f offset avoids dividing by 0.
     */
    private static int computeOrderSortingValue(float indexDistance, float stackVisibility) {
        return (int) ((indexDistance + 1) / (0.1f + 0.9f * stackVisibility));
    }

    /**
     * @return The cached order sorting value. Used to sort based on the tab ordering rather than
     *         visible area.
     */
    public int getOrderSortingValue() {
        return mOrderSortingValue;
    }

    /**
     * Callback for
     * {@link org.chromium.chrome.browser.compositor.layouts.ChromeAnimation.Animatable}
     *
     * @param prop The property to set
     * @param val  The value to set it to
     */
    @Override
    public void setProperty(Property prop, float val) {
        switch (prop) {
            case SCALE:
                setScale(val);
                break;
            case SCROLL_OFFSET:
                setScrollOffset(val);
                break;
            case ALPHA:
                setAlpha(val);
                break;
            case X_IN_STACK_INFLUENCE:
                setXInStackInfluence(val);
                break;
            case X_IN_STACK_OFFSET:
                setXInStackOffset(val);
                break;
            case X_OUT_OF_STACK:
                setXOutOfStack(val);
                break;
            case Y_IN_STACK_INFLUENCE:
                setYInStackInfluence(val);
                break;
            case Y_IN_STACK_OFFSET:
                setYInStackOffset(val);
                break;
            case Y_OUT_OF_STACK:
                setYOutOfStack(val);
                break;
            case DISCARD_AMOUNT:
                setDiscardAmount(val);
                break;
        }
    }

    @Override
    public void onPropertyAnimationFinished(Property prop) {}
}
/* * #%L * Native ARchive plugin for Maven * %% * Copyright (C) 2002 - 2014 NAR Maven Plugin developers. * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package com.github.maven_nar.cpptasks.compiler; import java.io.File; import java.io.IOException; import java.util.Collections; import java.util.Enumeration; import java.util.List; import java.util.Vector; import java.util.ArrayList; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.types.Environment; import org.apache.commons.io.FilenameUtils; import com.github.maven_nar.cpptasks.CCTask; import com.github.maven_nar.cpptasks.CUtil; import com.github.maven_nar.cpptasks.CompilerDef; import com.github.maven_nar.cpptasks.OptimizationEnum; import com.github.maven_nar.cpptasks.ProcessorDef; import com.github.maven_nar.cpptasks.ProcessorParam; import com.github.maven_nar.cpptasks.TargetDef; import com.github.maven_nar.cpptasks.VersionInfo; import com.github.maven_nar.cpptasks.types.CommandLineArgument; import com.github.maven_nar.cpptasks.types.UndefineArgument; import com.google.common.collect.ObjectArrays; import org.apache.tools.ant.util.FileUtils; /** * An abstract Compiler implementation which uses an external program to * perform the compile. 
 *
 * @author Adam Murdoch
 */
public abstract class CommandLineCompiler extends AbstractCompiler {
  /** Command used when invoking ccache */
  private static final String CCACHE_CMD = "ccache";
  // Base executable name; may be decorated by an optional prefix (see getCommand()).
  private String command;
  private String prefix;
  private final Environment env;
  private String identifier;
  private final String identifierArg;
  private final boolean libtool;
  private final CommandLineCompiler libtoolCompiler;
  private final boolean newEnvironment;
  private String fortifyID="";
  private List<String[]> commands;
  private boolean dryRun;

  /**
   * @param command         executable name used to invoke the compiler
   * @param identifierArg   argument used to query the compiler version, may be null
   * @param libtool         whether invocations are wrapped with "libtool";
   *                        mutually exclusive with a non-null libtoolCompiler
   */
  protected CommandLineCompiler(final String command, final String identifierArg, final String[] sourceExtensions,
      final String[] headerExtensions, final String outputSuffix, final boolean libtool,
      final CommandLineCompiler libtoolCompiler, final boolean newEnvironment, final Environment env) {
    super(sourceExtensions, headerExtensions, outputSuffix);
    this.command = command;
    if (libtool && libtoolCompiler != null) {
      throw new java.lang.IllegalArgumentException("libtoolCompiler should be null when libtool is true");
    }
    this.libtool = libtool;
    this.libtoolCompiler = libtoolCompiler;
    this.identifierArg = identifierArg;
    this.newEnvironment = newEnvironment;
    this.env = env;
  }

  abstract protected void addImpliedArgs(Vector<String> args, boolean debug, boolean multithreaded, boolean exceptions,
      LinkType linkType, Boolean rtti, OptimizationEnum optimization);

  /**
   * Adds command-line arguments for include directories.
   *
   * If relativeArgs is not null will add corresponding relative paths
   * include switches to that vector (for use in building a configuration
   * identifier that is consistent between machines).
   *
   * @param baseDirPath
   *          Base directory path.
   * @param includeDirs
   *          Array of include directory paths
   * @param args
   *          Vector of command line arguments used to execute the task
   * @param relativeArgs
   *          Vector of command line arguments used to build the
   *          configuration identifier
   */
  protected void addIncludes(final String baseDirPath, final File[] includeDirs, final Vector<String> args,
      final Vector<String> relativeArgs, final StringBuffer includePathId, final boolean isSystem) {
    for (final File includeDir : includeDirs) {
      args.addElement(getIncludeDirSwitch(includeDir.getAbsolutePath(), isSystem));
      if (relativeArgs != null) {
        final String relative = CUtil.getRelativePath(baseDirPath, includeDir);
        relativeArgs.addElement(getIncludeDirSwitch(relative, isSystem));
        if (includePathId != null) {
          if (includePathId.length() == 0) {
            includePathId.append("/I");
          } else {
            includePathId.append(" /I");
          }
          includePathId.append(relative);
        }
      }
    }
  }

  abstract protected void addWarningSwitch(Vector<String> args, int warnings);

  /**
   * Merges the active defines/undefines of all provided CompilerDefs (most specific first)
   * and appends the corresponding -D/-U style switches to args.
   */
  protected void buildDefineArguments(final CompilerDef[] defs, final Vector<String> args) {
    //
    // assume that we aren't inheriting defines from containing <cc>
    //
    UndefineArgument[] merged = defs[0].getActiveDefines();
    for (int i = 1; i < defs.length; i++) {
      //
      // if we are inheriting, merge the specific defines with the
      // containing defines
      merged = UndefineArgument.merge(defs[i].getActiveDefines(), merged);
    }
    final StringBuffer buf = new StringBuffer(30);
    for (final UndefineArgument current : merged) {
      buf.setLength(0);
      if (current.isDefine()) {
        getDefineSwitch(buf, current.getName(), current.getValue());
      } else {
        getUndefineSwitch(buf, current.getName());
      }
      args.addElement(buf.toString());
    }
  }

  /**
   * Output name is the base name plus a hex hash of the canonical source path, so that
   * same-named sources from different directories do not collide in the object directory.
   */
  @Override
  public String[] getOutputFileNames(final String inputFile, final VersionInfo versionInfo) {
    //
    // if a recognized input file
    //
    if (bid(inputFile) > 1) {
      final String baseName = getBaseOutputName(inputFile);
      final File standardisedFile = new File(inputFile);
      try {
        return new String[] {
          baseName + FilenameUtils.EXTENSION_SEPARATOR
              + Integer.toHexString(standardisedFile.getCanonicalPath().hashCode()) + getOutputSuffix()
        };
      } catch (IOException e) {
        throw new BuildException("Source file not found", e);
      }
    }
    return new String[0];
  }

  /**
   * Compiles a source file.
   *
   * Batches source files onto command lines without exceeding the platform maximum command
   * length; with relentless set, keeps compiling after a failure and throws at the end.
   */
  public void compile(final CCTask task, final File outputDir, final String[] sourceFiles, String[] args,
      final String[] endArgs, final boolean relentless, final CommandLineCompilerConfiguration config,
      final ProgressMonitor monitor) throws BuildException {
    BuildException exc = null;
    //
    // determine length of executable name and args
    //
    String command = getCommandWithPath(config);
    if (config.isUseCcache()) {
      // Replace the command with "ccache" and push the old compiler
      // command into the args.
      final String compilerCommand = command;
      command = CCACHE_CMD;
      args = ObjectArrays.concat(compilerCommand, args);
    }
    // args.length / endArgs.length account for one separator character per argument.
    int baseLength = command.length() + args.length + endArgs.length;
    if (this.libtool) {
      baseLength += 8; // "libtool " prefix
    }
    for (final String arg : args) {
      baseLength += arg.length();
    }
    for (final String endArg : endArgs) {
      baseLength += endArg.length();
    }
    if (baseLength > getMaximumCommandLength()) {
      throw new BuildException("Command line is over maximum length without specifying source file");
    }
    //
    // typically either 1 or Integer.MAX_VALUE
    //
    final int maxInputFilesPerCommand = getMaximumInputFilesPerCommand();
    final int argumentCountPerInputFile = getArgumentCountPerInputFile();
    for (int sourceIndex = 0; sourceIndex < sourceFiles.length;) {
      int cmdLength = baseLength;
      int firstFileNextExec;
      // Greedily take as many source files as fit on one command line.
      for (firstFileNextExec = sourceIndex; firstFileNextExec < sourceFiles.length
          && firstFileNextExec - sourceIndex < maxInputFilesPerCommand; firstFileNextExec++) {
        cmdLength += getTotalArgumentLengthForInputFile(outputDir, sourceFiles[firstFileNextExec]);
        if (cmdLength >= getMaximumCommandLength()) {
          break;
        }
      }
      if (firstFileNextExec == sourceIndex) {
        throw new BuildException("Extremely long file name, can't fit on command line");
      }

      ArrayList<String> commandlinePrefix = new ArrayList<>();
      if (this.libtool) {
        commandlinePrefix.add("libtool");
      }
      if((this.fortifyID !=null) && (!this.fortifyID.equals(""))) {// If FortifyID attribute was set, run the Fortify framework
        commandlinePrefix.add("sourceanalyzer");
        commandlinePrefix.add("-b");
        commandlinePrefix.add(this.fortifyID);
      }
      commandlinePrefix.add(command);
      Collections.addAll(commandlinePrefix, args);

      int retval = 0;
      // One invocation per source file in this batch.
      for (int j = sourceIndex; j < firstFileNextExec; j++) {
        ArrayList<String> commandlineSuffix = new ArrayList<>();

        for (int k = 0; k < argumentCountPerInputFile; k++) {
          commandlineSuffix.add(getInputFileArgument(outputDir, sourceFiles[j], k));
        }
        Collections.addAll(commandlineSuffix, endArgs);

        ArrayList<String> commandline = new ArrayList<>(commandlinePrefix);
        commandline.addAll(commandlineSuffix);
        final int ret = runCommand(task, workDir, commandline.toArray(new String[commandline.size()]));
        if (ret != 0) {
          // Remember the last failure; do not stop this batch.
          retval = ret;
        }
      }
      if (monitor != null) {
        final String[] fileNames = new String[firstFileNextExec - sourceIndex];

        System.arraycopy(sourceFiles, sourceIndex + 0, fileNames, 0, fileNames.length);
        monitor.progress(fileNames);
      }
      //
      // if the process returned a failure code and
      // we aren't holding an exception from an earlier
      // iteration
      if (retval != 0 && exc == null) {
        //
        // construct the exception
        //
        exc = new BuildException(getCommandWithPath(config) + " failed with return code " + retval,
            task.getLocation());

        //
        // and throw it now unless we are relentless
        //
        if (!relentless) {
          throw exc;
        }
      }
      sourceIndex = firstFileNextExec;
    }
    //
    // if the compiler returned a failure value earlier
    // then throw an exception
    //
    if (exc != null) {
      throw exc;
    }
  }

  /**
   * Builds the effective compiler configuration by merging the specific CompilerDef with any
   * inherited base definitions (specific first), collecting args, defines, warning switches,
   * include paths and environment; also derives a machine-independent configuration identifier
   * from the relative-path variants of the arguments.
   */
  @Override
  protected CompilerConfiguration createConfiguration(final CCTask task, final LinkType linkType,
      final ProcessorDef[] baseDefs, final CompilerDef specificDef, final TargetDef targetPlatform,
      final VersionInfo versionInfo) {

    this.prefix = specificDef.getCompilerPrefix();
    this.objDir = task.getObjdir();
    final Vector<String> args = new Vector<>();
    // defaultProviders[0] is the most specific definition, followed by the inherited ones.
    final CompilerDef[] defaultProviders = new CompilerDef[baseDefs.length + 1];
    for (int i = 0; i < baseDefs.length; i++) {
      defaultProviders[i + 1] = (CompilerDef) baseDefs[i];
    }
    defaultProviders[0] = specificDef;
    final Vector<CommandLineArgument> cmdArgs = new Vector<>();
    //
    // add command line arguments inherited from <cc> element
    // any "extends" and finally the specific CompilerDef
    CommandLineArgument[] commandArgs;
    for (int i = defaultProviders.length - 1; i >= 0; i--) {
      commandArgs = defaultProviders[i].getActiveProcessorArgs();
      for (final CommandLineArgument commandArg : commandArgs) {
        if (commandArg.getLocation() == 0) {
          String arg = commandArg.getValue();
          if (isWindows() && arg.matches(".*[ \"].*")) {
            // Work around inconsistent quoting by Ant
            arg = "\"" + arg.replaceAll("[\\\\\"]", "\\\\$0") + "\"";
          }
          args.addElement(arg);
        } else {
          cmdArgs.addElement(commandArg);
        }
      }
    }
    final Vector<ProcessorParam> params = new Vector<>();
    //
    // add command line arguments inherited from <cc> element
    // any "extends" and finally the specific CompilerDef
    ProcessorParam[] paramArray;
    for (int i = defaultProviders.length - 1; i >= 0; i--) {
      paramArray = defaultProviders[i].getActiveProcessorParams();
      Collections.addAll(params, paramArray);
    }
    paramArray = params.toArray(new ProcessorParam[params.size()]);

    if (specificDef.isClearDefaultOptions() == false) {
      final boolean multithreaded = specificDef.getMultithreaded(defaultProviders, 1);
      final boolean debug = specificDef.getDebug(baseDefs, 0);
      final boolean exceptions = specificDef.getExceptions(defaultProviders, 1);
      final Boolean rtti = specificDef.getRtti(defaultProviders, 1);
      final OptimizationEnum optimization = specificDef.getOptimization(defaultProviders, 1);
      this.addImpliedArgs(args, debug, multithreaded, exceptions, linkType, rtti, optimization);
    }

    //
    // add all appropriate defines and undefines
    //
    buildDefineArguments(defaultProviders, args);
    final int warnings = specificDef.getWarnings(defaultProviders, 0);
    addWarningSwitch(args, warnings);
    // location 1 arguments go in the middle; location 2 arguments go at the end (endArgs).
    Enumeration<CommandLineArgument> argEnum = cmdArgs.elements();
    int endCount = 0;
    while (argEnum.hasMoreElements()) {
      final CommandLineArgument arg = argEnum.nextElement();
      switch (arg.getLocation()) {
        case 1:
          args.addElement(arg.getValue());
          break;
        case 2:
          endCount++;
          break;
      }
    }
    final String[] endArgs = new String[endCount];
    argEnum = cmdArgs.elements();
    int index = 0;
    while (argEnum.hasMoreElements()) {
      final CommandLineArgument arg = argEnum.nextElement();
      if (arg.getLocation() == 2) {
        endArgs[index++] = arg.getValue();
      }
    }
    //
    // Want to have distinct set of arguments with relative
    // path names for includes that are used to build
    // the configuration identifier
    //
    final Vector<String> relativeArgs = (Vector) args.clone(); // NOTE(review): raw/unchecked clone cast
    //
    // add all active include and sysincludes
    //
    final StringBuffer includePathIdentifier = new StringBuffer();
    final File baseDir = specificDef.getProject().getBaseDir();
    String baseDirPath;
    try {
      baseDirPath = baseDir.getCanonicalPath();
    } catch (final IOException ex) {
      baseDirPath = baseDir.toString();
    }
    final Vector<String> includePath = new Vector<>();
    final Vector<String> sysIncludePath = new Vector<>();
    for (int i = defaultProviders.length - 1; i >= 0; i--) {
      String[] incPath = defaultProviders[i].getActiveIncludePaths();
      for (final String element : incPath) {
        includePath.addElement(element);
      }
      incPath = defaultProviders[i].getActiveSysIncludePaths();
      for (final String element : incPath) {
        sysIncludePath.addElement(element);
      }
    }
    final File[] incPath = new File[includePath.size()];
    for (int i = 0; i < includePath.size(); i++) {
      incPath[i] = new File(includePath.elementAt(i));
    }
    final File[] sysIncPath = new File[sysIncludePath.size()];
    for (int i = 0; i < sysIncludePath.size(); i++) {
      sysIncPath[i] = new File(sysIncludePath.elementAt(i));
    }
    addIncludes(baseDirPath, incPath, args, relativeArgs, includePathIdentifier, false);
    addIncludes(baseDirPath, sysIncPath, args, null, null, true);
    // The configId is built from relative paths only, so it is stable across machines.
    final StringBuffer buf = new StringBuffer(getIdentifier());
    for (int i = 0; i < relativeArgs.size(); i++) {
      buf.append(' ');
      buf.append(relativeArgs.elementAt(i));
    }
    for (final String endArg : endArgs) {
      buf.append(' ');
      buf.append(endArg);
    }
    final String configId = buf.toString();
    final String[] argArray = new String[args.size()];
    args.copyInto(argArray);
    final boolean rebuild = specificDef.getRebuild(baseDefs, 0);
    final File[] envIncludePath = getEnvironmentIncludePath();
    final String path = specificDef.getToolPath();

    CommandLineCompiler compiler = this;
    Environment environment = specificDef.getEnv();
    if (environment == null) {
      for (final ProcessorDef baseDef : baseDefs) {
        environment = baseDef.getEnv();
        if (environment != null) {
          compiler = (CommandLineCompiler) compiler.changeEnvironment(baseDef.isNewEnvironment(), environment);
        }
      }
    } else {
      compiler = (CommandLineCompiler) compiler.changeEnvironment(specificDef.isNewEnvironment(), environment);
    }

    // Pass the fortifyID for compiler
    compiler.fortifyID = specificDef.getFortifyID();
    compiler.setCommands(specificDef.getCommands());
    compiler.setDryRun(specificDef.isDryRun());

    return new CommandLineCompilerConfiguration(compiler, configId, incPath, sysIncPath, envIncludePath,
        includePathIdentifier.toString(), argArray, paramArray, rebuild, endArgs, path, specificDef.getCcache());
  }

  /** @return number of per-input-file arguments added to each invocation (default 1). */
  protected int getArgumentCountPerInputFile() {
    return 1;
  }

  /** @return the executable name, decorated with the optional compiler prefix when set. */
  protected final String getCommand() {
    if (this.prefix != null && (!this.prefix.isEmpty())) {
      return this.prefix + this.command;
    } else {
      return this.command;
    }
  }

  /**
   * @return the command resolved against the configuration's command path when one is set
   *         (canonicalized when possible), otherwise the bare command.
   */
  public String getCommandWithPath(final CommandLineCompilerConfiguration config) {
    if (config.getCommandPath() != null) {
      final File command = new File(config.getCommandPath(), this.getCommand());
      try {
        return command.getCanonicalPath();
      } catch (final IOException e) {
        e.printStackTrace();
        return command.getAbsolutePath();
      }
    } else {
      return this.getCommand();
    }
  }
abstract protected void getDefineSwitch(StringBuffer buffer, String define, String value); protected abstract File[] getEnvironmentIncludePath(); @Override public String getIdentifier() { if (this.identifier == null) { if (this.identifierArg == null) { this.identifier = getIdentifier(new String[] { this.getCommand() }, this.getCommand()); } else { this.identifier = getIdentifier(new String[] { this.getCommand(), this.identifierArg }, this.getCommand()); } } return this.identifier; } abstract protected String getIncludeDirSwitch(String source); /** * Added by Darren Sargent 22Oct2008 Returns the include dir switch value. * Default implementation doesn't treat system includes specially, for * compilers which don't care. * * @param source * the given source value. * @param isSystem * "true" if this is a system include path * * @return the include dir switch value. */ protected String getIncludeDirSwitch(final String source, final boolean isSystem) { return getIncludeDirSwitch(source); } protected String getInputFileArgument(final File outputDir, final String filename, final int index) { // // if there is an embedded space, // must enclose in quotes String relative=""; String inputFile; try { relative = FileUtils.getRelativePath(workDir, new File(filename)); } catch (Exception ex) { } if (relative.isEmpty()) { inputFile = filename; } else { inputFile = relative; } if (inputFile.indexOf(' ') >= 0) { final String buf = "\"" + inputFile + "\""; return buf; } return inputFile; } protected final boolean getLibtool() { return this.libtool; } /** * Obtains the same compiler, but with libtool set * * Default behavior is to ignore libtool */ public final CommandLineCompiler getLibtoolCompiler() { if (this.libtoolCompiler != null) { return this.libtoolCompiler; } return this; } abstract public int getMaximumCommandLength(); protected int getMaximumInputFilesPerCommand() { return Integer.MAX_VALUE; } /** * Get total command line length due to the input file. 
* * @param outputDir * File output directory * @param inputFile * String input file * @return int characters added to command line for the input file. */ protected int getTotalArgumentLengthForInputFile(final File outputDir, final String inputFile) { final int argumentCountPerInputFile = getArgumentCountPerInputFile(); int len=0; for (int k = 0; k < argumentCountPerInputFile; k++) { len+=getInputFileArgument(outputDir, inputFile, k).length(); } return len + argumentCountPerInputFile; // argumentCountPerInputFile added for spaces } abstract protected void getUndefineSwitch(StringBuffer buffer, String define); /** * This method is exposed so test classes can overload and test the * arguments without actually spawning the compiler */ protected int runCommand(final CCTask task, final File workingDir, final String[] cmdline) throws BuildException { commands.add(cmdline); if (dryRun) return 0; return CUtil.runCommand(task, workingDir, cmdline, this.newEnvironment, this.env); } protected final void setCommand(final String command) { this.command = command; } public void setCommands(List<String[]> commands) { this.commands = commands; } public boolean isDryRun() { return dryRun; } public void setDryRun(boolean dryRun) { this.dryRun = dryRun; } }
/*
 * Copyright (C) 2013 The Conclusions Authors
 * <p/>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */
package ru.rulex.conclusion.guice;

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.inject.*;
import com.google.inject.matcher.Matcher;
import com.google.inject.matcher.Matchers;
import com.google.inject.name.Named;
import com.google.inject.spi.*;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.log4j.Logger;
import ru.rulex.conclusion.ConclusionPredicate;
import ru.rulex.conclusion.Selector;

import java.lang.reflect.Method;
import java.util.List;

import static com.google.inject.name.Names.named;
import static ru.rulex.conclusion.guice.GuiceGenericTypes.*;

import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableAnyOffConclusionPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableAtMostConclusionPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableEqualsConclusionPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableLessConclusionPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableMoreConclusionPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableAtLeastConclusionPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableMatchAnyOffPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableRegexpPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableMultiRegexpPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableAlwaysFalsePredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableAlwaysTruePredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableNotNullConclusionPredicate;
import ru.rulex.conclusion.guice.InjectableConclusionPredicates.InjectableIsNullConclusionPredicate;

/**
 * Guice {@link AbstractModule} DSL for declaring named
 * {@link ConclusionPredicate} bindings. Subclasses implement
 * {@link #bindPredicate()} and use the protected bindXxx methods; each call
 * registers an {@code ElementInjectionRequest} annotated with the condition
 * name, which is later replayed against a child {@link Binder} to materialize
 * the predicate binding.
 *
 * @param <T> the comparable value type the predicates operate on
 */
@SuppressWarnings("unchecked")
abstract class ConclusionPredicateModule<T extends Comparable<? super T>> extends AbstractModule
{
  /**
   * Implemented by subclasses: declare the predicate bindings for this module.
   */
  protected abstract void bindPredicate();

  // Guice entry point; simply delegates to the subclass's declarations.
  protected void configure()
  {
    bindPredicate();
  }

  /** Binds the constant-false predicate under the name "alwaysFalse". */
  protected void bindAlwaysFalse()
  {
    bindAlwaysRequest( "alwaysFalse", false , InjectableAlwaysFalsePredicate.class );
  }

  /** Binds the constant-true predicate under the name "alwaysTrue". */
  protected void bindAlwaysTrue()
  {
    bindAlwaysRequest( "alwaysTrue", true , InjectableAlwaysTruePredicate.class );
  }

  /**
   * Binds a not-null check on the value produced by {@code selector}.
   *
   * @param conditionName the binding's name
   * @param clazz the value class used to build the type literal
   * @param selector extracts the value to test from the event object
   */
  protected void bindIsNotNull( final String conditionName, final Class<T> clazz,
      final Selector<?, T> selector )
  {
    bindNullableRequest( conditionName, clazz, selector, InjectableNotNullConclusionPredicate.class );
  }

  /** Binds an is-null check on the value produced by {@code selector}. */
  protected void bindIsNull( final String conditionName, final Class<T> clazz,
      final Selector<?, T> selector )
  {
    bindNullableRequest( conditionName, clazz, selector, InjectableIsNullConclusionPredicate.class );
  }

  /**
   * Binds an equality predicate against a single value.
   *
   * @param conditionName the binding's name
   * @param value the expected value
   * @param selector extracts the value to compare
   */
  protected void bindEquality( final String conditionName, final T value,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, value, selector, InjectableEqualsConclusionPredicate.class );
  }

  /**
   * Binds an equality predicate against a set of string values.
   *
   * @param conditionName the binding's name
   * @param values the accepted values
   * @param selector extracts the value to compare
   */
  protected void bindEquality( final String conditionName, final ImmutableSet<String> values,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, values, selector, InjectableEqualsConclusionPredicate.class );
  }

  /**
   * Binds a match-any predicate: true when the selected value matches any of
   * {@code values}.
   */
  protected void bindEqualsAnyOff( final String conditionName, final ImmutableSet<String> values,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, values, selector, InjectableMatchAnyOffPredicate.class );
  }

  /**
   * Binds a multi-pattern regular-expression predicate over {@code values}.
   */
  protected void bindMultiRegExp( final String conditionName, final ImmutableSet<String> values,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, values, selector, InjectableMultiRegexpPredicate.class );
  }

  /** Binds a single-pattern regular-expression predicate. */
  protected void bindRegExp( final String conditionName, final T value,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, value, selector, InjectableRegexpPredicate.class );
  }

  /**
   * Binds a strictly-greater-than predicate ("more").
   *
   * @param conditionName the binding's name
   * @param value the threshold
   * @param selector extracts the value to compare
   */
  protected void bindMajority( final String conditionName, final T value,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, value, selector, InjectableMoreConclusionPredicate.class );
  }

  /**
   * MoreOrEquals: binds a greater-than-or-equal predicate.
   *
   * @param conditionName the binding's name
   * @param value the threshold
   * @param selector extracts the value to compare
   */
  protected void bindAtLeast( final String conditionName, final T value,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, value, selector, InjectableAtLeastConclusionPredicate.class );
  }

  /**
   * LessOrEquals: binds a less-than-or-equal predicate.
   *
   * @param conditionName the binding's name
   * @param value the threshold
   * @param selector extracts the value to compare
   */
  protected void bindAtMost( final String conditionName, final T value,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, value, selector, InjectableAtMostConclusionPredicate.class );
  }

  /**
   * Binds a strictly-less-than predicate ("less").
   *
   * @param conditionName the binding's name
   * @param value the threshold
   * @param selector extracts the value to compare
   */
  protected void bindMinority( final String conditionName, final T value,
      final Selector<?, T> selector )
  {
    bindPredicateRequest( conditionName, value, selector, InjectableLessConclusionPredicate.class );
  }

  /**
   * Binds a logical OR ("any of") over the predicates declared in the given
   * modules.
   *
   * @param conditionName the binding's name
   * @param modules modules whose predicate bindings are combined
   */
  protected void bindDisjunction( String conditionName, Module... modules )
  {
    bindDisjunctionRequest( conditionName, modules );
  }

  /**
   * Inspects the given modules' binding elements, materializes every
   * SinglePredicateInjectionRequest found there into a live predicate, and
   * binds the collected predicates as one disjunction.
   *
   * @param conditionName the binding's name
   * @param modules source modules to scan
   */
  private void bindDisjunctionRequest( String conditionName, Module... modules )
  {
    final ImmutableList.Builder<ElementInjectionRequest> disjunctionRequestRequests = ImmutableList.builder();
    final ImmutableList.Builder<ConclusionPredicate> disjunctionPredicates = ImmutableList.builder();
    final List<Element> elements = Elements.getElements( modules );
    for ( Element element : elements )
    {
      // visit each recorded binding; keep only named instance bindings that
      // carry a single-predicate injection request
      element.acceptVisitor( new DefaultElementVisitor<Void>()
      {
        public <T> Void visit( Binding<T> binding )
        {
          Key<?> bindingKey = binding.getKey();
          if ( binding instanceof InstanceBinding && bindingKey.getAnnotation() != null
              && (Named.class.isAssignableFrom( bindingKey.getAnnotationType() )) )
          {
            InstanceBinding<?> requestBinding = (InstanceBinding<?>) binding;
            if ( requestBinding.getInstance() instanceof SinglePredicateInjectionRequest )
            {
              disjunctionRequestRequests.add( ((ElementInjectionRequest) requestBinding.getInstance()) );
            }
          }
          return super.visit( binding );
        }
      } );
    }
    for ( ElementInjectionRequest binding : disjunctionRequestRequests.build() )
    {
      disjunctionPredicates.add( bindEarlierInjectedPredicates( binding ) );
    }
    final ImmutableList<ConclusionPredicate> disjunctionPredicatesList = disjunctionPredicates.build();
    if ( disjunctionPredicatesList.size() > 0 )
    {
      bindDisjunction( conditionName, InjectableAnyOffConclusionPredicate.class,
          disjunctionPredicatesList );
    }
  }

  /**
   * Replays a previously recorded injection request against a private child
   * injector and returns the predicate instance it produces.
   * NOTE(review): a new Injector is created per request — presumably
   * acceptable for configuration-time use; confirm it is not on a hot path.
   *
   * @param binding the recorded request
   * @return ConclusionPredicate<?> the materialized predicate
   */
  private ConclusionPredicate<?> bindEarlierInjectedPredicates( final ElementInjectionRequest binding )
  {
    Injector internalInjector = Guice.createInjector( new AbstractModule()
    {
      @Override
      protected void configure()
      {
        binding.setBinder( binder() );
        binding.run();
      }
    } );
    return internalInjector.getInstance( Key.get( newGenericType( ConclusionPredicate.class,
        binding.getLiteral() ) ) );
  }

  /**
   * Registers an OrPredicatesInjectionRequest that, when replayed, binds the
   * collected predicate list and the OR-combinator predicate class.
   */
  private <E extends ConclusionPredicate<T>> void bindDisjunction( final String conditionName,
      final Class<E> predicateClass0,
      final ImmutableList<ConclusionPredicate> disjunctionPredicatesList )
  {
    bind( ElementInjectionRequest.class ).annotatedWith( named( conditionName ) ).toInstance(
        new OrPredicatesInjectionRequest()
        {
          private Binder binder;
          private final Class<E> predicateClass = predicateClass0;

          @Override
          public void setBinder( Binder binder )
          {
            this.binder = binder;
          }

          @Override
          public TypeLiteral<?> getLiteral()
          {
            // disjunctions have no single value type
            return TypeLiteral.get( Void.class );
          }

          @Override
          public Matcher<Object> matcher()
          {
            return Matchers.any();
          }

          @Override
          public String description()
          {
            return conditionName;
          }

          @Override
          public void run()
          {
            binder.bind( immutableListOf( ConclusionPredicate.class ) ).toInstance( disjunctionPredicatesList );
            binder.bind( AbstractGuiceImmutablePhraseModule.OR_KEY ).to( InjectableAnyOffConclusionPredicate.class );
          }
        } );
  }

  /**
   * Registers a request for a null/not-null predicate over values of
   * {@code clazz}; the selector feeds the predicate via a type listener.
   */
  private <U, E extends ConclusionPredicate<T>> void bindNullableRequest( final String conditionName,
      final Class<T> clazz, final Selector<U, T> selector0, final Class<E> predicateClass0 )
  {
    bind( ElementInjectionRequest.class ).annotatedWith( named( conditionName ) ).toInstance(
        new SinglePredicateInjectionRequest()
        {
          private Binder binder;
          private final Class<E> predicateClass = predicateClass0;
          private TypeLiteral<T> literal = TypeLiteral.get( clazz );

          @Override
          public void setBinder( Binder binder )
          {
            this.binder = binder;
          }

          @Override
          public TypeLiteral<?> getLiteral()
          {
            return literal;
          }

          @Override
          public Matcher<Object> matcher()
          {
            return Matchers.only( newEnclosedGenericType( predicateClass0, literal ) );
          }

          @Override
          public String description()
          {
            return conditionName;
          }

          @Override
          public void run()
          {
            binder.bind( newGenericType( ConclusionPredicate.class, literal ) )
                .to( newEnclosedGenericType( predicateClass, literal ) );
            // the listener injects the selector into the created predicate
            binder.bindListener( matcher(), new GuiceSelectorBasedPredicateTypeListener<U, T>( selector0 ) );
          }
        });
  }

  /**
   * Registers a request for a set-valued predicate (equality / any-of /
   * regexp over an ImmutableSet of strings).
   */
  private <U, E extends ConclusionPredicate<T>> void bindPredicateRequest( final String conditionName,
      final ImmutableSet<String> values0, final Selector<U, T> selector0, final Class<E> predicateClass0 )
  {
    bind( ElementInjectionRequest.class ).annotatedWith( named( conditionName ) ).toInstance(
        new SinglePredicateInjectionRequest()
        {
          private Binder binder;
          private final Selector<U, T> selector = selector0;
          private final Class<E> predicateClazz = predicateClass0;
          private final ImmutableSet<String> values = values0;
          private TypeLiteral<String> literal = TypeLiteral.get( String.class );

          @Override
          public void setBinder( Binder binder )
          {
            this.binder = binder;
          }

          @Override
          public TypeLiteral<?> getLiteral()
          {
            return TypeLiteral.get( String.class );
          }

          @Override
          public Matcher<Object> matcher()
          {
            return Matchers.only( newEnclosedGenericType( predicateClazz, literal ) );
          }

          @Override
          public String description()
          {
            return conditionName;
          }

          @Override
          public void run()
          {
            binder.bind( new TypeLiteral<ImmutableSet<String>>() {} ).toInstance( values );
            binder.bind( newGenericType( ConclusionPredicate.class, literal ) ).to(
                newEnclosedGenericType( predicateClazz, literal ) );
            binder.bindListener( matcher(), new GuiceSelectorBasedPredicateTypeListener<U, T>( selector ) );
          }
        } );
  }

  /**
   * Registers a request for a single-value comparison predicate; the value's
   * runtime class determines the bound type literal.
   *
   * @param conditionName the binding's name
   * @param value0 the comparison operand
   * @param selector0 extracts the value to compare
   * @param predicateClass0 the predicate implementation to bind
   */
  private <U, E extends ConclusionPredicate<T>> void bindPredicateRequest( final String conditionName,
      final T value0, final Selector<U, T> selector0, final Class<E> predicateClass0 )
  {
    bind( ElementInjectionRequest.class ).annotatedWith( named( conditionName ) ).toInstance(
        new SinglePredicateInjectionRequest()
        {
          private Binder binder;
          private final T value = value0;
          private final Class<E> predicateClass = predicateClass0;
          private final Selector<U, T> selector = selector0;
          // literal is derived from the operand's runtime class
          private final TypeLiteral<T> literal = (TypeLiteral<T>) TypeLiteral.get( value0.getClass() );

          @Override
          public void setBinder( Binder binder )
          {
            this.binder = binder;
          }

          @Override
          public TypeLiteral<?> getLiteral()
          {
            return literal;
          }

          @Override
          public void run()
          {
            binder.bind( literal ).toInstance( value );
            binder.bind( newGenericType( ConclusionPredicate.class, literal ) ).to(
                newEnclosedGenericType( predicateClass, literal ) );
            binder.bindListener( matcher(), new GuiceSelectorBasedPredicateTypeListener<U, T>( selector ) );
          }

          @Override
          public Matcher<Object> matcher()
          {
            return Matchers.only( newEnclosedGenericType( predicateClass, literal ) );
          }

          @Override
          public String description()
          {
            return conditionName;
          }
        } );
  }

  /**
   * Registers a request for a constant boolean predicate (alwaysTrue /
   * alwaysFalse); no selector is involved.
   */
  private <U, E extends ConclusionPredicate<T>> void bindAlwaysRequest(final String conditionName,
      final boolean value0, final Class<E> predicateClass0 )
  {
    bind( ElementInjectionRequest.class ).annotatedWith( named( conditionName ) ).toInstance(
        new SinglePredicateInjectionRequest()
        {
          private Binder binder;
          private final boolean value = value0;
          private final TypeLiteral<Boolean> literal = new TypeLiteral<Boolean>() {};

          @Override
          public void setBinder( Binder binder )
          {
            this.binder = binder;
          }

          @Override
          public TypeLiteral<?> getLiteral()
          {
            return literal;
          }

          @Override
          public Matcher<Object> matcher()
          {
            return Matchers.only( newEnclosedGenericType( predicateClass0, literal ) );
          }

          @Override
          public String description()
          {
            return conditionName;
          }

          @Override
          public void run()
          {
            binder.bind(literal).toInstance(value);
            binder.bindListener(matcher(), new GuiceBasedPredicateTypeListener());
            binder.bind( newGenericType( ConclusionPredicate.class, literal ) )
                .to( newEnclosedGenericType( predicateClass0, literal ) );
          }
        });
  }

  /**
   * Shared helper base for the type listeners below: locates a declared
   * toString() and recognizes the erased {@code apply(Object)} bridge method
   * to intercept.
   */
  private static abstract class AbstractGuicePredicateTypeListener implements TypeListener
  {
    private static final String INTERCEPTOR_METHOD = "apply";
    private static final String TO_STRING_METHOD = "toString";

    // Returns the class's own toString() when declared, Optional.absent() otherwise.
    protected <T> Optional<Method> findToStringMethod( Class<T> clazz )
    {
      for ( Method method : clazz.getDeclaredMethods() )
      {
        Matcher<Method> predicate = Matchers.returns( Matchers.only( String.class ) );
        if ( predicate.matches( method ) && method.getName().equals( TO_STRING_METHOD ) )
        {
          return Optional.fromNullable( method );
        }
      }
      return Optional.absent();
    }

    // True for the erased apply(Object) overload only.
    protected boolean isApplyMethod( Method method )
    {
      return method.getName().equals( INTERCEPTOR_METHOD )
          && (method.getParameterTypes()[0] == Object.class);
    }
  }

  /**
   * Listener for constant predicates: intercepts apply(Object) with the
   * boolean interceptor (no selector needed).
   */
  private static final class GuiceBasedPredicateTypeListener extends AbstractGuicePredicateTypeListener
  {
    @Override
    public <I> void hear( TypeLiteral<I> literal, TypeEncounter<I> encounter )
    {
      Class<? super I> clazz = literal.getRawType();
      Optional<Method> toStringMethod = findToStringMethod(clazz);
      Preconditions.checkNotNull( toStringMethod.get(),
          "Can't find toString method in " + clazz.getSimpleName() );
      for (Method method : clazz.getMethods())
      {
        if ( isApplyMethod( method ) )
        {
          encounter.bindInterceptor(Matchers.only(method),
              new PredicateBooleanApplyMethodInterceptor( toStringMethod.get() ));
        }
      }
    }
  }

  /**
   * Listener for selector-backed predicates: intercepts apply(Object) and
   * routes the argument through the selector before the predicate runs.
   */
  private static final class GuiceSelectorBasedPredicateTypeListener<U, T>
      extends AbstractGuicePredicateTypeListener
  {
    private final Selector<U, T> selector;

    private GuiceSelectorBasedPredicateTypeListener( Selector<U, T> selector )
    {
      this.selector = selector;
    }

    // NOTE(review): the method-level <U> shadows the class-level U; the
    // unchecked cast below bridges the two.
    @Override
    public <U> void hear( TypeLiteral<U> literal, TypeEncounter<U> encounter )
    {
      final Class<? super U> clazz = literal.getRawType();
      Optional<Method> toStringMethod = findToStringMethod( clazz );
      Preconditions.checkNotNull( toStringMethod.get(),
          "Can't find toString method in " + clazz.getSimpleName() );
      for ( Method method : clazz.getDeclaredMethods() )
      {
        if ( isApplyMethod( method ) )
        {
          PredicateApplyMethodInterceptor<U, T> interceptor = new PredicateApplyMethodInterceptor<U, T>(
              toStringMethod.get(), (Selector<U, T>) selector );
          encounter.bindInterceptor( Matchers.only( method ), interceptor );
          // only the first matching apply method is intercepted
          return;
        }
      }
    }
  }

  /**
   * Interceptor for constant predicates: forces the apply argument to FALSE
   * and logs the predicate's toString before proceeding.
   */
  private static final class PredicateBooleanApplyMethodInterceptor implements MethodInterceptor
  {
    private final Method toStringMethod;
    private static final Logger logger = Logger.getLogger( PredicateBooleanApplyMethodInterceptor.class );

    private PredicateBooleanApplyMethodInterceptor(Method toStringMethod)
    {
      this.toStringMethod = toStringMethod;
    }

    @Override
    public Object invoke(MethodInvocation invocation) throws Throwable
    {
      invocation.getArguments()[0] = Boolean.FALSE;
      logger.debug(String.format("%s", toStringMethod.invoke(invocation.getThis(), new Object[]{})));
      return invocation.proceed();
    }
  }

  /**
   * Interceptor for selector-backed predicates: replaces the apply argument
   * with the value extracted by the selector, logging the predicate and value.
   */
  private static final class PredicateApplyMethodInterceptor<U, T> implements MethodInterceptor
  {
    private final Method toStringMethod;
    private final Selector<U, T> selector;
    private static final Logger logger = Logger.getLogger( PredicateApplyMethodInterceptor.class );

    private PredicateApplyMethodInterceptor( Method toStringMethod, Selector<U, T> selector )
    {
      this.toStringMethod = toStringMethod;
      this.selector = selector;
    }

    /**
     * intercept {@code GuicefyConclusionPredicates} nested static classes
     * apply(Object value) method
     */
    @Override
    public Object invoke( MethodInvocation invocation ) throws Throwable
    {
      U argument = (U) invocation.getArguments()[0];
      Comparable<U> value = (Comparable<U>) selector.select( argument );
      logger.debug( String.format( "%s %s",
          toStringMethod.invoke( invocation.getThis(), new Object[] {} ), value ) );
      invocation.getArguments()[0] = value;
      return invocation.proceed();
    }
  }
}
package ca.uhn.fhir.jaxrs.server.example; import static org.junit.Assert.*; import java.util.Arrays; import java.util.List; import org.apache.commons.lang3.StringUtils; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.hl7.fhir.dstu3.model.*; import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent; import org.junit.*; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jaxrs.client.JaxRsRestfulClientFactory; import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import ca.uhn.fhir.util.TestUtil; public class JaxRsPatientProviderDstu3Test { private static IGenericClient client; private static FhirContext ourCtx = FhirContext.forDstu3(); private static final String PATIENT_NAME = "Van Houte"; private static int ourPort; private static Server jettyServer; @AfterClass public static void afterClassClearContext() { TestUtil.clearAllStaticFieldsForUnitTest(); } @BeforeClass public static void setUpClass() throws Exception { ourPort = RandomServerPortProvider.findFreePort(); ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); context.setContextPath("/"); System.out.println(ourPort); jettyServer = new Server(ourPort); jettyServer.setHandler(context); ServletHolder jerseyServlet = context.addServlet(org.glassfish.jersey.servlet.ServletContainer.class, "/*"); jerseyServlet.setInitOrder(0); //@formatter:off jerseyServlet.setInitParameter("jersey.config.server.provider.classnames", StringUtils.join(Arrays.asList( JaxRsConformanceProviderDstu3.class.getCanonicalName(), JaxRsPatientRestProviderDstu3.class.getCanonicalName(), JaxRsPageProviderDstu3.class.getCanonicalName() ), ";")); //@formatter:on jettyServer.start(); ourCtx.setRestfulClientFactory(new 
JaxRsRestfulClientFactory(ourCtx)); ourCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); ourCtx.getRestfulClientFactory().setSocketTimeout(1200 * 1000); client = ourCtx.newRestfulGenericClient("http://localhost:" + ourPort + "/"); client.setEncoding(EncodingEnum.JSON); client.registerInterceptor(new LoggingInterceptor(true)); } @AfterClass public static void tearDownClass() throws Exception { try { jettyServer.destroy(); } catch (Exception e) { } } /** Search/Query - Type */ @Test public void findUsingGenericClientBySearch() { // Perform a search final Bundle results = client.search().forResource(Patient.class) .where(Patient.NAME.matchesExactly().value(PATIENT_NAME)).returnBundle(Bundle.class).execute(); System.out.println(results.getEntry().get(0)); assertEquals(results.getEntry().size(), 1); } /** Search - Multi-valued Parameters (ANY/OR) */ @Test public void findUsingGenericClientBySearchWithMultiValues() { final Bundle response = client.search().forResource(Patient.class) .where(Patient.ADDRESS.matches().values("Toronto")).and(Patient.ADDRESS.matches().values("Ontario")) .and(Patient.ADDRESS.matches().values("Canada")) .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("SHORTNAME", "TOYS")).returnBundle(Bundle.class).execute(); System.out.println(response.getEntry().get(0)); } /** Search - Paging */ @Test public void findWithPaging() { // Perform a search final Bundle results = client.search().forResource(Patient.class).limitTo(8).returnBundle(Bundle.class).execute(); System.out.println(results.getEntry().size()); if (results.getLink(Bundle.LINK_NEXT) != null) { // load next page final Bundle nextPage = client.loadPage().next(results).execute(); System.out.println(nextPage.getEntry().size()); } } /** Search using other query options */ public void testOther() { //missing } /** */ @Test public void testSearchPost() { Bundle response = client.search() .forResource("Patient") .usingStyle(SearchStyleEnum.POST) 
.returnBundle(Bundle.class) .execute(); assertTrue(response.getEntry().size() > 0); } /** Search - Compartments */ @Test public void testSearchCompartements() { Bundle response = client.search() .forResource(Patient.class) .withIdAndCompartment("1", "Condition") .returnBundle(Bundle.class) .execute(); assertTrue(response.getEntry().size() > 0); } /** Search - Subsetting (_summary and _elements) */ @Test @Ignore public void testSummary() { client.search() .forResource(Patient.class) .returnBundle(Bundle.class) .execute(); } @Test public void testCreatePatient() { final Patient existing = new Patient(); existing.setId((IdType) null); existing.getName().add(new HumanName().setFamily("Created Patient 54")); client.setEncoding(EncodingEnum.JSON); final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); System.out.println(patient); assertNotNull(client.read(Patient.class, patient.getId())); client.setEncoding(EncodingEnum.JSON); } /** Conditional Creates */ @Test public void testConditionalCreate() { final Patient existing = new Patient(); existing.setId((IdType) null); existing.getName().add(new HumanName().setFamily("Created Patient 54")); client.setEncoding(EncodingEnum.XML); final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); client.create() .resource(patient) .conditional() .where(Patient.IDENTIFIER.exactly().identifier(patient.getId())) .execute(); } /** Find By Id */ @Test public void findUsingGenericClientById() { final Patient results = client.read(Patient.class, "1"); assertEquals(results.getIdElement().getIdPartAsLong().longValue(), 1L); } @Test public void testUpdateById() { final Patient existing = client.read(Patient.class, "1"); final List<HumanName> 
name = existing.getName(); name.get(0).addSuffix("The Second"); existing.getName().addAll(name); client.setEncoding(EncodingEnum.XML); final MethodOutcome results = client.update("1", existing); } @Test public void testDeletePatient() { final Patient existing = new Patient(); existing.getName().add(new HumanName().setFamily("Created Patient XYZ")); final MethodOutcome results = client.create().resource(existing).prefer(PreferReturnEnum.REPRESENTATION).execute(); System.out.println(results.getId()); final Patient patient = (Patient) results.getResource(); client.delete().resourceById(patient.getIdElement()).execute(); try { client.read().resource(Patient.class).withId(patient.getId()).execute(); fail(); } catch (final Exception e) { //assertEquals(e.getStatusCode(), Constants.STATUS_HTTP_404_NOT_FOUND); } } /** Transaction - Server */ @Ignore @Test public void testTransaction() { Bundle bundle = new Bundle(); BundleEntryComponent entry = bundle.addEntry(); final Patient existing = new Patient(); existing.getName().get(0).setFamily("Created with bundle"); entry.setResource(existing); // FIXME ? 
// BoundCodeDt<BundleEntryTransactionMethodEnum> theTransactionOperation = // new BoundCodeDt( // BundleEntryTransactionMethodEnum.VALUESET_BINDER, // BundleEntryTransactionMethodEnum.POST); // entry.setTransactionMethod(theTransactionOperation); Bundle response = client.transaction().withBundle(bundle).execute(); } /** Conformance - Server */ @Test @Ignore public void testConformance() { final CapabilityStatement conf = client.fetchConformance().ofType(CapabilityStatement.class).execute(); System.out.println(conf.getRest().get(0).getResource().get(0).getType()); assertEquals(conf.getRest().get(0).getResource().get(0).getType().toString(), "Patient"); } /** Extended Operations */ // Create a client to talk to the HeathIntersections server @Test public void testExtendedOperations() { client.registerInterceptor(new LoggingInterceptor(true)); // Create the input parameters to pass to the server Parameters inParams = new Parameters(); inParams.addParameter().setName("start").setValue(new DateType("2001-01-01")); inParams.addParameter().setName("end").setValue(new DateType("2015-03-01")); inParams.addParameter().setName("dummy").setValue(new StringType("myAwesomeDummyValue")); // Invoke $everything on "Patient/1" Parameters outParams = client .operation() .onInstance(new IdType("Patient", "1")) .named("$firstVersion") .withParameters(inParams) //.useHttpGet() // Use HTTP GET instead of POST .execute(); String resultValue = outParams.getParameter().get(0).getValue().toString(); System.out.println(resultValue); assertEquals("expected but found : "+ resultValue, resultValue.contains("myAwesomeDummyValue"), true); } @Test public void testExtendedOperationsUsingGet() { // Create the input parameters to pass to the server Parameters inParams = new Parameters(); inParams.addParameter().setName("start").setValue(new DateType("2001-01-01")); inParams.addParameter().setName("end").setValue(new DateType("2015-03-01")); inParams.addParameter().setName("dummy").setValue(new 
StringType("myAwesomeDummyValue")); // Invoke $everything on "Patient/1" Parameters outParams = client .operation() .onInstance(new IdType("Patient", "1")) .named("$firstVersion") .withParameters(inParams) .useHttpGet() // Use HTTP GET instead of POST .execute(); String resultValue = outParams.getParameter().get(0).getValue().toString(); System.out.println(resultValue); assertEquals("expected but found : "+ resultValue, resultValue.contains("myAwesomeDummyValue"), true); } @Test public void testVRead() { final Patient patient = client.vread(Patient.class, "1", "1"); System.out.println(patient); } @Test public void testRead() { final Patient patient = client.read(Patient.class, "1"); System.out.println(patient); } }
package com.twu.biblioteca.View;

import com.twu.biblioteca.Model.*;
import com.twu.biblioteca.Service.LibraryService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.ByteArrayInputStream;

import static org.junit.Assert.assertEquals;

/**
 * Unit tests for {@link Menu}: welcome/menu rendering, book and movie
 * checkout/return flows, and user credential verification.
 *
 * Console interaction is simulated by swapping {@code System.in} for an
 * in-memory byte stream before each prompting call.
 */
public class MenuTest {

    private LibraryService libSystem;
    private Menu menu;
    private User customer;
    private ByteArrayInputStream in;

    /** Routes the given text to System.in so the next Menu prompt reads it. */
    private void stubConsoleInput(String text) {
        in = new ByteArrayInputStream(text.getBytes());
        System.setIn(in);
    }

    @Before
    public void setUp() {
        libSystem = new LibraryService();
        menu = new Menu();
        customer = new User(new Name("Bojack", "Horseman"), "micro@Gmail.com", "04112628", "123-4567", "64 digit hash");
        libSystem.addBook(new Book("Test-Driven Development", new AuthorList(new Author("Kent", "Beck")), 2003));
        libSystem.addBook(new Book("Gears of War: Anvil Gate", new AuthorList(new Author("Karen", "Travis")), 2010));
        libSystem.addBook(new Book("Artificial Intelligence: A Modern Approach", new AuthorList(new Author("Peter", "Norvig")), 2010));
        libSystem.addBook(new Book("Introduction to the Design & Analysis of Algorithm", new AuthorList(new Author("Anany", "Levitin")), 2012));
    }

    @After
    public void teardown() {
        // NOTE(review): System.in already points at the last stub here, so this
        // call is effectively a no-op — TODO confirm the intent was to restore
        // the original stdin captured before the test ran.
        System.setIn(System.in);
    }

    @Test
    public void generateWelcomeShouldPrintWelcomeMessage() {
        assertEquals("===Welcome to Biblioteca!===", menu.generateWelcome());
    }

    @Test
    public void generateMenuShouldPutNineLinesOfMenuIntoLineArray() {
        assertEquals(9, menu.generateMenu(libSystem).size());
    }

    @Test
    public void checkoutBookShouldDeclareNoBooksAvailableIfAllAreBorrowed() {
        // Borrow every book added in setUp.
        stubConsoleInput("Test-Driven Development");
        menu.checkoutBook(libSystem);
        stubConsoleInput("Gears of War: Anvil Gate");
        menu.checkoutBook(libSystem);
        stubConsoleInput("Artificial Intelligence: A Modern Approach");
        menu.checkoutBook(libSystem);
        stubConsoleInput("Introduction to the Design & Analysis of Algorithm");
        menu.checkoutBook(libSystem);
        // One more attempt with the catalogue exhausted (no fresh input stubbed,
        // matching the original test's sequencing).
        assertEquals("There are currently no books available.", menu.checkoutBook(libSystem));
    }

    @Test
    public void checkoutBookShouldReturnThankYouMessageOnSuccessfulCheckout() {
        stubConsoleInput("Test-Driven Development");
        assertEquals("Thank you! Enjoy the book.", menu.checkoutBook(libSystem));
    }

    @Test
    public void returnBookShouldDeclareNoBooksToReturnIfNoneAreBorrowed() {
        assertEquals("There are currently no books being borrowed.", menu.returnBook(libSystem));
    }

    @Test
    public void checkoutMovieShouldDeclareNoAvailableMoviesIfAllAreBorrowed() {
        assertEquals("There are currently no movies available.", menu.checkoutMovie(libSystem));
    }

    @Test
    public void checkoutMovieShouldReturnThankYouMsgOnSuccessfulRental() {
        libSystem.addMovie(new Movie("Mad Max: Fury Road", 2015, new Director("George", "Miller"), Rating.TEN));
        stubConsoleInput("Mad Max: Fury Road");
        assertEquals("Thank you! Enjoy the movie.", menu.checkoutMovie(libSystem));
    }

    @Test
    public void checkoutMovieShouldReturnBookUnavailableMessageIfBookDoesNotMatch() {
        libSystem.addMovie(new Movie("Mad Max: Fury Road", 2015, new Director("George", "Miller"), Rating.TEN));
        stubConsoleInput("Mad Max: Furiosa Road");
        assertEquals("That movie is unavailable.", menu.checkoutMovie(libSystem));
    }

    @Test
    public void returnMovieShouldReturnNoMoviesBeingBorrowedMsgIfNoAvailableMovies() {
        stubConsoleInput("Mad Max: Furiosa Road");
        assertEquals("There are currently no movies being borrowed.", menu.returnMovie(libSystem));
    }

    @Test
    public void returnMovieShouldReturnThankYouMsgOnSuccessfulReturn() {
        libSystem.addMovie(new Movie("Mad Max: Fury Road", 2015, new Director("George", "Miller"), Rating.TEN));
        stubConsoleInput("Mad Max: Fury Road");
        menu.checkoutMovie(libSystem);
        stubConsoleInput("Mad Max: Fury Road");
        assertEquals("Thank you for returning the movie.", menu.returnMovie(libSystem));
    }

    @Test
    public void returnMovieShouldReturnInvalidMovieMsgIfEnteredMovieNotFound() {
        libSystem.addMovie(new Movie("Mad Max: Fury Road", 2015, new Director("George", "Miller"), Rating.TEN));
        stubConsoleInput("Mad Max: Fury Road");
        menu.checkoutMovie(libSystem);
        stubConsoleInput("Mad Max: Furiosa Road");
        assertEquals("That is not a valid movie to return.", menu.returnMovie(libSystem));
    }

    @Test
    public void requestUsernameShouldReturnUsernameIfFormatIsSevenIntegers() {
        stubConsoleInput("1235678");
        assertEquals("123-5678", menu.requestUsername());
    }

    @Test
    public void requestPasswordShouldReturnEnteredPassword() {
        stubConsoleInput("Amanda");
        assertEquals("Amanda", menu.requestPassword());
    }

    @Test
    public void verifyUserShouldReturnTrueIfUserCredentialsAreCorrect() {
        // Fresh service so only the user registered below exists.
        LibraryService userStore = new LibraryService();
        String hash = "fdb8534840de9c6d46d6004697249a74c1730abfc3a2c090f940c91b388b66db";
        userStore.addUser(new User(new Name("Bojack", "Horseman"), "micro@Gmail.com", "04112628", "123-4567", hash));
        stubConsoleInput("1234567");
        String username = menu.requestUsername();
        stubConsoleInput("64 digit hash");
        String password = menu.requestPassword();
        assertEquals(true, menu.verifyUser(userStore, username, password));
    }

    @Test
    public void verifyUserShouldReturnFalseIfUserCredentialsIncorrect() {
        LibraryService userStore = new LibraryService();
        String hash = "fdb8534840de9c6d46d6004697249a74c1730abfc3a2c090f940c91b388b66db";
        userStore.addUser(new User(new Name("Bojack", "Horseman"), "micro@Gmail.com", "04112628", "123-4567", hash));
        stubConsoleInput("1234567");
        String username = menu.requestUsername();
        // Deliberately truncated password ("has" vs "hash").
        stubConsoleInput("64 digit has");
        String password = menu.requestPassword();
        assertEquals(false, menu.verifyUser(userStore, username, password));
    }

    @Test
    public void getOptionFromUserShouldReturnAnOptionEnumFromValidInput() {
        stubConsoleInput("4");
        assertEquals(Options.LIST_MOVIES, menu.getOptionFromUser(libSystem));
    }
}
/*
 * Copyright 2016-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.castor;

import org.onlab.packet.ARP;
import org.onlab.packet.Ethernet;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DeviceId;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.packet.DefaultOutboundPacket;
import org.onosproject.net.packet.InboundPacket;
import org.onosproject.net.packet.PacketContext;
import org.onosproject.net.packet.PacketProcessor;
import org.onosproject.net.packet.PacketService;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.slf4j.Logger;

import java.nio.ByteBuffer;
import java.util.Optional;
import java.util.Set;

import static org.onlab.packet.Ethernet.TYPE_ARP;
import static org.onosproject.net.packet.PacketPriority.CONTROL;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Component for managing the ARPs.
 *
 * Intercepts all ARP packets at CONTROL priority, relays each request/reply
 * to the connect point of the peer that owns the target IP (proxy-ARP style),
 * records sender IP-to-MAC bindings in the {@link CastorStore}, and triggers
 * layer-2 provisioning for customers whose MAC was previously unknown.
 */
@Component(immediate = true, service = ArpService.class)
public class CastorArpManager implements ArpService {

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected ConnectivityManagerService connectivityManager;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected PacketService packetService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected CastorStore castorStore;

    // Packet processor registered in activate(); nulled out on deactivate().
    private ProxyArpProcessor processor = new ProxyArpProcessor();

    private final Logger log = getLogger(getClass());

    // NOTE(review): FLOW_PRIORITY and ARP_DEST are not referenced anywhere in
    // this class — confirm whether they are dead constants.
    private static final int FLOW_PRIORITY = 500;

    // Source MAC placed in self-originated ARP requests (see buildArpRequest).
    private static final MacAddress ARP_SOURCEMAC = MacAddress.valueOf("00:00:00:00:00:01");
    private static final MacAddress ARP_DEST = MacAddress.valueOf("00:00:00:00:00:00");
    // All-zero target MAC for ARP requests (target hardware address unknown).
    private static final byte[] ZERO_MAC_ADDRESS = MacAddress.ZERO.toBytes();
    // Sender protocol address used in self-originated ARP requests.
    private static final IpAddress ARP_SRC = Ip4Address.valueOf("0.0.0.0");

    private ApplicationId appId;

    // NOTE(review): deviceID is initialized to null and never assigned in this
    // class, so the guard in withdrawIntercepts() is always false and the
    // CONTROL-priority ARP intercept requested in activate() is never
    // cancelled on deactivate — confirm whether this is intentional.
    Optional<DeviceId> deviceID = null;

    // Only ARP is handled; kept as an enum presumably for future protocols.
    private enum Protocol {
        ARP
    }

    private enum MessageType {
        REQUEST, REPLY
    }

    @Activate
    public void activate() {
        appId = coreService.getAppId(Castor.CASTOR_APP);
        // director(1): processing position among registered packet processors.
        packetService.addProcessor(processor, PacketProcessor.director(1));
        requestPackets();
    }

    @Deactivate
    public void deactivate() {
        withdrawIntercepts();
        packetService.removeProcessor(processor);
        processor = null;
    }

    /**
     * Used to request the ARP packets.
     */
    private void requestPackets() {
        TrafficSelector.Builder selectorBuilder = DefaultTrafficSelector.builder();
        selectorBuilder.matchEthType(TYPE_ARP);
        packetService.requestPackets(selectorBuilder.build(), CONTROL, appId);
    }

    /**
     * Withdraws the requested ARP packets.
     */
    private void withdrawIntercepts() {
        // See the NOTE(review) on the deviceID field: this branch is never
        // taken as the code stands.
        if (deviceID != null && deviceID.isPresent()) {
            TrafficSelector.Builder selectorBuilder = DefaultTrafficSelector.builder();
            selectorBuilder.matchEthType(TYPE_ARP);
            packetService.cancelPackets(selectorBuilder.build(), CONTROL, appId, deviceID);
        }
    }

    /**
     * Forwards the ARP packet to the specified connect point via packet out.
     *
     * Looks up the peer owning the target IP; if none matches, the packet is
     * silently dropped (no packet-out is emitted).
     *
     * @param context The packet context
     */
    private void forward(MessageContext context) {
        TrafficTreatment.Builder builder = null;
        Ethernet eth = context.packet();
        ByteBuffer buf = ByteBuffer.wrap(eth.serialize());

        IpAddress target = context.target();
        String value = getMatchingConnectPoint(target);
        if (value != null) {
            ConnectPoint connectPoint = ConnectPoint.deviceConnectPoint(value);
            builder = DefaultTrafficTreatment.builder();
            builder.setOutput(connectPoint.port());
            packetService.emit(new DefaultOutboundPacket(connectPoint.deviceId(), builder.build(), buf));
        }
    }

    @Override
    public void createArp(Peer peer) {
        // Build a broadcast ARP request for the peer's IP and packet-out on
        // the peer's configured connect point.
        Ethernet packet = null;
        packet = buildArpRequest(peer);
        ByteBuffer buf = ByteBuffer.wrap(packet.serialize());
        ConnectPoint connectPoint = ConnectPoint.deviceConnectPoint(peer.getPort());
        TrafficTreatment.Builder builder = DefaultTrafficTreatment.builder();
        builder.setOutput(connectPoint.port());
        packetService.emit(new DefaultOutboundPacket(connectPoint.deviceId(), builder.build(), buf));
    }

    /**
     * Builds the ARP request when MAC is not known.
     *
     * The request uses the fixed ARP_SOURCEMAC/ARP_SRC placeholders as sender
     * and an all-zero target MAC, wrapped in a broadcast Ethernet frame.
     *
     * @param peer The Peer whose MAC is not known.
     * @return Ethernet
     */
    private Ethernet buildArpRequest(Peer peer) {
        ARP arp = new ARP();
        arp.setHardwareType(ARP.HW_TYPE_ETHERNET)
                .setHardwareAddressLength((byte) Ethernet.DATALAYER_ADDRESS_LENGTH)
                .setProtocolType(ARP.PROTO_TYPE_IP)
                .setProtocolAddressLength((byte) IpAddress.INET_BYTE_LENGTH)
                .setOpCode(ARP.OP_REQUEST);

        arp.setSenderHardwareAddress(ARP_SOURCEMAC.toBytes())
                .setSenderProtocolAddress(ARP_SRC.toOctets())
                .setTargetHardwareAddress(ZERO_MAC_ADDRESS)
                .setTargetProtocolAddress(IpAddress.valueOf(peer.getIpAddress()).toOctets());

        Ethernet ethernet = new Ethernet();
        ethernet.setEtherType(Ethernet.TYPE_ARP)
                .setDestinationMACAddress(MacAddress.BROADCAST)
                .setSourceMACAddress(ARP_SOURCEMAC)
                .setPayload(arp);
        ethernet.setPad(true);
        return ethernet;
    }

    /**
     * Gets the matching connect point corresponding to the peering IP address.
     *
     * @param target Target IP address
     * @return Connect point as a String, or null if no peer owns the address
     */
    private String getMatchingConnectPoint(IpAddress target) {
        Set<Peer> peers = castorStore.getAllPeers();
        for (Peer peer : peers) {
            IpAddress match = IpAddress.valueOf(peer.getIpAddress());
            if (match.equals(target)) {
                return peer.getPort();
            }
        }
        return null;
    }

    /**
     * Returns the matching Peer or route server on a Connect Point.
     *
     * NOTE(review): not referenced within this class — presumably kept for
     * symmetry with getMatchingCustomer; confirm before removing.
     *
     * @param connectPoint The peering connect point.
     * @return Peer or Route Server, or null if none matches
     */
    private Peer getMatchingPeer(ConnectPoint connectPoint) {
        for (Peer peer : castorStore.getAllPeers()) {
            if (connectPoint.equals(ConnectPoint.deviceConnectPoint(peer.getPort()))) {
                return peer;
            }
        }
        return null;
    }

    /**
     * Returns matching BGP Peer on a connect point.
     *
     * Searches only the customer subset of the store, unlike getMatchingPeer.
     *
     * @param connectPoint The peering connect point.
     * @return The Peer, or null if none matches
     */
    private Peer getMatchingCustomer(ConnectPoint connectPoint) {
        for (Peer peer : castorStore.getCustomers()) {
            if (connectPoint.equals(ConnectPoint.deviceConnectPoint(peer.getPort()))) {
                return peer;
            }
        }
        return null;
    }

    /**
     * Updates the IP address to mac address map.
     *
     * First write wins: an already-mapped sender IP is never overwritten.
     *
     * @param context The message context.
     */
    private void updateMac(MessageContext context) {
        if ((castorStore.getAddressMap()).containsKey(context.sender())) {
            return;
        }
        Ethernet eth = context.packet();
        MacAddress macAddress = eth.getSourceMAC();
        IpAddress ipAddress = context.sender();
        castorStore.setAddressMap(ipAddress, macAddress);
    }

    /**
     * Setup the layer two flows if not already installed after an ARP packet is received.
     * If the layer 2 status is true, means layer two flows are already provisioned.
     * If the status was false, layer 2 flows will be installed at this point. This
     * happens when the mac address of a peer was not known at the time of its addition.
     *
     * @param msgContext The message context.
     */
    private void handleArpForL2(MessageContext msgContext) {
        ConnectPoint cp = msgContext.inPort();
        Peer peer = getMatchingCustomer(cp);
        if (peer != null && !peer.getl2Status()) {
            connectivityManager.setUpL2(peer);
        }
    }

    @Override
    public boolean handlePacket(PacketContext context) {
        InboundPacket pkt = context.inPacket();
        Ethernet ethPkt = pkt.parsed();

        if (ethPkt == null) {
            return false;
        }

        MessageContext msgContext = createContext(ethPkt, pkt.receivedFrom());

        if (msgContext == null) {
            return false;
        }

        // NOTE(review): REPLY and REQUEST branches are identical; kept as two
        // cases, presumably so they can diverge later.
        switch (msgContext.type()) {
            case REPLY:
                forward(msgContext);
                updateMac(msgContext);
                handleArpForL2(msgContext);
                break;
            case REQUEST:
                forward(msgContext);
                updateMac(msgContext);
                handleArpForL2(msgContext);
                break;
            default:
                return false;
        }
        // Stop further processing of this packet by other processors.
        context.block();
        return true;
    }

    // Dispatches on EtherType; only ARP currently yields a context.
    private MessageContext createContext(Ethernet eth, ConnectPoint inPort) {
        if (eth.getEtherType() == Ethernet.TYPE_ARP) {
            return createArpContext(eth, inPort);
        }
        return null;
    }

    /**
     * Extracts context information from ARP packets.
     *
     * @param eth input Ethernet frame that is thought to be ARP
     * @param inPort in port
     * @return MessageContext object if the packet was a valid ARP packet,
     *         otherwise null
     */
    private MessageContext createArpContext(Ethernet eth, ConnectPoint inPort) {
        if (eth.getEtherType() != Ethernet.TYPE_ARP) {
            return null;
        }
        ARP arp = (ARP) eth.getPayload();
        IpAddress target = Ip4Address.valueOf(arp.getTargetProtocolAddress());
        IpAddress sender = Ip4Address.valueOf(arp.getSenderProtocolAddress());
        MessageType type;
        if (arp.getOpCode() == ARP.OP_REQUEST) {
            type = MessageType.REQUEST;
        } else if (arp.getOpCode() == ARP.OP_REPLY) {
            type = MessageType.REPLY;
        } else {
            // Other ARP opcodes (e.g. RARP) are not handled.
            return null;
        }
        return new MessageContext(eth, inPort, Protocol.ARP, type, target, sender);
    }

    /**
     * Immutable view of one received ARP packet: the raw frame, its ingress
     * point, and the decoded protocol/type/target/sender fields.
     */
    private class MessageContext {
        private Protocol protocol;
        private MessageType type;

        private IpAddress target;
        private IpAddress sender;

        private Ethernet eth;
        private ConnectPoint inPort;

        public MessageContext(Ethernet eth, ConnectPoint inPort, Protocol protocol,
                              MessageType type, IpAddress target, IpAddress sender) {
            this.eth = eth;
            this.inPort = inPort;
            this.protocol = protocol;
            this.type = type;
            this.target = target;
            this.sender = sender;
        }

        public ConnectPoint inPort() {
            return inPort;
        }

        public Ethernet packet() {
            return eth;
        }

        public Protocol protocol() {
            return protocol;
        }

        public MessageType type() {
            return type;
        }

        public VlanId vlan() {
            return VlanId.vlanId(eth.getVlanID());
        }

        public MacAddress srcMac() {
            return MacAddress.valueOf(eth.getSourceMACAddress());
        }

        public IpAddress target() {
            return target;
        }

        public IpAddress sender() {
            return sender;
        }
    }

    /** Packet processor that funnels inbound ARP frames into handlePacket. */
    private class ProxyArpProcessor implements PacketProcessor {

        @Override
        public void process(PacketContext context) {
            // Skip packets already consumed by an earlier processor.
            if (context.isHandled()) {
                return;
            }

            InboundPacket pkt = context.inPacket();
            Ethernet ethPkt = pkt.parsed();
            if (ethPkt == null) {
                return;
            }
            if (ethPkt.getEtherType() == TYPE_ARP) {
                //handle the arp packet.
                handlePacket(context);
            } else {
                return;
            }
        }
    }
}
/**
 * Copyright 2008-2017 Qualogy Solutions B.V.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.qualogy.qafe.gwt.client.component;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.google.gwt.gen2.table.client.ColumnDefinition;
import com.google.gwt.gen2.table.client.MutableTableModel;
import com.google.gwt.gen2.table.client.TableModelHelper.Request;
import com.google.gwt.gen2.table.client.TableModelHelper.Response;
import com.google.gwt.gen2.table.client.TableModelHelper.SerializableResponse;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.UIObject;
import com.qualogy.qafe.gwt.client.context.ClientApplicationContext;
import com.qualogy.qafe.gwt.client.ui.renderer.events.CallbackHandler;
import com.qualogy.qafe.gwt.client.vo.functions.DataContainerGVO;
import com.qualogy.qafe.gwt.client.vo.ui.event.EventListenerGVO;
import com.qualogy.qafe.gwt.client.vo.ui.event.InputVariableGVO;

/**
 * An iterator that serves as the data source for TableOracle requests.
 *
 * Bridges the GWT incubator table's pull model (requestRows) to the QAFe
 * event system: a paging/sorting/CRUD gesture on the table is translated into
 * an application event (via CallbackHandler), and the asynchronous result is
 * later pushed back through processData into the table callback.
 */
public class QTableModel extends MutableTableModel<DataContainerGVO> {

    // Reserved variable names exposed to server-side event handlers
    // (populated in createInternalVars).
    public static final String KEY_WORD_SORT_ON_COLUMN = "$SORT_COLUMN";
    public static final String KEY_WORD_SORT_ORDER = "$SORT_ORDER";
    public static final String KEY_WORD_SORT_ORDER_ASC = "asc";
    public static final String KEY_WORD_SORT_ORDER_DESC = "desc";
    public final static String KEY_WORD_PAGESIZE = "$PAGESIZE";
    public final static String KEY_WORD_PAGE_NUMBER = "$OFFSET";

    // NOTE(review): name misspells "KEYWORDS"; it is public, so renaming
    // could break external users — left as-is.
    public static final String[] RESERVED_KEWORDS = {KEY_WORD_SORT_ON_COLUMN, KEY_WORD_SORT_ORDER, KEY_WORD_PAGESIZE, KEY_WORD_PAGE_NUMBER};

    /**
     * Callback/request of the most recent requestRows call; replayed by
     * processData when the asynchronous data arrives.
     */
    private Callback<DataContainerGVO> currentCallback = null;
    private Request currentRequest = null;

    /**
     * which button is responsible for the sending part!
     */
    private UIObject sender;

    // The scroll table this model feeds; set via setSource.
    private QPagingScrollTable source;

    // Last requested sort column/order; carried into createInternalVars.
    private String sortColumn = null;
    private String sortOrder = null;

    public QTableModel(){
        // create request and response yourself
        currentRequest= new Request(0,50);//,new TableModelHelper.ColumnSortList());
        currentCallback = new Callback<DataContainerGVO>(){
            public void onFailure(Throwable caught) {
                ClientApplicationContext.getInstance().log("Setting datagrid values failed", caught);
            }
            public void onRowsReady(Request request, Response<DataContainerGVO> response) {
                // Hand the rows straight to the scroll table.
                source.setData(request, response);
            }};
    }

    @Override
    public void requestRows(final Request request, final Callback<DataContainerGVO> callback) {
        requestRows(request, callback, null);
    }

    /**
     * Translates a table data request into a QAFe event.
     *
     * Determines which gesture triggered the request (paging/CRUD button via
     * the sender's style name, or sorting via the request's sort list), picks
     * the matching listener type, and fires the bound event through
     * CallbackHandler. The sender reference is cleared afterwards.
     *
     * @param request       the table's row request (offset, count, sort list)
     * @param callback      receiver for the rows once they arrive
     * @param resultHandler optional hook run after the event completes; may be null
     */
    public void requestRows(final Request request, final Callback<DataContainerGVO> callback, final ResultHandler resultHandler) {
        currentRequest = request;
        currentCallback = callback;

        final int col = request.getColumnSortList().getPrimaryColumn();
        final boolean ascending = request.getColumnSortList().isPrimaryAscending();

        boolean canCreateCallback = false;
        String listenerType = null;
        Map<String,Object> internalVars = null;

        // col == -1 means no primary sort column was set.
        boolean isSorting = (col > -1);
        boolean isPaging = false;
        boolean isDelete = false;
        boolean isAdd = false;
        boolean isSave = false;
        boolean isCancel = false;
        boolean isRefresh = false;
        if (sender != null) {
            // Classify the triggering control by its CSS style name suffix.
            isPaging = isPagingControl(sender.getStyleName());
            isDelete = isDeleteControl(sender.getStyleName());
            isAdd = isAddControl(sender.getStyleName());
            isSave = isSaveControl(sender.getStyleName());
            isCancel = isCancelControl(sender.getStyleName());
            isRefresh = isRefreshControl(sender.getStyleName());
        }

        if (isPaging || isDelete || isAdd || isSave || isCancel || isRefresh) {
            canCreateCallback = true;
            listenerType = "onclick";
            // Infinite-scroll case: the table itself becomes the sender.
            if(source.isScrollEvent()) {
                sender = source;
                listenerType = "onscroll-bottom";
                source.setScrollEvent(false); // setting to default value
            }
        } else if (isSorting) {
            canCreateCallback = true;
            sender = source;
            listenerType = "onfetchdata";
            sortColumn = lookupColumnName(col);
            sortOrder = ascending ? KEY_WORD_SORT_ORDER_ASC : KEY_WORD_SORT_ORDER_DESC;
        }

        internalVars = createInternalVars();

        if (canCreateCallback) {
            // registering result handler to do the post event body actions.
            registerResultHandler(sender, listenerType, resultHandler);

            EventListenerGVO eventListenerGVO = lookupEvent(sender, listenerType);
            List<InputVariableGVO> inputVars = (eventListenerGVO != null) ? eventListenerGVO.getInputvariablesList() : new ArrayList<InputVariableGVO>();
            CallbackHandler.createCallBack(sender, listenerType, eventListenerGVO, inputVars, internalVars,null);

            // When paging the sender is set, but when sorting the sender is not set
            // so "refreshing" the sender by setting it to null
            setSender(null);
        }
    }

    // Registers the post-event hook keyed on the sender's DOM id, if both
    // sender and handler are present.
    private void registerResultHandler(UIObject sender, String listenerType, ResultHandler resultHandler) {
        if ((sender != null) && (resultHandler != null)) {
            String senderId = DOM.getElementAttribute(sender.getElement(), "id");
            ClientApplicationContext.getInstance().addResultHandler(senderId, listenerType, resultHandler);
        }
    }

    /**
     * Resolves a visible column index to its data-field name.
     *
     * @param columnIndex index into the visible column definitions
     * @return the field name, or "" when the index is out of range or the
     *         definition is not a QColumnDefinition
     */
    protected String lookupColumnName(int columnIndex) {
        if (source.getTableDefinition() != null) {
            List<ColumnDefinition<DataContainerGVO,?>> columnDefinitionList = source.getTableDefinition().getVisibleColumnDefinitions();
            if ((columnDefinitionList != null) && (columnDefinitionList.size() > columnIndex)) {
                ColumnDefinition<DataContainerGVO,?> columnDefinition = columnDefinitionList.get(columnIndex);
                if (columnDefinition instanceof QColumnDefinition) {
                    QColumnDefinition qColumnDefinition = (QColumnDefinition)columnDefinition;
                    return qColumnDefinition.getField();
                }
            }
        }
        return "";
    }

    /**
     * Finds the event bound to the given UI object for the given listener type.
     *
     * The component id is taken from the DOM "id" attribute, truncated at the
     * first '|' (ids are presumably encoded as "componentId|suffix" — note
     * that an id without a pipe is never matched).
     *
     * @return the matching event, or null when none is bound
     */
    protected EventListenerGVO lookupEvent(UIObject uiObject, String listenerType) {
        if (source.getSource() != null) {
            String uiObjectId = DOM.getElementAttribute(uiObject.getElement(), "id");
            if (uiObjectId != null) {
                int pipeIndex = uiObjectId.indexOf("|");
                if (pipeIndex > -1) {
                    uiObjectId = uiObjectId.substring(0, pipeIndex);
                    if (source.getSource().getEvents() != null) {
                        for (EventListenerGVO eventGVO : source.getSource().getEvents()) {
                            if ((eventGVO.getEventListenerType() != null) && eventGVO.getEventListenerType().equals(listenerType)
                                    && (eventGVO.getEventComponentId() != null) && eventGVO.getEventComponentId().equals(uiObjectId)) {
                                return eventGVO;
                            }
                        }
                    }
                }
            }
        }
        return null;
    }

    // ---- Sender classification by style-name suffix -------------------------

    protected boolean isDeleteControl(String senderName) {
        if (senderName != null) {
            return senderName.endsWith(QPagingScrollTableOperation.STYLE_DELETE);
        }
        return false;
    }

    protected boolean isAddControl(String senderName) {
        if (senderName != null) {
            return senderName.endsWith(QPagingScrollTableOperation.STYLE_ADD);
        }
        return false;
    }

    protected boolean isSaveControl(String senderName) {
        if (senderName != null) {
            return senderName.endsWith(QPagingScrollTableOperation.STYLE_SAVE);
        }
        return false;
    }

    protected boolean isCancelControl(String senderName) {
        if (senderName != null) {
            return senderName.endsWith(QPagingScrollTableOperation.STYLE_CANCEL);
        }
        return false;
    }

    protected boolean isRefreshControl(String senderName) {
        if (senderName != null) {
            return senderName.endsWith(QPagingScrollTableOperation.STYLE_REFRESH);
        }
        return false;
    }

    protected boolean isPagingControl(String senderName) {
        if (senderName != null) {
            if (senderName.endsWith(QPagingOptions.STYLE_FIRSTPAGE)
                    || senderName.endsWith(QPagingOptions.STYLE_PREVIOUSPAGE)
                    || senderName.endsWith(QPagingOptions.STYLE_NEXTPAGE)
                    || senderName.endsWith(QPagingOptions.STYLE_LASTPAGE)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Builds the reserved-variable map handed to the event handler:
     * current sort column/order plus page size and current page number.
     */
    protected Map<String,Object> createInternalVars() {
        Map<String,Object> internalVars = new HashMap<String,Object>();
        // sorting
        internalVars.put(KEY_WORD_SORT_ON_COLUMN, sortColumn);
        internalVars.put(KEY_WORD_SORT_ORDER, sortOrder);
        // paging
        internalVars.put(KEY_WORD_PAGESIZE, String.valueOf(source.getPageSize()));
        internalVars.put(KEY_WORD_PAGE_NUMBER, String.valueOf(source.getCurrentPage()));
        return internalVars;
    }

    // True when a request/callback pair is pending (both fields are assigned
    // in the constructor, so in practice this is true after construction).
    public boolean isByPagingRequest(){
        return (currentRequest!=null && currentCallback!=null);
    }

    /**
     * Pushes asynchronously-arrived rows into the pending table callback.
     * No-op when the row list is null or no request/callback is pending.
     */
    public void processData(List<DataContainerGVO> listOfDataMap){
        if (listOfDataMap!=null){
            if (currentRequest!=null && currentCallback!=null){
                setRowCount(listOfDataMap.size());
                SerializableResponse<DataContainerGVO> response = new SerializableResponse<DataContainerGVO>(listOfDataMap);
                currentCallback.onRowsReady(currentRequest, response);
            }
        }
    }

    // Mutation hooks from MutableTableModel: all edits are accepted
    // unconditionally; persistence is handled via the event system instead.
    @Override
    protected boolean onRowInserted(int beforeRow) {
        return true;
    }

    @Override
    protected boolean onRowRemoved(int row) {
        return true;
    }

    @Override
    protected boolean onSetRowValue(int row, DataContainerGVO rowValue) {
        return true;
    }

    public void setSender(UIObject sender) {
        this.sender = sender;
    }

    public void setSource(QPagingScrollTable pagingScrollTable) {
        this.source= pagingScrollTable;
    }
}
package org.pengyr.tool.core.data; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.UiThread; import android.support.v7.widget.RecyclerView; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import org.pengyr.tool.core.log.Logger; /** * to keep change data for adapter * <p> * only refresh data when adapter is on main ui thread * <p> * Created by Peng on 2017/6/13. */ public class DataProvider<D> { private final static String TAG = DataProvider.class.getSimpleName(); @NonNull protected List<D> data; @NonNull protected Set<D> notifyIDs; @Nullable protected PositionFixer positionFixer; @Nullable protected RecyclerView.Adapter<?> adapter; public DataProvider() { data = new ArrayList<>(); notifyIDs = new HashSet<>(); } public void addNotifiID(@NonNull D data) { if (adapter != null) return; notifyIDs.add(data); } public boolean has(@NonNull D d) { return find(d) >= 0; } public int find(@NonNull D d) { return data.indexOf(d); } @Nullable public D get(int position) { if (position < 0) return null; if (position >= count()) return null; return data.get(position); } /** * add Methods */ public synchronized void add(@NonNull D model) { if (adapter != null) { addCurrent(model, count()); return; } notifyIDs.add(model); } public synchronized void addAll(@NonNull D[] datas) { for (D d : datas) { add(d); } } public synchronized void addAll(@NonNull Collection<D> datas) { for (D d : datas) { add(d); } } /** * remove methods */ public synchronized void remove(@NonNull D data) { if (adapter != null) { removeCurrent(data); return; } addNotifiID(data); } public synchronized void remove(@NonNull List<D> datas) { for (D d : datas) { remove(d); } } /** * update methods */ public synchronized void update(@NonNull D id) { if (adapter != null) { updateCurrent(id); return; } addNotifiID(id); } /** * Bind adapter to refresh ui when data change * 
Must bind(null) when adapter view is not shown */ public void unbind() { this.adapter = null; } public void bind(@NonNull RecyclerView.Adapter<?> adapter) { bind(adapter, null); } public void bind(@NonNull RecyclerView.Adapter<?> adapter, @Nullable OnNotifyListener<D> onNotifyListener) { this.adapter = adapter; if (onNotifyListener != null) { onNotifyListener.onNotify(notifyIDs); notifyIDs.clear(); } } protected int fixAdapterPosition(int position) { if (positionFixer == null) return position; return positionFixer.getFixPosition(position); } public void setPositionFixer(@NonNull PositionFixer positionFixer) { this.positionFixer = positionFixer; } /** * update current data only when adapter is exist * must run on main ui thread * * @param id update value * @param index update index (if need) */ @UiThread public synchronized void addCurrent(@NonNull D id, int index) { if (adapter == null) return; if (data.indexOf(id) >= 0) { removeCurrent(id); index--; } if (index < 0) index = 0; if (index > count()) index = count(); data.add(index, id); adapter.notifyItemInserted(fixAdapterPosition(index)); } @UiThread public synchronized void removeCurrent(@NonNull D d) { if (adapter == null) return; int index = data.indexOf(d); if (index < 0) return; if (index >= count()) return; data.remove(index); adapter.notifyItemRemoved(fixAdapterPosition(index)); } @UiThread public synchronized void updateCurrent(@NonNull D d) { if (adapter == null) return; int index = data.indexOf(d); if (index < 0) return; if (index >= count()) return; adapter.notifyItemChanged(fixAdapterPosition(index)); } /** * pump out all notifyIDs into dataIDs * * @return all data in list */ public List<D> pumpList() { for (D d : notifyIDs) { if (!data.contains(d)) { data.add(d); } } return data; } /** * @return current data */ public List<D> list() { return data; } /** * @return notify data */ public Set<D> getNotifyIDs() { return notifyIDs; } public int count() { return data.size(); } public boolean isEmpty() { 
return count() == 0; } public void clear() { if (adapter == null) return; data.clear(); adapter.notifyDataSetChanged(); } public void destroy() { unbind(); positionFixer = null; adapter = null; data.clear(); notifyIDs.clear(); } public interface PositionFixer { int getFixPosition(int position); } public interface OnNotifyListener<P> { void onNotify(Set<P> notifyIDs); } }
/*
 * ARX: Powerful Data Anonymization
 * Copyright 2012 - 2018 Fabian Prasser and contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.deidentifier.arx.gui.model;

import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.deidentifier.arx.ARXAnonymizer;
import org.deidentifier.arx.ARXConfiguration;
import org.deidentifier.arx.ARXLattice.ARXNode;
import org.deidentifier.arx.ARXPopulationModel;
import org.deidentifier.arx.ARXProcessStatistics;
import org.deidentifier.arx.ARXResult;
import org.deidentifier.arx.AttributeType;
import org.deidentifier.arx.AttributeType.Hierarchy;
import org.deidentifier.arx.AttributeType.MicroAggregationFunction;
import org.deidentifier.arx.DataDefinition;
import org.deidentifier.arx.DataHandle;
import org.deidentifier.arx.DataSubset;
import org.deidentifier.arx.aggregates.HierarchyBuilder;
import org.deidentifier.arx.criteria.Inclusion;
import org.deidentifier.arx.criteria.PrivacyCriterion;
import org.deidentifier.arx.gui.resources.Resources;
import org.deidentifier.arx.io.CSVSyntax;
import org.deidentifier.arx.metric.MetricConfiguration;
import org.deidentifier.arx.metric.MetricDescription;

/**
 * This class implements a large portion of the model used by the GUI.
 *
 * @author Fabian Prasser
 * @author James Gaupp
 */
public class Model implements Serializable {

    /**
     * The currently selected perspective
     * @author Fabian Prasser
     */
    public static enum Perspective {
        CONFIGURATION,
        EXPLORATION,
        ANALYSIS,
        RISK
    }

    /** SVUID. */
    private static final long serialVersionUID = -7669920657919151279L;

    /* *****************************************
     * TRANSIENT VARIABLES
     *******************************************/

    /** The current anonymizer, if any. */
    private transient ARXAnonymizer anonymizer = null;

    /** The current output data. */
    private transient DataHandle output = null;

    /** The currently displayed transformation. */
    private transient ARXNode outputNode = null;

    /** The path to the project file. */
    private transient String path = null;

    /** The current result. */
    private transient ARXResult result = null;

    /** The currently selected node. */
    private transient ARXNode selectedNode = null;

    /** The clip board. */
    private transient ModelClipboard clipboard = null;

    /** The perspective */
    private transient Perspective perspective = Perspective.CONFIGURATION;

    /* *****************************************
     * PARAMETERS AND THRESHOLDS
     *******************************************/

    /** Anonymization parameter. */
    private double snapshotSizeDataset = 0.2d;

    /** Anonymization parameter. */
    private double snapshotSizeSnapshot = 0.8d;

    /** Anonymization parameter. */
    private int historySize = 200;

    /** Threshold. */
    private int maximalSizeForComplexOperations = 5000000;

    /** Threshold. */
    private int initialNodesInViewer = 100;

    /** Threshold. */
    private int maxNodesInViewer = 700;

    /* *****************************************
     * PROJECT METADATA
     ******************************************/

    /** The project description. */
    private String description;

    /** The size of the input file. */
    private long inputBytes = 0L;

    /** Is the project file modified. */
    private boolean modified = false;

    /** The project name.
     */
    private String name = null;

    /** Left for backwards compatibility only! */
    private char separator = ';'; //$NON-NLS-1$

    /** The projects CSV syntax */
    private CSVSyntax csvSyntax;

    /** Execution time of last anonymization. */
    private long time;

    /** Locale. */
    private Locale locale = null;

    /** The audit trail */
    private List<ModelAuditTrailEntry> auditTrail = new ArrayList<ModelAuditTrailEntry>();

    /** Standard charset since ARX > 3.7.1. Older projects will have the value <code>null</code>*/
    private String charset = "UTF-8";

    /* *****************************************
     * DEBUGGING
     ******************************************/

    /** Is the debugging mode enabled. */
    private boolean debugEnabled = false;

    /* *****************************************
     * VISUALIZATIONS
     ******************************************/

    /** Indices of groups in the current output view. */
    private int[] groups;

    /** Label. */
    private String optimalNodeAsString;

    /** Label. */
    private String outputNodeAsString;

    /** Current selection. */
    private String selectedClassValue = null;

    /** Current selection. */
    private String selectedAttribute = null;

    /** Enable/disable. */
    private Boolean showVisualization = true;

    /** Last two selections. */
    private String[] pair = new String[] { null, null };

    /* *****************************************
     * SUBSET MANAGEMENT
     ******************************************/

    /** Query. */
    private String query = ""; //$NON-NLS-1$

    /** Origin of current subset. */
    private String subsetOrigin = "All"; //$NON-NLS-1$

    /* *****************************************
     * SUB-MODELS
     ******************************************/

    /** The current input configuration. */
    private ModelConfiguration inputConfig = new ModelConfiguration();

    /** A filter describing which transformations are currently selected. */
    private ModelNodeFilter nodeFilter = null;

    /** Configuration of the data view. */
    private ModelViewConfig viewConfig = new ModelViewConfig();

    /** The current output configuration.
     */
    private ModelConfiguration outputConfig = null;

    /** The current risk model. */
    private ModelRisk riskModel = null;

    /* *****************************************
     * PRIVACY CRITERIA
     * *****************************************/

    /** Model for a specific privacy criterion. */
    private ModelDPresenceCriterion dPresenceModel = new ModelDPresenceCriterion();

    /** Model for a specific privacy criterion. */
    private ModelKMapCriterion kMapModel = new ModelKMapCriterion();

    /** Model for a specific privacy criterion. */
    private ModelKAnonymityCriterion kAnonymityModel = new ModelKAnonymityCriterion();

    /** Model for a specific privacy criterion (keyed by attribute name). */
    private Map<String, ModelLDiversityCriterion> lDiversityModel = new HashMap<String, ModelLDiversityCriterion>();

    /** Model for a specific privacy criterion (keyed by attribute name). */
    private Map<String, ModelTClosenessCriterion> tClosenessModel = new HashMap<String, ModelTClosenessCriterion>();

    /** Model for a specific privacy criterion. */
    private Set<ModelRiskBasedCriterion> riskBasedModel = new HashSet<ModelRiskBasedCriterion>();

    /** Model for a specific privacy criterion. */
    private ModelDifferentialPrivacyCriterion differentialPrivacyModel = new ModelDifferentialPrivacyCriterion();

    /** Model for a specific privacy criterion (keyed by attribute name). */
    private Map<String, ModelDDisclosurePrivacyCriterion> dDisclosurePrivacyModel = new HashMap<String, ModelDDisclosurePrivacyCriterion>();

    /** Model for a specific privacy criterion. */
    private ModelProfitabilityCriterion stackelbergPrivacyModel = new ModelProfitabilityCriterion();

    /** Model for a specific privacy criterion (keyed by attribute name). */
    private Map<String, ModelBLikenessCriterion> bLikenessModel = new HashMap<String, ModelBLikenessCriterion>();

    /* *****************************************
     * UTILITY ANALYSIS
     ******************************************/

    /** Configuration. */
    private MetricConfiguration metricConfig = null;

    /** Description.
     */
    private MetricDescription metricDescription = null;

    /** Summary statistics */
    private Boolean useListwiseDeletion = true;

    /** Utility estimation during anonymization */
    private Boolean useFunctionalHierarchies = true;

    /* *****************************************
     * RISK ANALYSIS
     ******************************************/

    /** Selected quasi identifiers */
    private Set<String> selectedQuasiIdentifiers = null;

    /* *****************************************
     * LOCAL RECODING
     ******************************************/

    /** The local recoding model */
    private ModelLocalRecoding localRecodingModel = new ModelLocalRecoding();

    /** Heuristic search threshold */
    private Integer heuristicSearchThreshold;

    /** Heuristic search time limit */
    private Integer heuristicSearchTimeLimit;

    /** Heuristic search step limit */
    private Integer heuristicSearchStepLimit;

    /** General anonymization configuration. Proxy for some fields for backwards compatibility */
    private ModelAnonymizationConfiguration anonymizationConfiguration;

    /* *****************************************
     * Data Mining
     * *****************************************
     */

    /** Selected attributes */
    private Set<String> selectedFeatures = null;

    /** Selected attributes */
    private Set<String> selectedClasses = null;

    /** Model */
    private ModelClassification classificationModel = new ModelClassification();

    /* *****************************************
     * Information about the last anonymization process
     * *****************************************
     */

    /** Statistics about the last optimization process */
    private ARXProcessStatistics optimizationStatistics = null;

    /**
     * Creates a new instance.
* * @param name * @param description * @param locale */ public Model(final String name, final String description, Locale locale) { this.name = name; this.description = description; this.locale = locale; setModified(); } /** * Adds an entry to the audit trail * @param entry */ public void addAuditTrailEntry(ModelAuditTrailEntry entry) { this.getAuditTrail().add(entry); this.setModified(); } /** * Creates an anonymizer for the current config. * * @return */ public ARXAnonymizer createAnonymizer() { // Initialize anonymizer this.anonymizer = new ARXAnonymizer(); this.anonymizer.setHistorySize(getHistorySize()); this.anonymizer.setMaximumSnapshotSizeDataset(getSnapshotSizeDataset()); this.anonymizer.setMaximumSnapshotSizeSnapshot(getSnapshotSizeSnapshot()); // Add all criteria this.createConfig(); // Return the anonymizer return anonymizer; } /** * Replaces the output config with a clone of the input config. */ public void createClonedConfig() { outputConfig = inputConfig.clone(); this.setModified(); } /** * Creates an ARXConfiguration. 
*/
    public void createConfig() {

        ModelConfiguration config = getInputConfig();
        DataDefinition definition = getInputDefinition();

        // Initialize the config
        config.removeAllCriteria();
        if (definition == null) return;

        // Initialize the metric
        config.setMetric(this.getMetricDescription().createInstance(this.getMetricConfiguration()));

        // Initialize definition
        for (String attr : definition.getQuasiIdentifyingAttributes()) {

            // Reset everything previously configured for this attribute
            definition.resetAttributeType(attr);
            definition.resetHierarchy(attr);
            definition.resetHierarchyBuilder(attr);
            definition.resetMaximumGeneralization(attr);
            definition.resetMicroAggregationFunction(attr);
            definition.resetMinimumGeneralization(attr);

            // This increases the precision of the Loss utility measure
            if (this.getUseFunctionalHierarchies() && config.getHierarchyBuilder(attr) != null) {
                definition.setHierarchy(attr, config.getHierarchyBuilder(attr));
            } else {
                definition.setHierarchy(attr, (HierarchyBuilder<?>)null);
            }

            // Set hierarchy (a materialized hierarchy overrides the builder set above)
            Hierarchy hierarchy = config.getHierarchy(attr);
            if (hierarchy != null && hierarchy.getHierarchy() != null) {
                definition.setHierarchy(attr, hierarchy);
            }

            // Set attribute type
            definition.setAttributeType(attr, AttributeType.QUASI_IDENTIFYING_ATTRIBUTE);

            // Prepare for micro-aggregation
            if (config.getTransformationMode(attr) == ModelTransformationMode.MICRO_AGGREGATION) {
                MicroAggregationFunction function = config.getMicroAggregationFunction(attr)
                                                          .createInstance(config.getMicroAggregationIgnoreMissingData(attr));
                definition.setMicroAggregationFunction(attr, function);

            // Prepare for micro-aggregation with clustering
            } else if (config.getTransformationMode(attr) == ModelTransformationMode.CLUSTERING_AND_MICRO_AGGREGATION) {
                MicroAggregationFunction function = config.getMicroAggregationFunction(attr)
                                                          .createInstance(config.getMicroAggregationIgnoreMissingData(attr));
                definition.setMicroAggregationFunction(attr, function, true);

            // Prepare for generalization
            } else {
                definition.setMicroAggregationFunction(attr, null);
                Integer min = config.getMinimumGeneralization(attr);
                Integer max = config.getMaximumGeneralization(attr);
                if (min != null) {
                    definition.setMinimumGeneralization(attr, min);
                }
                if (max != null) {
                    definition.setMaximumGeneralization(attr, max);
                }
            }
        }

        // Transfer each enabled privacy model into the configuration
        if (this.differentialPrivacyModel != null && this.differentialPrivacyModel.isEnabled()) {
            config.addCriterion(this.differentialPrivacyModel.getCriterion(this));
            // Convert the fraction of epsilon to use for automatic generalization to the absolute budget required by the config
            double dpSearchBudget = this.differentialPrivacyModel.getEpsilon() * this.differentialPrivacyModel.getEpsilonGeneralizationFraction();
            config.getConfig().setDPSearchBudget(dpSearchBudget);
        }
        if (this.kAnonymityModel != null && this.kAnonymityModel.isEnabled()) {
            config.addCriterion(this.kAnonymityModel.getCriterion(this));
        }
        if (this.kMapModel != null && this.kMapModel.isEnabled()) {
            config.addCriterion(this.kMapModel.getCriterion(this));
        }
        if (this.dPresenceModel != null && this.dPresenceModel.isEnabled()) {
            config.addCriterion(this.dPresenceModel.getCriterion(this));
        }
        if (this.stackelbergPrivacyModel != null && this.stackelbergPrivacyModel.isEnabled()) {
            config.addCriterion(this.stackelbergPrivacyModel.getCriterion(this));
        }
        for (Entry<String, ModelLDiversityCriterion> entry : this.lDiversityModel.entrySet()){
            if (entry.getValue() != null && entry.getValue().isEnabled()) {
                config.addCriterion(entry.getValue().getCriterion(this));
            }
        }
        for (Entry<String, ModelTClosenessCriterion> entry : this.tClosenessModel.entrySet()){
            if (entry.getValue() != null && entry.getValue().isEnabled()) {
                if (entry.getValue().getVariant()==1){ // EMD with hierarchy
                    // EMD-with-hierarchy needs a hierarchy object to exist, even if empty
                    if (config.getHierarchy(entry.getValue().getAttribute()) == null){
                        config.setHierarchy(entry.getValue().getAttribute(), Hierarchy.create());
                    }
                }
                PrivacyCriterion criterion = entry.getValue().getCriterion(this);
                config.addCriterion(criterion);
            }
        }
        for (Entry<String, ModelDDisclosurePrivacyCriterion> entry : this.dDisclosurePrivacyModel.entrySet()){
            if (entry.getValue() != null && entry.getValue().isEnabled()) {
                config.addCriterion(entry.getValue().getCriterion(this));
            }
        }
        for (Entry<String, ModelBLikenessCriterion> entry : this.bLikenessModel.entrySet()){
            if (entry.getValue() != null && entry.getValue().isEnabled()) {
                config.addCriterion(entry.getValue().getCriterion(this));
            }
        }
        for (ModelRiskBasedCriterion entry : this.riskBasedModel){
            if (entry != null && entry.isEnabled()) {
                PrivacyCriterion criterion = entry.getCriterion(this);
                config.addCriterion(criterion);
            }
        }

        // If a subset has been defined
        if (config.getInput() != null && config.getResearchSubset() != null &&
            config.getResearchSubset().size() != config.getInput().getHandle().getNumRows()) {

            // Configure it, if not done already
            boolean subsetDefined = false;
            for (PrivacyCriterion c : config.getCriteria()) {
                // (e,d)-DP will return false at this point,
                // but consistency will be checked by ARXConfiguration.initialize(...)
                subsetDefined |= c.isSubsetAvailable();
            }

            // Add, if missing
            if (!subsetDefined) {
                DataSubset subset = DataSubset.create(config.getInput(), config.getResearchSubset());
                config.addCriterion(new Inclusion(subset));
            }
        }
    }

    /**
     * Returns the current anonymization configuration (lazily created).
     * @return
     */
    public ModelAnonymizationConfiguration getAnonymizationConfiguration() {
        if (anonymizationConfiguration == null) {
            anonymizationConfiguration = new ModelAnonymizationConfiguration(this);
        }
        return anonymizationConfiguration;
    }

    /**
     * Returns the current anonymizer.
     *
     * @return
     */
    public ARXAnonymizer getAnonymizer() {
        return anonymizer;
    }

    /**
     * Returns the last two selected attributes.
* @return
     */
    public String[] getAttributePair() {
        if (pair == null) pair = new String[] { null, null };
        return pair;
    }

    /** Returns the audit trail (lazily created). */
    public List<ModelAuditTrailEntry> getAuditTrail() {
        if (this.auditTrail == null) {
            this.auditTrail = new ArrayList<ModelAuditTrailEntry>();
        }
        return auditTrail;
    }

    /**
     * Returns the b-Likeness privacy model, lazily initialized with one
     * criterion per input column.
     */
    public Map<String, ModelBLikenessCriterion> getBLikenessModel() {
        if (this.bLikenessModel == null) {
            this.bLikenessModel = new HashMap<String, ModelBLikenessCriterion>();
            // NOTE(review): assumes inputConfig.getInput() is non-null here — confirm callers
            DataHandle handle = inputConfig.getInput().getHandle();
            for (int col = 0; col < handle.getNumColumns(); col++) {
                String attribute = handle.getAttributeName(col);
                bLikenessModel.put(attribute, new ModelBLikenessCriterion(attribute));
            }
        }
        return bLikenessModel;
    }

    /**
     * Return charset. Returns <code>null</code> for projects with unknown charset, "UTF-8" else.
     */
    public String getCharset() {
        return this.charset;
    }

    /** Returns the classification model (lazily created). */
    public ModelClassification getClassificationModel() {
        if (this.classificationModel == null) {
            this.classificationModel = new ModelClassification();
        }
        return this.classificationModel;
    }

    /** Returns the clipboard (lazily created). */
    public ModelClipboard getClipboard(){
        if (clipboard==null){
            clipboard = new ModelClipboard();
        }
        return clipboard;
    }

    /**
     * Gets the csv config model, lazily created from the legacy separator
     * field for backwards compatibility.
     */
    public CSVSyntax getCSVSyntax() {
        if (csvSyntax == null) {
            csvSyntax = new CSVSyntax();
            csvSyntax.setDelimiter(separator);
        }
        return csvSyntax;
    }

    /**
     * Returns the d-disclosure privacy model, lazily initialized with one
     * criterion per input column.
     */
    public Map<String, ModelDDisclosurePrivacyCriterion> getDDisclosurePrivacyModel() {
        if (this.dDisclosurePrivacyModel == null) {
            this.dDisclosurePrivacyModel = new HashMap<String, ModelDDisclosurePrivacyCriterion>();
            // NOTE(review): assumes inputConfig.getInput() is non-null here — confirm callers
            DataHandle handle = inputConfig.getInput().getHandle();
            for (int col = 0; col < handle.getNumColumns(); col++) {
                String attribute = handle.getAttributeName(col);
                dDisclosurePrivacyModel.put(attribute, new ModelDDisclosurePrivacyCriterion(attribute));
            }
        }
        return dDisclosurePrivacyModel;
    }

    /** Returns the project description. */
    public String getDescription() {
        return description;
    }

    /** Returns the (e,d)-DP model (lazily created). */
    public ModelDifferentialPrivacyCriterion getDifferentialPrivacyModel() {
        if (this.differentialPrivacyModel == null) {
            this.differentialPrivacyModel = new ModelDifferentialPrivacyCriterion();
        }
        return differentialPrivacyModel;
    }

    /** Returns the d-presence model. */
    public ModelDPresenceCriterion getDPresenceModel() {
        return dPresenceModel;
    }

    /** Returns a list of indices of all equivalence classes. */
    public int[] getGroups() {
        // TODO: Refactor to colors[groups[row]]
        return this.groups;
    }

    /**
     * @return the heuristicSearchStepLimit (defaults to 1000 on first access)
     */
    public Integer getHeuristicSearchStepLimit() {
        if (this.heuristicSearchStepLimit == null) {
            // Assign-and-return: persists the default into the field
            return this.heuristicSearchStepLimit = 1000;
        }
        return heuristicSearchStepLimit;
    }

    /**
     * @return the heuristicSearchThreshold (defaults to 100000 on first access)
     */
    public Integer getHeuristicSearchThreshold() {
        if (this.heuristicSearchThreshold == null) {
            return this.heuristicSearchThreshold = 100000;
        }
        return heuristicSearchThreshold;
    }

    /**
     * @return the heuristicSearchTimeLimit (defaults to 30000 on first access)
     */
    public Integer getHeuristicSearchTimeLimit() {
        if (this.heuristicSearchTimeLimit == null) {
            return this.heuristicSearchTimeLimit = 30000;
        }
        return heuristicSearchTimeLimit;
    }

    /** Returns the history-size anonymization parameter. */
    public int getHistorySize() {
        return historySize;
    }

    /**
     * Returns an upper bound on the number of nodes that will initially
     * be displayed in the lattice viewer.
     */
    public int getInitialNodesInViewer() {
        return initialNodesInViewer;
    }

    /** Returns the size in bytes of the input file. */
    public long getInputBytes() {
        return inputBytes;
    }

    /** Returns the input configuration. */
    public ModelConfiguration getInputConfig() {
        return inputConfig;
    }

    /** Returns the input definition, or null when no input is loaded. */
    public DataDefinition getInputDefinition(){
        if (inputConfig==null) return null;
        else if (inputConfig.getInput()==null) return null;
        else return inputConfig.getInput().getDefinition();
    }

    /** Returns the input population model. */
    public ARXPopulationModel getInputPopulationModel() {
        return getRiskModel().getPopulationModel();
    }

    /** Returns the k-anonymity model. */
    public ModelKAnonymityCriterion getKAnonymityModel() {
        return kAnonymityModel;
    }

    /** Returns the k-map model (lazily created). */
    public ModelKMapCriterion getKMapModel() {
        if (kMapModel == null) {
            kMapModel = new ModelKMapCriterion();
        }
        return kMapModel;
    }

    /** Returns the l-diversity model (lazily created). */
    public Map<String, ModelLDiversityCriterion> getLDiversityModel() {
        if (this.lDiversityModel == null) {
            this.lDiversityModel = new HashMap<String, ModelLDiversityCriterion>();
        }
        return lDiversityModel;
    }

    /** Returns the project locale, falling back to the platform default. */
    public Locale getLocale() {
        if (this.locale == null) {
            return Locale.getDefault();
        } else {
            return locale;
        }
    }

    /** Returns the model for local recoding (lazily created). */
    public ModelLocalRecoding getLocalRecodingModel() {
        if (this.localRecodingModel == null) {
            this.localRecodingModel = new ModelLocalRecoding();
        }
        return localRecodingModel;
    }

    /**
     * When a dataset has more records than this threshold,
     * visualization of statistics will be disabled.
     */
    public int getMaximalSizeForComplexOperations(){
        return this.maximalSizeForComplexOperations;
    }

    /**
     * Returns the maximal size of a sub-lattice that will be displayed
     * by the viewer.
     */
    public int getMaxNodesInViewer() {
        return maxNodesInViewer;
    }

    /**
     * Returns the configuration of the metric, lazily derived from the
     * input configuration or the ARX defaults.
     */
    public MetricConfiguration getMetricConfiguration() {
        if (this.metricConfig == null) {
            if (this.inputConfig == null || this.inputConfig.getMetric() == null) {
                this.metricConfig = ARXConfiguration.create().getQualityModel().getConfiguration();
            } else {
                this.metricConfig = this.inputConfig.getMetric().getConfiguration();
            }
        }
        return this.metricConfig;
    }

    /**
     * Returns a description of the metric, lazily derived from the
     * input configuration or the ARX defaults.
     */
    public MetricDescription getMetricDescription() {
        if (this.metricDescription == null) {
            if (this.inputConfig == null || this.inputConfig.getMetric() == null) {
                this.metricDescription = ARXConfiguration.create().getQualityModel().getDescription();
            } else {
                this.metricDescription = this.inputConfig.getMetric().getDescription();
            }
        }
        return this.metricDescription;
    }

    /** Returns the name of this project. */
    public String getName() {
        return name;
    }

    /** Returns the current transformation filter. */
    public ModelNodeFilter getNodeFilter() {
        return nodeFilter;
    }

    /** Returns a string representation of the current optimum. */
    public String getOptimalNodeAsString() {
        return optimalNodeAsString;
    }

    /**
     * @return the output
     */
    public DataHandle getOutput() {
        return output;
    }

    /** Returns the output config. */
    public ModelConfiguration getOutputConfig() {
        return outputConfig;
    }

    /**
     * Returns the output definition.
* @return
     */
    public DataDefinition getOutputDefinition(){
        if (this.output == null){
            // No materialized output yet: fall back to the result's definition
            if (this.result != null) {
                return this.result.getDataDefinition();
            } else {
                return null;
            }
        } else return this.output.getDefinition();
    }

    /** Returns a string representation of the currently applied transformation. */
    public String getOutputNodeAsString() {
        return outputNodeAsString;
    }

    /**
     * Returns the output population model, if any. Null otherwise.
     */
    public ARXPopulationModel getOutputPopulationModel() {
        ModelConfiguration config = getOutputConfig();
        if (config != null) {
            // First criterion carrying a population model wins
            for (PrivacyCriterion c : config.getCriteria()) {
                if (c.getPopulationModel() != null) {
                    return c.getPopulationModel();
                }
            }
        }
        return null;
    }

    /** Returns the currently applied transformation. */
    public ARXNode getOutputTransformation() {
        return outputNode;
    }

    /** Returns the path of the project. */
    public String getPath() {
        return path;
    }

    /**
     * @return the perspective (never null; defaults to CONFIGURATION)
     */
    public Perspective getPerspective() {
        if (perspective == null) {
            perspective = Perspective.CONFIGURATION;
        }
        return perspective;
    }

    /**
     * @return the optimizationStatistics, falling back to the result's
     *         statistics when none were stored
     */
    public ARXProcessStatistics getProcessStatistics() {
        if (optimizationStatistics == null && this.result != null) {
            return this.result.getProcessStatistics();
        }
        return optimizationStatistics;
    }

    /** Returns the current query. */
    public String getQuery() {
        return query;
    }

    /**
     * Returns the current result.
     *
     * @return the result
     */
    public ARXResult getResult() {
        return result;
    }

    /**
     * Returns the risk-based model, lazily initialized with the three
     * standard variants.
     */
    public Set<ModelRiskBasedCriterion> getRiskBasedModel() {
        if (this.riskBasedModel == null) {
            this.riskBasedModel = new HashSet<ModelRiskBasedCriterion>();
            this.riskBasedModel.add(new ModelRiskBasedCriterion(ModelRiskBasedCriterion.VARIANT_AVERAGE_RISK));
            this.riskBasedModel.add(new ModelRiskBasedCriterion(ModelRiskBasedCriterion.VARIANT_SAMPLE_UNIQUES));
            this.riskBasedModel.add(new ModelRiskBasedCriterion(ModelRiskBasedCriterion.VARIANT_POPULATION_UNIQUES_DANKAR));
        }
        return riskBasedModel;
    }

    /**
     * Returns the risk model (lazily created).
     * @return the risk model
     */
    public ModelRisk getRiskModel() {
        if (this.riskModel == null) {
            this.riskModel = new ModelRisk();
        }
        return riskModel;
    }

    /** Returns the currently selected attribute. */
    public String getSelectedAttribute() {
        return selectedAttribute;
    }

    /** Returns the selected classes (lazily created). */
    public Set<String> getSelectedClasses() {
        if (this.selectedClasses == null) {
            this.selectedClasses = new HashSet<String>();
        }
        return this.selectedClasses;
    }

    /** Returns the selected classes, ordered by occurrence in the dataset. */
    public String[] getSelectedClassesAsArray() {
        return this.getAttributesAsArray(this.getSelectedClasses());
    }

    /** Returns the currently selected class value. */
    public String getSelectedClassValue() {
        return selectedClassValue;
    }

    /** Returns the selected features (lazily created). */
    public Set<String> getSelectedFeatures() {
        if (this.selectedFeatures == null) {
            this.selectedFeatures = new HashSet<String>();
        }
        return this.selectedFeatures;
    }

    /** Returns the selected features, ordered by occurrence in the dataset. */
    public String[] getSelectedFeaturesAsArray() {
        return this.getAttributesAsArray(this.getSelectedFeatures());
    }

    /**
     * Returns the selected transformation.
* * @return */ public ARXNode getSelectedNode() { return selectedNode; } /** * Returns a set of quasi identifiers selected for risk analysis * @return */ public Set<String> getSelectedQuasiIdentifiers() { if (this.selectedQuasiIdentifiers == null) { // Add qis or other attributes if (this.getInputConfig() != null && this.getInputConfig().getInput() != null) { DataHandle handle = this.getInputConfig().getInput().getHandle(); this.selectedQuasiIdentifiers = new HashSet<String>(); Set<String> qis = this.getInputDefinition().getQuasiIdentifyingAttributes(); // Add standard attributes if (qis.isEmpty()) { int max = handle.getNumColumns(); max = Math.min(max, getRiskModel().getMaxQiSize()); for (int i=0; i<max; i++) { this.selectedQuasiIdentifiers.add(handle.getAttributeName(i)); } // Add QIs } else { int max = qis.size(); max = Math.min(max, getRiskModel().getMaxQiSize()); for (int i = 0; i < handle.getNumColumns() && selectedQuasiIdentifiers.size() <= max; i++) { String attr = handle.getAttributeName(i); if (qis.contains(attr)) { this.selectedQuasiIdentifiers.add(attr); } } } } else { // Return empty set return new HashSet<String>(); } } return this.selectedQuasiIdentifiers; } /** * Returns the separator. * * @return */ public char getSeparator() { return separator; } /** * Returns the according parameter. * * @return */ public double getSnapshotSizeDataset() { return snapshotSizeDataset; } /** * Returns the according parameter. 
* * @return the snapshot size relative to the previous snapshot
 */
public double getSnapshotSizeSnapshot() {
    return snapshotSizeSnapshot;
}

/**
 * Returns the size of the solution space for the current input parameters,
 * computed as the product over all generalized or clustered
 * quasi-identifiers of the length of the first row of each attribute's
 * hierarchy.
 *
 * @return the solution space size, or 0 if no input definition is available
 */
public double getSolutionSpaceSize() {

    // Obtain definition
    DataDefinition definition = getInputDefinition();
    if (definition == null) {
        return 0;
    }

    // Generalized and clustered QIs
    Set<String> qis = new HashSet<>(definition.getQuasiIdentifiersWithGeneralization());
    qis.addAll(definition.getQuasiIdentifiersWithClusteringAndMicroaggregation());

    double size = 1;
    for (String qi : qis) {
        Hierarchy hierarchy = getInputConfig().getHierarchy(qi);
        // Skip attributes without a usable (non-null, non-empty) hierarchy
        if (!(hierarchy == null || hierarchy.getHierarchy() == null ||
              hierarchy.getHierarchy().length == 0 || hierarchy.getHierarchy()[0] == null)) {
            size *= hierarchy.getHierarchy()[0].length;
        }
    }

    // Return
    return size;
}

/**
 * Returns the configuration object for the stackelberg privacy model,
 * creating it lazily.
 *
 * @return the profitability (stackelberg) criterion model
 */
public ModelProfitabilityCriterion getStackelbergModel() {
    if (this.stackelbergPrivacyModel == null) {
        this.stackelbergPrivacyModel = new ModelProfitabilityCriterion();
    }
    return stackelbergPrivacyModel;
}

/**
 * Returns the origin of the subset.
 *
 * @return the subset origin
 */
public String getSubsetOrigin(){
    return this.subsetOrigin;
}

/**
 * Returns the t-closeness model, mapping attribute names to their criteria.
 * Created lazily.
 *
 * @return the t-closeness model
 */
public Map<String, ModelTClosenessCriterion> getTClosenessModel() {
    if (this.tClosenessModel == null) {
        this.tClosenessModel = new HashMap<String, ModelTClosenessCriterion>();
    }
    return tClosenessModel;
}

/**
 * Returns the execution time of the last anonymization process.
* * @return the execution time
 */
public long getTime() {
    return time;
}

/**
 * Returns whether functional hierarchies should be used.
 *
 * @return whether functional hierarchies should be used (defaults to true)
 */
public Boolean getUseFunctionalHierarchies() {
    // Backwards compatibility: field may be null in older serialized projects
    if (useFunctionalHierarchies == null) {
        useFunctionalHierarchies = true;
    }
    return useFunctionalHierarchies;
}

/**
 * Returns whether list-wise deletion is used for summary statistics.
 *
 * @return whether list-wise deletion is used (defaults to true)
 */
public Boolean getUseListwiseDeletion() {
    // Backwards compatibility: field may be null in older serialized projects
    if (useListwiseDeletion == null) {
        useListwiseDeletion = true;
    }
    return useListwiseDeletion;
}

/**
 * Returns the view configuration.
 *
 * @return the view configuration
 */
public ModelViewConfig getViewConfig() {
    return this.viewConfig;
}

/**
 * Returns whether debugging is enabled.
 *
 * @return whether debugging is enabled
 */
public boolean isDebugEnabled() {
    return debugEnabled;
}

/**
 * Returns whether this project is modified, i.e. whether this model or any
 * of its sub-models (input/output configuration, risk model, classification
 * model, clipboard) has unsaved changes.
 *
 * @return whether the project is modified
 */
public boolean isModified() {
    if (inputConfig.isModified()) {
        return true;
    }
    if (getRiskModel().isModified()) {
        return true;
    }
    if (getClassificationModel().isModified()) {
        return true;
    }
    if ((outputConfig != null) && outputConfig.isModified()) {
        return true;
    }
    if ((clipboard != null) && clipboard.isModified()) {
        return true;
    }
    return modified;
}

/**
 * Returns whether a quasi-identifier is selected.
 *
 * @return whether the selected attribute is quasi-identifying
 */
public boolean isQuasiIdentifierSelected() {
    return (getInputDefinition().getAttributeType(getSelectedAttribute()) == AttributeType.QUASI_IDENTIFYING_ATTRIBUTE);
}

/**
 * Returns whether a sensitive attribute is selected.
 *
 * @return whether the selected attribute is sensitive
 */
public boolean isSensitiveAttributeSelected() {
    return (getInputDefinition().getAttributeType(getSelectedAttribute()) == AttributeType.SENSITIVE_ATTRIBUTE);
}

/**
 * Returns whether visualization is enabled. Defaults to true when unset.
 *
 * @return whether visualization is enabled
 */
public boolean isVisualizationEnabled(){
    if (this.showVisualization == null) {
        return true;
    } else {
        return this.showVisualization;
    }
}

/**
 * Resets the model.
*/
public void reset() {
    // Reset privacy criteria and attribute-pair tracking first
    this.resetCriteria();
    this.resetAttributePair();
    this.inputConfig = new ModelConfiguration();
    this.outputConfig = null;
    this.output = null;
    this.result = null;
    if (auditTrail != null) auditTrail.clear();
    this.selectedQuasiIdentifiers = null;
    this.selectedFeatures = null;
    this.selectedClasses = null;
    this.subsetOrigin = Resources.getMessage("Model.0"); //$NON-NLS-1$
    this.groups = null;
    this.classificationModel = new ModelClassification();
    this.anonymizationConfiguration = null;
    this.heuristicSearchStepLimit = null;
    this.heuristicSearchThreshold = null;
    this.heuristicSearchTimeLimit = null;
    this.optimizationStatistics = null;
    this.localRecodingModel = null;
    this.selectedClassValue = null;
    this.selectedAttribute = null;
}

/**
 * Resets the last two selected attributes.
 */
public void resetAttributePair() {
    if (pair == null) {
        pair = new String[] { null, null };
    }
    pair[0] = null;
    pair[1] = null;
}

/**
 * Resets the configuration of the privacy criteria. Re-creates the global
 * criteria and re-populates the per-attribute criteria maps for every
 * column of the current input handle.
 */
public void resetCriteria() {

    // Nothing to reset without input
    if (inputConfig==null || inputConfig.getInput()==null) return;

    differentialPrivacyModel = new ModelDifferentialPrivacyCriterion();
    kAnonymityModel = new ModelKAnonymityCriterion();
    stackelbergPrivacyModel = new ModelProfitabilityCriterion();
    dPresenceModel = new ModelDPresenceCriterion();
    kMapModel = new ModelKMapCriterion();
    // NOTE(review): the per-attribute maps are accessed directly (not via the
    // lazy getters) and assumed non-null here — confirm field initialization.
    lDiversityModel.clear();
    tClosenessModel.clear();
    riskBasedModel.clear();
    dDisclosurePrivacyModel.clear();
    bLikenessModel.clear();
    DataHandle handle = inputConfig.getInput().getHandle();
    for (int col = 0; col < handle.getNumColumns(); col++) {
        String attribute = handle.getAttributeName(col);
        lDiversityModel.put(attribute, new ModelLDiversityCriterion(attribute));
        tClosenessModel.put(attribute, new ModelTClosenessCriterion(attribute));
        dDisclosurePrivacyModel.put(attribute, new ModelDDisclosurePrivacyCriterion(attribute));
        bLikenessModel.put(attribute, new ModelBLikenessCriterion(attribute));
    }
    riskBasedModel.add(new ModelRiskBasedCriterion(ModelRiskBasedCriterion.VARIANT_AVERAGE_RISK));
    riskBasedModel.add(new ModelRiskBasedCriterion(ModelRiskBasedCriterion.VARIANT_SAMPLE_UNIQUES));
    riskBasedModel.add(new ModelRiskBasedCriterion(ModelRiskBasedCriterion.VARIANT_POPULATION_UNIQUES_DANKAR));
}

/**
 * Sets the anonymizer.
 *
 * @param anonymizer the anonymizer
 */
public void setAnonymizer(final ARXAnonymizer anonymizer) {
    setModified();
    this.anonymizer = anonymizer;
}

/**
 * Sets the charset.
 *
 * @param charset the charset name
 */
public void setCharset(String charset) {
    this.charset = charset;
}

/**
 * Enables or disables debugging.
 *
 * @param value whether debugging is enabled
 */
public void setDebugEnabled(boolean value){
    this.debugEnabled = value;
    this.setModified();
}

/**
 * Sets the project description.
 *
 * @param description the description
 */
public void setDescription(final String description) {
    this.description = description;
    setModified();
}

/**
 * Updates features and classes to reflect the definition provided:
 * quasi-identifying attributes become features, response variables become
 * classes.
 *
 * @param definition the data definition
 * @return whether an update has been performed
 */
public boolean setFeaturesAndClasses(DataDefinition definition) {

    // Previous
    Set<String> features = this.getSelectedFeatures();
    Set<String> classes = this.getSelectedClasses();

    // New
    this.setSelectedFeatures(new HashSet<String>(definition.getQuasiIdentifyingAttributes()));
    this.setSelectedClasses(new HashSet<String>(definition.getResponseVariables()));

    // Return whether an update has been performed
    return (!features.equals(this.getSelectedFeatures()) || !classes.equals(this.getSelectedClasses()));
}

/**
 * Sets the indices of equivalence classes.
* * @param groups the indices of equivalence classes
 */
public void setGroups(int[] groups) {
    this.groups = groups;
}

/**
 * @param heuristicSearchStepLimit the heuristicSearchStepLimit to set
 */
public void setHeuristicSearchStepLimit(Integer heuristicSearchStepLimit) {
    this.heuristicSearchStepLimit = heuristicSearchStepLimit;
}

/**
 * @param heuristicSearchThreshold the heuristicSearchThreshold to set
 */
public void setHeuristicSearchThreshold(Integer heuristicSearchThreshold) {
    this.heuristicSearchThreshold = heuristicSearchThreshold;
}

/**
 * @param heuristicSearchTimeLimit the heuristicSearchTimeLimit to set
 */
public void setHeuristicSearchTimeLimit(Integer heuristicSearchTimeLimit) {
    this.heuristicSearchTimeLimit = heuristicSearchTimeLimit;
}

/**
 * Sets the history size.
 *
 * @param historySize the history size
 */
public void setHistorySize(final int historySize) {
    this.historySize = historySize;
    setModified();
}

/**
 * Sets the initial number of nodes shown in the viewer.
 *
 * @param val the number of nodes
 */
public void setInitialNodesInViewer(final int val) {
    initialNodesInViewer = val;
    setModified();
}

/**
 * Sets the size of the input in bytes.
 *
 * @param inputBytes the input size in bytes
 */
public void setInputBytes(final long inputBytes) {
    setModified();
    this.inputBytes = inputBytes;
}

/**
 * Sets the input config. Note: does not mark the project as modified.
 *
 * @param config the input configuration
 */
public void setInputConfig(final ModelConfiguration config) {
    this.inputConfig = config;
}

/**
 * Sets the project locale.
 *
 * @param locale null for default locale
 */
public void setLocale(Locale locale) {
    this.locale = locale;
    this.setModified();
}

/**
 * Sets the maximal number of rows for complex operations.
 *
 * @param numberOfRows the maximal number of rows
 */
public void setMaximalSizeForComplexOperations(int numberOfRows) {
    this.maximalSizeForComplexOperations = numberOfRows;
    this.setModified();
}

/**
 * Sets the maximal number of nodes shown in the viewer.
 *
 * @param maxNodesInViewer the maximal number of nodes
 */
public void setMaxNodesInViewer(final int maxNodesInViewer) {
    this.maxNodesInViewer = maxNodesInViewer;
    setModified();
}

/**
 * Sets the description of the metric.
* * @param description the metric description
 */
public void setMetricDescription(MetricDescription description) {
    this.metricDescription = description;
}

/**
 * Marks this project as modified.
 */
public void setModified() {
    modified = true;
}

/**
 * Sets the project name.
 *
 * @param name the name
 */
public void setName(final String name) {
    this.name = name;
    setModified();
}

/**
 * Sets a filter.
 *
 * @param filter the node filter
 */
public void setNodeFilter(final ModelNodeFilter filter) {
    nodeFilter = filter;
    setModified();
}

/**
 * Sets the current output and the node it was derived from. Also caches a
 * string representation of the node's transformation.
 *
 * @param output the output data handle
 * @param node the transformation node, may be null
 */
public void setOutput(final DataHandle output, final ARXNode node) {
    this.output = output;
    this.outputNode = node;
    if (node != null) {
        outputNodeAsString = Arrays.toString(node.getTransformation());
    } else {
        outputNodeAsString = null;
    }
    setModified();
}

/**
 * Sets the current output, deserialized from a project. The output node is
 * taken from the currently selected node.
 *
 * @param stream the stream to read from, may be null (backwards compatibility)
 * @throws IOException
 * @throws ClassNotFoundException
 */
public void setOutput(final InputStream stream) throws ClassNotFoundException, IOException {

    // Backwards compatibility
    if (stream == null) {
        return;
    }

    this.outputNode = this.getSelectedNode();
    if (this.outputNode != null) {
        this.output = this.result.getOutput(stream, outputNode);
        this.outputNodeAsString = Arrays.toString(outputNode.getTransformation());
    } else {
        this.output = null;
        this.outputNodeAsString = null;
    }
}

/**
 * Sets the output config. Note: does not mark the project as modified.
 *
 * @param config the output configuration
 */
public void setOutputConfig(final ModelConfiguration config) {
    outputConfig = config;
}

/**
 * Sets the project path.
* * @param path the path
 */
public void setPath(final String path) {
    this.path = path;
}

/**
 * @param perspective the perspective to set
 */
public void setPerspective(Perspective perspective) {
    this.perspective = perspective;
}

/**
 * @param optimizationStatistics the optimizationStatistics to set
 */
public void setProcessStatistics(ARXProcessStatistics optimizationStatistics) {
    this.optimizationStatistics = optimizationStatistics;
    this.setModified();
}

/**
 * Sets the query.
 *
 * @param query the query
 */
public void setQuery(String query){
    this.query = query;
    setModified();
}

/**
 * Sets the result and caches a string representation of the global optimum,
 * if any.
 *
 * @param result the result
 */
public void setResult(final ARXResult result) {
    this.result = result;
    if ((result != null) && (result.getGlobalOptimum() != null)) {
        optimalNodeAsString = Arrays.toString(result.getGlobalOptimum().getTransformation());
    } else {
        optimalNodeAsString = null;
    }
    setModified();
}

/**
 * Sets the selected attribute and records it as one of the last two
 * selected attributes.
 *
 * @param attribute the attribute
 */
public void setSelectedAttribute(final String attribute) {
    selectedAttribute = attribute;

    // Track last two selected attributes
    if (pair == null) pair = new String[] { null, null };
    if (pair[0] == null) {
        pair[0] = attribute;
        pair[1] = null;
    } else if (pair[1] == null) {
        pair[1] = attribute;
    } else {
        pair[0] = pair[1];
        pair[1] = attribute;
    }

    setModified();
}

/**
 * Sets the set of attributes selected as classes.
 *
 * @param set the selected classes
 */
public void setSelectedClasses(Set<String> set) {
    this.selectedClasses = set;
    this.setModified();
}

/**
 * Sets the selected class value.
 *
 * @param classValue the class value
 */
public void setSelectedClassValue(final String classValue) {
    selectedClassValue = classValue;
}

/**
 * Sets the set of attributes selected as features.
 *
 * @param set the selected features
 */
public void setSelectedFeatures(Set<String> set) {
    this.selectedFeatures = set;
    this.setModified();
}

/**
 * Sets the selected node.
* * @param node the node
 */
public void setSelectedNode(final ARXNode node) {
    selectedNode = node;
    setModified();
}

/**
 * Sets a set of quasi identifiers selected for risk analysis.
 *
 * @param set the quasi-identifiers
 */
public void setSelectedQuasiIdentifiers(Set<String> set) {
    this.selectedQuasiIdentifiers = set;
    this.setModified();
}

/**
 * Sets the according parameter.
 *
 * @param snapshotSize the snapshot size relative to the dataset
 */
public void setSnapshotSizeDataset(final double snapshotSize) {
    snapshotSizeDataset = snapshotSize;
    setModified();
}

/**
 * Sets the according parameter.
 *
 * @param snapshotSize the snapshot size relative to the previous snapshot
 */
public void setSnapshotSizeSnapshot(final double snapshotSize) {
    setModified();
    snapshotSizeSnapshot = snapshotSize;
}

/**
 * Sets how the subset was defined: appends the "manual" marker to the
 * subset origin unless it is already present.
 */
public void setSubsetManual(){
    if (!this.subsetOrigin.endsWith(Resources.getMessage("Model.1"))) { //$NON-NLS-1$
        this.subsetOrigin += Resources.getMessage("Model.2"); //$NON-NLS-1$
    }
}

/**
 * Sets how the subset was defined.
 *
 * @param origin the subset origin
 */
public void setSubsetOrigin(String origin){
    this.subsetOrigin = origin;
}

/**
 * Sets the execution time of the last anonymization process.
 *
 * @param time the execution time
 */
public void setTime(final long time) {
    this.time = time;
}

/**
 * Marks this model and all of its sub-models as unmodified.
 */
public void setUnmodified() {
    modified = false;
    inputConfig.setUnmodified();
    getRiskModel().setUnmodified();
    if (outputConfig != null) {
        outputConfig.setUnmodified();
    }
    if (clipboard != null) {
        clipboard.setUnmodified();
    }
    getClassificationModel().setUnmodified();
}

/**
 * Sets whether functional hierarchies should be used during anonymization
 * to estimate utility.
 *
 * @param useFunctionalHierarchies whether functional hierarchies should be used
 */
public void setUseFunctionalHierarchies(boolean useFunctionalHierarchies) {
    this.useFunctionalHierarchies = useFunctionalHierarchies;
}

/**
 * Sets whether list-wise deletion should be used for summary statistics.
 *
 * @param useListwiseDeletion whether list-wise deletion should be used
 */
public void setUseListwiseDeletion(boolean useListwiseDeletion) {
    this.useListwiseDeletion = useListwiseDeletion;
}

/**
 * Sets the view configuration. Note: does not mark the project as modified.
 *
 * @param viewConfig the view configuration
 */
public void setViewConfig(ModelViewConfig viewConfig) {
    this.viewConfig = viewConfig;
}

/**
 * Sets visualization as enabled/disabled.
 *
 * @param value whether visualization is enabled
 */
public void setVisualizationEnabled(boolean value){
    this.showVisualization = value;
    this.setModified();
}

/**
 * Converts attributes into an array ordered by occurrence in the dataset.
 *
 * @param set the attributes to convert
 * @return the attributes as an array, empty if no input is available
 */
private String[] getAttributesAsArray(Set<String> set) {
    if (this.getInputConfig() == null || this.getInputConfig().getInput() == null ||
        this.getInputConfig().getInput().getHandle() == null || set == null || set.isEmpty()) {
        return new String[0];
    }
    List<String> result = new ArrayList<String>();
    DataHandle handle = this.getInputConfig().getInput().getHandle();
    for (int column = 0; column < handle.getNumColumns(); column++) {
        String attribute = handle.getAttributeName(column);
        if (set.contains(attribute)) {
            result.add(attribute);
        }
    }
    return result.toArray(new String[result.size()]);
}
}
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.yemyatthu.bumc.widget;

import android.content.Context;
import android.graphics.Typeface;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.HorizontalScrollView;
import android.widget.LinearLayout;
import android.widget.TextView;

/**
 * To be used with ViewPager to provide a tab indicator component which gives constant feedback as
 * to the user's scroll progress.
 * <p>
 * To use the component, simply add it to your view hierarchy. Then in your
 * {@link android.app.Activity} or {@link android.support.v4.app.Fragment} call
 * {@link #setViewPager(ViewPager)} providing it the ViewPager this layout is being used for.
 * <p>
 * The colors can be customized in two ways. The first and simplest is to provide an array of
 * colors via {@link #setSelectedIndicatorColors(int...)}. The alternative is via the
 * {@link TabColorizer} interface which provides you complete control over which color is used for
 * any individual position.
 * <p>
 * The views used as tabs can be customized by calling {@link #setCustomTabView(int, int)},
 * providing the layout ID of your custom layout.
*/
public class SlidingTabLayout extends HorizontalScrollView {

    // Offset (in dips) by which the selected title is pulled back from the left edge
    private static final int TITLE_OFFSET_DIPS = 24;
    // Padding (in dips) applied on all sides of a default tab view
    private static final int TAB_VIEW_PADDING_DIPS = 16;
    // Text size (in sp) of a default tab view
    private static final int TAB_VIEW_TEXT_SIZE_SP = 12;

    private final SlidingTabStrip mTabStrip;

    private int mTitleOffset;

    // Resource ids of an optional custom tab layout and its title TextView
    private int mTabViewLayoutId;
    private int mTabViewTextViewId;

    private boolean mDistributeEvenly;

    private ViewPager mViewPager;
    private SparseArray<String> mContentDescriptions = new SparseArray<String>();
    private ViewPager.OnPageChangeListener mViewPagerPageChangeListener;

    public SlidingTabLayout(Context context) {
        this(context, null);
    }

    public SlidingTabLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public SlidingTabLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);

        // Disable the Scroll Bar
        setHorizontalScrollBarEnabled(false);
        // Make sure that the Tab Strips fills this View
        setFillViewport(true);

        mTitleOffset = (int) (TITLE_OFFSET_DIPS * getResources().getDisplayMetrics().density);

        mTabStrip = new SlidingTabStrip(context);
        addView(mTabStrip, LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
    }

    /**
     * Set the custom {@link TabColorizer} to be used.
     *
     * If you only require simple customisation then you can use
     * {@link #setSelectedIndicatorColors(int...)} to achieve
     * similar effects.
     */
    public void setCustomTabColorizer(TabColorizer tabColorizer) {
        mTabStrip.setCustomTabColorizer(tabColorizer);
    }

    /**
     * Sets whether the tabs should share the available width evenly.
     */
    public void setDistributeEvenly(boolean distributeEvenly) {
        mDistributeEvenly = distributeEvenly;
    }

    /**
     * Sets the colors to be used for indicating the selected tab. These colors are treated as a
     * circular array. Providing one color will mean that all tabs are indicated with the same color.
     */
    public void setSelectedIndicatorColors(int... colors) {
        mTabStrip.setSelectedIndicatorColors(colors);
    }

    /**
     * Set the {@link ViewPager.OnPageChangeListener}. When using {@link SlidingTabLayout} you are
     * required to set any {@link ViewPager.OnPageChangeListener} through this method. This is so
     * that the layout can update its scroll position correctly.
     *
     * @see ViewPager#setOnPageChangeListener(ViewPager.OnPageChangeListener)
     */
    public void setOnPageChangeListener(ViewPager.OnPageChangeListener listener) {
        mViewPagerPageChangeListener = listener;
    }

    /**
     * Set the custom layout to be inflated for the tab views.
     *
     * @param layoutResId Layout id to be inflated
     * @param textViewId id of the {@link TextView} in the inflated view
     */
    public void setCustomTabView(int layoutResId, int textViewId) {
        mTabViewLayoutId = layoutResId;
        mTabViewTextViewId = textViewId;
    }

    /**
     * Sets the associated view pager. Note that the assumption here is that the pager content
     * (number of tabs and tab titles) does not change after this call has been made.
     */
    public void setViewPager(ViewPager viewPager) {
        mTabStrip.removeAllViews();

        mViewPager = viewPager;
        if (viewPager != null) {
            viewPager.setOnPageChangeListener(new InternalViewPagerListener());
            populateTabStrip();
        }
    }

    /**
     * Create a default view to be used for tabs. This is called if a custom tab view is not set via
     * {@link #setCustomTabView(int, int)}.
     */
    protected TextView createDefaultTabView(Context context) {
        AutofitTextView textView = new AutofitTextView(context);
        textView.setGravity(Gravity.CENTER);
        textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP);
        textView.setTypeface(Typeface.DEFAULT_BOLD);
        textView.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
            ViewGroup.LayoutParams.WRAP_CONTENT));

        // Use the theme's selectable item background for touch feedback
        TypedValue outValue = new TypedValue();
        getContext().getTheme()
            .resolveAttribute(android.R.attr.selectableItemBackground, outValue, true);
        textView.setBackgroundResource(outValue.resourceId);
        textView.setAllCaps(true);

        int padding = (int) (TAB_VIEW_PADDING_DIPS * getResources().getDisplayMetrics().density);
        textView.setPadding(padding, padding, padding, padding);

        return textView;
    }

    // Builds one tab view per adapter page and adds them to the strip.
    private void populateTabStrip() {
        final PagerAdapter adapter = mViewPager.getAdapter();
        final View.OnClickListener tabClickListener = new TabClickListener();

        for (int i = 0; i < adapter.getCount(); i++) {
            View tabView = null;
            TextView tabTitleView = null;

            if (mTabViewLayoutId != 0) {
                // If there is a custom tab view layout id set, try and inflate it
                tabView = LayoutInflater.from(getContext()).inflate(mTabViewLayoutId, mTabStrip, false);
                tabTitleView = (TextView) tabView.findViewById(mTabViewTextViewId);
            }

            if (tabView == null) {
                tabView = createDefaultTabView(getContext());
            }

            // NOTE(review): if a custom layout has no TextView with the given id and the
            // inflated root is not itself a TextView, tabTitleView stays null and the
            // setText call below throws — verify custom layouts always provide one.
            if (tabTitleView == null && TextView.class.isInstance(tabView)) {
                tabTitleView = (TextView) tabView;
            }

            if (mDistributeEvenly) {
                LinearLayout.LayoutParams lp = (LinearLayout.LayoutParams) tabView.getLayoutParams();
                lp.width = 0;
                lp.weight = 1;
            }

            tabTitleView.setText(adapter.getPageTitle(i));
            tabView.setOnClickListener(tabClickListener);
            String desc = mContentDescriptions.get(i, null);
            if (desc != null) {
                tabView.setContentDescription(desc);
            }

            mTabStrip.addView(tabView);
            if (i == mViewPager.getCurrentItem()) {
                tabView.setSelected(true);
            }
        }
    }

    // Stores a content description for the tab at the given position;
    // applied when the strip is (re)populated.
    public void setContentDescription(int i, String desc) {
        mContentDescriptions.put(i, desc);
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();

        if (mViewPager != null) {
            // Restore the scroll position to the currently selected page
            scrollToTab(mViewPager.getCurrentItem(), 0);
        }
    }

    // Scrolls the layout so that the given tab is visible, honoring the title offset.
    private void scrollToTab(int tabIndex, int positionOffset) {
        final int tabStripChildCount = mTabStrip.getChildCount();
        if (tabStripChildCount == 0 || tabIndex < 0 || tabIndex >= tabStripChildCount) {
            return;
        }

        View selectedChild = mTabStrip.getChildAt(tabIndex);
        if (selectedChild != null) {
            int targetScrollX = selectedChild.getLeft() + positionOffset;

            if (tabIndex > 0 || positionOffset > 0) {
                // If we're not at the first child and are mid-scroll, make sure we obey the offset
                targetScrollX -= mTitleOffset;
            }

            scrollTo(targetScrollX, 0);
        }
    }

    /**
     * Allows complete control over the colors drawn in the tab layout. Set with
     * {@link #setCustomTabColorizer(TabColorizer)}.
     */
    public interface TabColorizer {

        /**
         * @return return the color of the indicator used when {@code position} is selected.
         */
        int getIndicatorColor(int position);
    }

    // Forwards ViewPager page-change events to the strip and any external listener.
    private class InternalViewPagerListener implements ViewPager.OnPageChangeListener {
        private int mScrollState;

        @Override
        public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            int tabStripChildCount = mTabStrip.getChildCount();
            if ((tabStripChildCount == 0) || (position < 0) || (position >= tabStripChildCount)) {
                return;
            }

            mTabStrip.onViewPagerPageChanged(position, positionOffset);

            View selectedTitle = mTabStrip.getChildAt(position);
            int extraOffset = (selectedTitle != null) ? (int) (positionOffset * selectedTitle.getWidth()) : 0;
            scrollToTab(position, extraOffset);

            if (mViewPagerPageChangeListener != null) {
                mViewPagerPageChangeListener.onPageScrolled(position, positionOffset, positionOffsetPixels);
            }
        }

        @Override
        public void onPageScrollStateChanged(int state) {
            mScrollState = state;

            if (mViewPagerPageChangeListener != null) {
                mViewPagerPageChangeListener.onPageScrollStateChanged(state);
            }
        }

        @Override
        public void onPageSelected(int position) {
            if (mScrollState == ViewPager.SCROLL_STATE_IDLE) {
                mTabStrip.onViewPagerPageChanged(position, 0f);
                scrollToTab(position, 0);
            }
            for (int i = 0; i < mTabStrip.getChildCount(); i++) {
                mTabStrip.getChildAt(i).setSelected(position == i);
            }
            if (mViewPagerPageChangeListener != null) {
                mViewPagerPageChangeListener.onPageSelected(position);
            }
        }
    }

    // Selects the page whose tab was clicked.
    private class TabClickListener implements View.OnClickListener {
        @Override
        public void onClick(View v) {
            for (int i = 0; i < mTabStrip.getChildCount(); i++) {
                if (v == mTabStrip.getChildAt(i)) {
                    mViewPager.setCurrentItem(i);
                    return;
                }
            }
        }
    }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.debugger; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.process.ProcessListener; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.execution.ui.ExecutionConsole; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.DefaultActionGroup; import com.intellij.openapi.actionSystem.Presentation; import com.intellij.openapi.actionSystem.ToggleAction; import com.intellij.openapi.application.AccessToken; import com.intellij.openapi.application.ApplicationInfo; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Messages; import 
com.intellij.openapi.util.Key; import com.intellij.openapi.util.Ref; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.ResolveState; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.remote.RemoteProcessControl; import com.intellij.util.ui.UIUtil; import com.intellij.xdebugger.*; import com.intellij.xdebugger.breakpoints.*; import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider; import com.intellij.xdebugger.frame.XExecutionStack; import com.intellij.xdebugger.frame.XStackFrame; import com.intellij.xdebugger.frame.XSuspendContext; import com.intellij.xdebugger.frame.XValueChildrenList; import com.intellij.xdebugger.stepping.XSmartStepIntoHandler; import com.jetbrains.python.PythonFileType; import com.jetbrains.python.console.PythonConsoleView; import com.jetbrains.python.console.PythonDebugLanguageConsoleView; import com.jetbrains.python.console.pydev.PydevCompletionVariant; import com.jetbrains.python.debugger.pydev.*; import com.jetbrains.python.debugger.settings.PyDebuggerSettings; import com.jetbrains.python.psi.*; import com.jetbrains.python.psi.resolve.PyResolveContext; import com.jetbrains.python.psi.resolve.PyResolveUtil; import com.jetbrains.python.psi.resolve.RatedResolveResult; import com.jetbrains.python.psi.types.PyClassType; import com.jetbrains.python.psi.types.PyModuleType; import com.jetbrains.python.psi.types.PyType; import com.jetbrains.python.psi.types.PyTypeParser; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.net.ServerSocket; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import static javax.swing.SwingUtilities.invokeLater; /** * @author yole */ // todo: bundle messages // todo: pydevd supports module reloading - look for a way to use the feature 
/**
 * XDebugger-based debug process for Python, backed by a pydevd {@link ProcessDebugger}.
 * <p>
 * Bridges the IntelliJ debugger UI (breakpoints, stepping, frames, evaluation) to the remote
 * pydevd debugger: it registers line/exception breakpoint handlers, tracks suspended threads,
 * caches loaded stack frames, and forwards resume/step/evaluate commands to {@link #myDebugger}.
 * In multi-process mode a {@link MultiProcessDebugger} is used instead of a single
 * {@link RemoteDebugger}.
 */
public class PyDebugProcess extends XDebugProcess implements IPyDebugProcess, ProcessListener {
  private static final Logger LOG = Logger.getInstance("#com.jetbrains.python.debugger.PyDebugProcess");
  // Timeout (ms) used when waiting for the debugged process to connect back; see getConnectTimeout().
  private static final int CONNECTION_TIMEOUT = 60000;

  // Low-level connection to pydevd; all debugger commands are delegated to it.
  private final ProcessDebugger myDebugger;
  private final XBreakpointHandler[] myBreakpointHandlers;
  private final PyDebuggerEditorsProvider myEditorsProvider;
  private final ProcessHandler myProcessHandler;
  private final ExecutionConsole myExecutionConsole;
  // Breakpoints currently registered with this process, keyed by their (converted) Python position.
  private final Map<PySourcePosition, XLineBreakpoint> myRegisteredBreakpoints = new ConcurrentHashMap<>();
  // Exception breakpoints keyed by exception name (see addExceptionBreakpoint()).
  private final Map<String, XBreakpoint<? extends ExceptionBreakpointProperties>> myRegisteredExceptionBreakpoints =
    new ConcurrentHashMap<>();
  private final List<PyThreadInfo> mySuspendedThreads = Collections.synchronizedList(Lists.<PyThreadInfo>newArrayList());
  // Frame-variable caches keyed by thread-frame id; cleared on every resume/step (dropFrameCaches()).
  // NOTE(review): these two maps are plain HashMaps accessed from debugger callbacks — presumably
  // confined to a single thread; confirm before touching concurrency here.
  private final Map<String, XValueChildrenList> myStackFrameCache = Maps.newHashMap();
  private final Map<String, PyDebugValue> myNewVariableValue = Maps.newHashMap();
  private boolean myDownloadSources = false;
  // Set once the process is about to terminate, to suppress the "unable to connect" error dialog.
  private boolean myClosing = false;
  // Maps IDE source positions to positions in the debugged (possibly remote) Python process.
  private PyPositionConverter myPositionConverter;
  private final XSmartStepIntoHandler<?> mySmartStepIntoHandler;
  private boolean myWaitingForConnection = false;
  // Frame used for console evaluation when no stack frame is selected (set by showConsole()).
  private PyStackFrame myConsoleContextFrame = null;
  private PyReferrersLoader myReferrersProvider;

  /**
   * Creates the debug process and wires it to the session.
   *
   * @param session          the owning XDebugger session
   * @param serverSocket     socket the debugged process connects back to
   * @param executionConsole console to attach to this process
   * @param processHandler   handler of the debugged process, or {@code null} (e.g. remote attach);
   *                         if it implements {@link PositionConverterProvider} it supplies the
   *                         position converter, otherwise a {@link PyLocalPositionConverter} is used
   * @param multiProcess     whether to debug child processes via {@link MultiProcessDebugger}
   */
  public PyDebugProcess(final @NotNull XDebugSession session,
                        @NotNull final ServerSocket serverSocket,
                        @NotNull final ExecutionConsole executionConsole,
                        @Nullable final ProcessHandler processHandler,
                        boolean multiProcess) {
    super(session);
    session.setPauseActionSupported(true);
    if (multiProcess) {
      myDebugger = createMultiprocessDebugger(serverSocket);
    }
    else {
      myDebugger = new RemoteDebugger(this, serverSocket, getConnectTimeout());
    }

    List<XBreakpointHandler> breakpointHandlers = new ArrayList<>();
    breakpointHandlers.add(new PyLineBreakpointHandler(this));
    breakpointHandlers.add(new PyExceptionBreakpointHandler(this));
    // Plugins may contribute additional breakpoint types (e.g. template-language breakpoints).
    for (PyBreakpointHandlerFactory factory : Extensions.getExtensions(PyBreakpointHandlerFactory.EP_NAME)) {
      breakpointHandlers.add(factory.createBreakpointHandler(this));
    }
    myBreakpointHandlers = breakpointHandlers.toArray(new XBreakpointHandler[breakpointHandlers.size()]);

    myEditorsProvider = new PyDebuggerEditorsProvider();
    mySmartStepIntoHandler = new PySmartStepIntoHandler(this);
    myProcessHandler = processHandler;
    myExecutionConsole = executionConsole;
    if (myProcessHandler != null) {
      myProcessHandler.addProcessListener(this);
    }
    if (processHandler instanceof PositionConverterProvider) {
      myPositionConverter = ((PositionConverterProvider)processHandler).createPositionConverter(this);
    }
    else {
      myPositionConverter = new PyLocalPositionConverter();
    }
    // Stop the session when the wire connection closes; on error/detach, detach instead.
    myDebugger.addCloseListener(new RemoteDebuggerCloseListener() {
      @Override
      public void closed() {
        handleStop();
      }

      @Override
      public void communicationError() {
        detachDebuggedProcess();
      }

      @Override
      public void detached() {
        detachDebuggedProcess();
      }
    });
    // Keep the suspend context in sync when the user selects a frame belonging to another
    // suspended thread in the frames view.
    session.addSessionListener(new XDebugSessionListener() {
      @Override
      public void stackFrameChanged() {
        String currentFrameThreadId = null;
        final XStackFrame currentFrame = session.getCurrentStackFrame();
        if (currentFrame instanceof PyStackFrame) {
          currentFrameThreadId = ((PyStackFrame)currentFrame).getThreadId();
        }
        final XExecutionStack activeStack = session.getSuspendContext().getActiveExecutionStack();
        if ((activeStack == null) || (currentFrameThreadId == null)) {
          return;
        }
        final XStackFrame frameFromSuspendContext = activeStack.getTopFrame();
        String activeStackThreadId = null;
        if (frameFromSuspendContext instanceof PyStackFrame) {
          activeStackThreadId = ((PyStackFrame)frameFromSuspendContext).getThreadId();
        }
        if (!currentFrameThreadId.equals(activeStackThreadId)) {
          // another thread was selected, we should update suspendContext
          PyThreadInfo threadInfo = null;
          for (PyThreadInfo info : mySuspendedThreads) {
            if (info.getId().equals(currentFrameThreadId)) {
              threadInfo = info;
              break;
            }
          }
          if (threadInfo != null) {
            getSession().positionReached(createSuspendContext(threadInfo));
          }
        }
      }
    });
  }

  /**
   * Builds a {@link MultiProcessDebugger} (10s connect timeout) that resumes the session when
   * all threads of a child debugger disappear while the session is still shown as suspended.
   */
  private MultiProcessDebugger createMultiprocessDebugger(ServerSocket serverSocket) {
    MultiProcessDebugger debugger = new MultiProcessDebugger(this, serverSocket, 10000);
    debugger.addOtherDebuggerCloseListener(new MultiProcessDebugger.DebuggerProcessListener() {
      @Override
      public void threadsClosed(Set<String> threadIds) {
        for (PyThreadInfo t : mySuspendedThreads) {
          if (threadIds.contains(t.getId())) {
            if (getSession().isSuspended()) {
              getSession().resume();
              break;
            }
          }
        }
      }
    });
    return debugger;
  }

  protected void detachDebuggedProcess() {
    handleStop(); //in case of normal debug we stop the session
  }

  protected void handleStop() {
    getSession().stop();
  }

  public void setPositionConverter(PyPositionConverter positionConverter) {
    myPositionConverter = positionConverter;
  }

  @Override
  public PyPositionConverter getPositionConverter() {
    return myPositionConverter;
  }

  @NotNull
  @Override
  public XBreakpointHandler<?>[] getBreakpointHandlers() {
    return myBreakpointHandlers;
  }

  @Override
  @NotNull
  public XDebuggerEditorsProvider getEditorsProvider() {
    return myEditorsProvider;
  }

  @Override
  @Nullable
  protected ProcessHandler doGetProcessHandler() {
    return myProcessHandler;
  }

  @Override
  @NotNull
  public ExecutionConsole createConsole() {
    return myExecutionConsole;
  }

  @Override
  public XSmartStepIntoHandler<?> getSmartStepIntoHandler() {
    return mySmartStepIntoHandler;
  }

  /** Called by the platform once the session UI is ready; starts waiting for pydevd to connect. */
  @Override
  public void sessionInitialized() {
    waitForConnection(getConnectionMessage(), getConnectionTitle());
  }

  /**
   * Waits (in a background task) for the debugged process to connect, then performs the
   * handshake, initializes views/breakpoints and starts the debugger loop. On failure the
   * process is destroyed and, unless we are already closing, an error dialog is shown.
   */
  protected void waitForConnection(final String connectionMessage, String connectionTitle) {
    ProgressManager.getInstance().run(new Task.Backgroundable(getSession().getProject(), connectionTitle, false) {
      @Override
      public void run(@NotNull final ProgressIndicator indicator) {
        indicator.setText(connectionMessage);
        try {
          beforeConnect();
          myWaitingForConnection = true;
          myDebugger.waitForConnect();
          myWaitingForConnection = false;
          afterConnect();

          handshake();
          init();
          myDebugger.run();
        }
        catch (final Exception e) {
          myWaitingForConnection = false;
          if (myProcessHandler != null) {
            myProcessHandler.destroyProcess();
          }
          if (!myClosing) {
            invokeLater(
              () -> Messages.showErrorDialog("Unable to establish connection with debugger:\n" + e.getMessage(), getConnectionTitle()));
          }
        }
      }
    });
  }

  @Override
  public void init() {
    getSession().rebuildViews();
    registerBreakpoints();
    setShowReturnValues(PyDebuggerSettings.getInstance().isWatchReturnValues());
  }

  /**
   * Resolves the port a child process should connect to: for remote processes the local port is
   * tunneled through the remote process control, otherwise it is returned as-is.
   */
  @Override
  public int handleDebugPort(int localPort) throws IOException {
    if (myProcessHandler instanceof RemoteProcessControl) {
      return getRemoteTunneledPort(localPort, (RemoteProcessControl)myProcessHandler);
    }
    else {
      return localPort;
    }
  }

  protected static int getRemoteTunneledPort(int localPort, @NotNull RemoteProcessControl handler) throws IOException {
    try {
      return handler.getRemoteSocket(localPort).getSecond();
    }
    catch (Exception e) {
      // Wrap any tunneling failure so callers only deal with IOException.
      throw new IOException(e);
    }
  }

  @Override
  public void recordSignature(PySignature signature) {
    PySignatureCacheManager.getInstance(getSession().getProject()).recordSignature(myPositionConverter.convertSignature(signature));
  }

  @Override
  public void recordLogEvent(PyConcurrencyEvent event) {
    PyConcurrencyService.getInstance(getSession().getProject()).recordEvent(getSession(), event, event.isAsyncio());
  }

  /**
   * Switches the debug console to the interactive pydev console and remembers the given thread's
   * top frame as the evaluation context (used by currentFrame() when no frame is selected).
   */
  @Override
  public void showConsole(PyThreadInfo thread) {
    myConsoleContextFrame = new PyExecutionStack(this, thread).getTopFrame();
    if (myExecutionConsole instanceof PythonDebugLanguageConsoleView) {
      PythonDebugLanguageConsoleView consoleView = (PythonDebugLanguageConsoleView)myExecutionConsole;
      UIUtil.invokeLaterIfNeeded(() -> {
        consoleView.enableConsole(false);
        consoleView.getPydevConsoleView().setConsoleEnabled(true);
      });
    }
  }

  @Override
  public void consoleInputRequested(boolean isStarted) {
    if (myExecutionConsole instanceof PythonDebugLanguageConsoleView) {
      PythonConsoleView consoleView = ((PythonDebugLanguageConsoleView)myExecutionConsole).getPydevConsoleView();
      if (isStarted) {
        consoleView.inputRequested();
      }
      else {
        consoleView.inputReceived();
      }
    }
  }

  // Hooks for subclasses around the connect phase; intentionally empty here.
  protected void afterConnect() {
  }

  protected void beforeConnect() {
  }

  protected String getConnectionMessage() {
    return "Waiting for connection...";
  }

  protected String getConnectionTitle() {
    return "Connecting To Debugger";
  }

  /**
   * Exchanges version info with pydevd and warns (log + console) when the remote debugger build
   * does not match the current IDE build. "@@BUILD_NUMBER@@" means a debugger built from sources.
   */
  private void handshake() throws PyDebuggerException {
    String remoteVersion = myDebugger.handshake();
    String currentBuild = ApplicationInfo.getInstance().getBuild().asStringWithoutProductCode();
    if ("@@BUILD_NUMBER@@".equals(remoteVersion)) {
      remoteVersion = currentBuild;
    }
    else if (remoteVersion.startsWith("PY-")) {
      remoteVersion = remoteVersion.substring(3);
    }
    else {
      // Unrecognized version string: skip the version check below.
      remoteVersion = null;
    }
    printToConsole("Connected to pydev debugger (build " + remoteVersion + ")\n", ConsoleViewContentType.SYSTEM_OUTPUT);
    if (remoteVersion != null) {
      if (!(remoteVersion.equals(currentBuild) || remoteVersion.startsWith(currentBuild))) {
        LOG.warn(String.format("Wrong debugger version. Remote version: %s Current build: %s", remoteVersion, currentBuild));
        printToConsole("Warning: wrong debugger version. Use pycharm-debugger.egg from PyCharm installation folder.\n",
                       ConsoleViewContentType.ERROR_OUTPUT);
      }
    }
  }

  @Override
  public void printToConsole(String text, ConsoleViewContentType contentType) {
    ((ConsoleView)myExecutionConsole).print(text, contentType);
  }

  /** Re-sends all registered line and exception breakpoints to the (re)connected debugger. */
  private void registerBreakpoints() {
    registerLineBreakpoints();
    registerExceptionBreakpoints();
  }

  private void registerExceptionBreakpoints() {
    for (XBreakpoint<? extends ExceptionBreakpointProperties> bp : myRegisteredExceptionBreakpoints.values()) {
      addExceptionBreakpoint(bp);
    }
  }

  public void registerLineBreakpoints() {
    for (Map.Entry<PySourcePosition, XLineBreakpoint> entry : myRegisteredBreakpoints.entrySet()) {
      addBreakpoint(entry.getKey(), entry.getValue());
    }
  }

  /** Adds the Python-specific toggles (return values, simplified view) to the settings menu. */
  @Override
  public void registerAdditionalActions(@NotNull DefaultActionGroup leftToolbar,
                                        @NotNull DefaultActionGroup topToolbar,
                                        @NotNull DefaultActionGroup settings) {
    super.registerAdditionalActions(leftToolbar, topToolbar, settings);
    settings.add(new WatchReturnValuesAction(this));
    settings.add(new SimplifiedView(this));
  }

  /**
   * Toggle action controlling whether executed functions' return values are watched.
   * Persists the flag in {@link PyDebuggerSettings} and rebuilds the session views on change.
   */
  private static class WatchReturnValuesAction extends ToggleAction {
    private volatile boolean myWatchesReturnValues;
    private final PyDebugProcess myProcess;
    private final String myText;

    public WatchReturnValuesAction(@NotNull PyDebugProcess debugProcess) {
      super("", "Enables watching executed functions return values", null);
      myWatchesReturnValues = PyDebuggerSettings.getInstance().isWatchReturnValues();
      myProcess = debugProcess;
      myText = "Show Return Values";
    }

    @Override
    public void update(@NotNull final AnActionEvent e) {
      super.update(e);
      final Presentation presentation = e.getPresentation();
      presentation.setEnabled(true);
      presentation.setText(myText);
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return myWatchesReturnValues;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean watch) {
      myWatchesReturnValues = watch;
      PyDebuggerSettings.getInstance().setWatchReturnValues(watch);
      final Project project = e.getProject();
      if (project != null) {
        myProcess.setShowReturnValues(myWatchesReturnValues);
        myProcess.getSession().rebuildViews();
      }
    }
  }

  /**
   * Toggle action for the "simplified" variables view. Persists the flag in
   * {@link PyDebuggerSettings} and rebuilds the session views on change.
   */
  private static class SimplifiedView extends ToggleAction {
    private volatile boolean mySimplifiedView;
    private final PyDebugProcess myProcess;
    private final String myText;

    public SimplifiedView(@NotNull PyDebugProcess debugProcess) {
      super("", "Disables watching classes, functions and modules objects", null);
      mySimplifiedView = PyDebuggerSettings.getInstance().isSimplifiedView();
      myProcess = debugProcess;
      myText = "Simplified Variables View";
    }

    @Override
    public void update(@NotNull final AnActionEvent e) {
      super.update(e);
      final Presentation presentation = e.getPresentation();
      presentation.setEnabled(true);
      presentation.setText(myText);
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return mySimplifiedView;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean hide) {
      mySimplifiedView = hide;
      PyDebuggerSettings.getInstance().setSimplifiedView(hide);
      myProcess.getSession().rebuildViews();
    }
  }

  public void setShowReturnValues(boolean showReturnValues) {
    myDebugger.setShowReturnValues(showReturnValues);
  }

  @Override
  public void startStepOver(@Nullable XSuspendContext context) {
    passToCurrentThread(context, ResumeOrStepCommand.Mode.STEP_OVER);
  }

  @Override
  public void startStepInto(@Nullable XSuspendContext context) {
    passToCurrentThread(context, ResumeOrStepCommand.Mode.STEP_INTO);
  }

  public void startStepIntoMyCode(@Nullable XSuspendContext context) {
    if (!checkCanPerformCommands()) return;
    getSession().sessionResumed();
    passToCurrentThread(context, ResumeOrStepCommand.Mode.STEP_INTO_MY_CODE);
  }

  @Override
  public void startStepOut(@Nullable XSuspendContext context) {
    passToCurrentThread(context, ResumeOrStepCommand.Mode.STEP_OUT);
  }

  /** Sends a smart-step-into request for every currently suspended thread. */
  public void startSmartStepInto(String functionName) {
    dropFrameCaches();
    if (isConnected()) {
      for (PyThreadInfo suspendedThread : mySuspendedThreads) {
        myDebugger.smartStepInto(suspendedThread.getId(), functionName);
      }
    }
  }

  @Override
  public void stop() {
    myDebugger.close();
  }

  @Override
  public void resume(@Nullable XSuspendContext context) {
    passToAllThreads(ResumeOrStepCommand.Mode.RESUME);
  }

  @Override
  public void startPausing() {
    if (isConnected()) {
      myDebugger.suspendAllThreads();
    }
  }

  public void suspendAllOtherThreads(PyThreadInfo thread) {
    myDebugger.suspendOtherThreads(thread);
  }

  /**
   * Check if there is the thread suspended on the breakpoint with "Suspend all" policy
   *
   * @return true if this thread exists
   */
  @Override
  public boolean isSuspendedOnAllThreadsPolicy() {
    if (getSession().isSuspended()) {
      for (PyThreadInfo threadInfo : getThreads()) {
        final List<PyStackFrameInfo> frames = threadInfo.getFrames();
        if ((threadInfo.getState() == PyThreadInfo.State.SUSPENDED) && (frames != null)) {
          XBreakpoint<?> breakpoint = null;
          if (threadInfo.isStopOnBreakpoint()) {
            // Line breakpoints are looked up by the top frame's position.
            final PySourcePosition position = frames.get(0).getPosition();
            breakpoint = myRegisteredBreakpoints.get(position);
          }
          else if (threadInfo.isExceptionBreak()) {
            // For exception breaks the thread message carries the exception name.
            String exceptionName = threadInfo.getMessage();
            if (exceptionName != null) {
              breakpoint = myRegisteredExceptionBreakpoints.get(exceptionName);
            }
          }
          if ((breakpoint != null) &&
              (breakpoint.getType().isSuspendThreadSupported()) &&
              (breakpoint.getSuspendPolicy() == SuspendPolicy.ALL)) {
            return true;
          }
        }
      }
    }
    return false;
  }

  /** Sends the given resume/step command to every thread known to the debugger. */
  private void passToAllThreads(final ResumeOrStepCommand.Mode mode) {
    dropFrameCaches();
    if (isConnected()) {
      for (PyThreadInfo thread : myDebugger.getThreads()) {
        myDebugger.resumeOrStep(thread.getId(), mode);
      }
    }
  }

  /**
   * Sends the given resume/step command to the thread of the supplied suspend context, or to the
   * first suspended thread when the context does not identify one.
   */
  private void passToCurrentThread(@Nullable XSuspendContext context, final ResumeOrStepCommand.Mode mode) {
    dropFrameCaches();
    if (isConnected()) {
      String threadId = threadIdBeforeResumeOrStep(context);
      for (PyThreadInfo suspendedThread : mySuspendedThreads) {
        if (threadId == null || threadId.equals(suspendedThread.getId())) {
          myDebugger.resumeOrStep(suspendedThread.getId(), mode);
          break;
        }
      }
    }
  }

  @Nullable
  private static String threadIdBeforeResumeOrStep(@Nullable XSuspendContext context) {
    if (context instanceof PySuspendContext) {
      return ((PySuspendContext)context).getActiveExecutionStack().getThreadId();
    }
    else {
      return null;
    }
  }

  protected boolean isConnected() {
    return myDebugger.isConnected();
  }

  protected void disconnect() {
    myDebugger.disconnect();
    cleanUp();
  }

  public boolean isDownloadSources() {
    return myDownloadSources;
  }

  public void setDownloadSources(boolean downloadSources) {
    myDownloadSources = downloadSources;
  }

  protected void cleanUp() {
    mySuspendedThreads.clear();
    myDownloadSources = false;
  }

  /**
   * Implements "Run to Cursor": sets a temporary breakpoint at the given position (using the
   * breakpoint type that matches the document, defaulting to a plain line breakpoint) and resumes
   * the current thread. The document/type lookup runs under a read action.
   */
  @Override
  public void runToPosition(@NotNull final XSourcePosition position, @Nullable XSuspendContext context) {
    dropFrameCaches();
    if (isConnected() && !mySuspendedThreads.isEmpty()) {
      final PySourcePosition pyPosition = myPositionConverter.convertToPython(position);
      String type = PyLineBreakpointType.ID;
      AccessToken lock = ApplicationManager.getApplication().acquireReadActionLock();
      try {
        final Document document = FileDocumentManager.getInstance().getDocument(position.getFile());
        if (document != null) {
          for (XBreakpointType breakpointType : Extensions.getExtensions(XBreakpointType.EXTENSION_POINT_NAME)) {
            if (breakpointType instanceof PyBreakpointType &&
                ((PyBreakpointType)breakpointType).canPutInDocument(getSession().getProject(), document)) {
              type = breakpointType.getId();
              break;
            }
          }
        }
      }
      finally {
        lock.finish();
      }
      myDebugger.setTempBreakpoint(type, pyPosition.getFile(), pyPosition.getLine());
      passToCurrentThread(context, ResumeOrStepCommand.Mode.RESUME);
    }
  }

  /**
   * Evaluates an expression in the context of the current frame.
   *
   * @param execute whether to execute the expression (statement) rather than evaluate it
   * @param doTrunc whether the result may be truncated by the debugger
   */
  @Override
  public PyDebugValue evaluate(final String expression, final boolean execute, boolean doTrunc) throws PyDebuggerException {
    dropFrameCaches();
    final PyStackFrame frame = currentFrame();
    return evaluate(expression, execute, frame, doTrunc);
  }

  private PyDebugValue evaluate(String expression, boolean execute, PyStackFrame frame, boolean trimResult)
    throws PyDebuggerException {
    return myDebugger.evaluate(frame.getThreadId(), frame.getFrameId(), expression, execute, trimResult);
  }

  /** Executes a console command in the current frame; errors are reported via the callback. */
  public void consoleExec(String command, PyDebugCallback<String> callback) {
    dropFrameCaches();
    try {
      final PyStackFrame frame = currentFrame();
      myDebugger.consoleExec(frame.getThreadId(), frame.getFrameId(), command, callback);
    }
    catch (PyDebuggerException e) {
      callback.error(e);
    }
  }

  /** Loads (and caches) the variables of the current frame, overlaying any pending value change. */
  @Override
  @Nullable
  public XValueChildrenList loadFrame() throws PyDebuggerException {
    final PyStackFrame frame = currentFrame();
    //do not reload frame every time it is needed, because due to bug in pdb, reloading frame clears all variable changes
    if (!myStackFrameCache.containsKey(frame.getThreadFrameId())) {
      XValueChildrenList values = myDebugger.loadFrame(frame.getThreadId(), frame.getFrameId());
      myStackFrameCache.put(frame.getThreadFrameId(), values);
    }
    return applyNewValue(myStackFrameCache.get(frame.getThreadFrameId()), frame.getThreadFrameId());
  }

  /**
   * Returns the cached variable list with the user's most recent value change (if any) for this
   * thread-frame substituted in, so the UI shows the updated value without reloading the frame.
   */
  private XValueChildrenList applyNewValue(XValueChildrenList pyDebugValues, String threadFrameId) {
    if (myNewVariableValue.containsKey(threadFrameId)) {
      PyDebugValue newValue = myNewVariableValue.get(threadFrameId);
      XValueChildrenList res = new XValueChildrenList();
      for (int i = 0; i < pyDebugValues.size(); i++) {
        final String name = pyDebugValues.getName(i);
        if (name.equals(newValue.getName())) {
          res.add(name, newValue);
        }
        else {
          res.add(name, pyDebugValues.getValue(i));
        }
      }
      return res;
    }
    else {
      return pyDebugValues;
    }
  }

  @Override
  public XValueChildrenList loadVariable(final PyDebugValue var) throws PyDebuggerException {
    final PyStackFrame frame = currentFrame();
    // The debugger addresses nested values by their full dotted name.
    PyDebugValue debugValue = var.setName(var.getFullName());
    return myDebugger.loadVariable(frame.getThreadId(), frame.getFrameId(), debugValue);
  }

  @Override
  public void loadReferrers(PyReferringObjectsValue var, PyDebugCallback<XValueChildrenList> callback) {
    try {
      final PyStackFrame frame = currentFrame();
      myDebugger.loadReferrers(frame.getThreadId(), frame.getFrameId(), var, callback);
    }
    catch (PyDebuggerException e) {
      callback.error(e);
    }
  }

  /** Changes a variable's value in the debuggee and remembers it for applyNewValue(). */
  @Override
  public void changeVariable(final PyDebugValue var, final String value) throws PyDebuggerException {
    final PyStackFrame frame = currentFrame();
    PyDebugValue newValue = myDebugger.changeVariable(frame.getThreadId(), frame.getFrameId(), var, value);
    myNewVariableValue.put(frame.getThreadFrameId(), newValue);
  }

  @Nullable
  @Override
  public PyReferrersLoader getReferrersLoader() {
    // Lazily created on first request.
    if (myReferrersProvider == null) {
      myReferrersProvider = new PyReferrersLoader(this);
    }
    return myReferrersProvider;
  }

  @Override
  public ArrayChunk getArrayItems(PyDebugValue var, int rowOffset, int colOffset, int rows, int cols, String format)
    throws PyDebuggerException {
    final PyStackFrame frame = currentFrame();
    return myDebugger.loadArrayItems(frame.getThreadId(), frame.getFrameId(), var, rowOffset, colOffset, rows, cols, format);
  }

  @Nullable
  public String loadSource(String path) {
    return myDebugger.loadSource(path);
  }

  @Override
  public boolean canSaveToTemp(String name) {
    final Project project = getSession().getProject();
    return PyDebugSupportUtils.canSaveToTemp(project, name);
  }

  /**
   * Returns the frame to evaluate in: the session's current frame, or the console context frame
   * when none is selected.
   *
   * @throws PyDebuggerException if disconnected or if the process is running with no frame available
   */
  @NotNull
  private PyStackFrame currentFrame() throws PyDebuggerException {
    if (!isConnected()) {
      throw new PyDebuggerException("Disconnected");
    }
    final PyStackFrame frame = (PyStackFrame)getSession().getCurrentStackFrame();
    if (frame == null && myConsoleContextFrame != null) {
      return myConsoleContextFrame;
    }
    if (frame == null) {
      throw new PyDebuggerException("Process is running");
    }
    return frame;
  }

  /**
   * Returns the name of the Python function enclosing the breakpoint's line, or {@code null} when
   * it cannot be determined. PSI access happens under a read action.
   */
  @Nullable
  private String getFunctionName(final XLineBreakpoint breakpoint) {
    if (breakpoint.getSourcePosition() == null) {
      return null;
    }
    final VirtualFile file = breakpoint.getSourcePosition().getFile();
    AccessToken lock = ApplicationManager.getApplication().acquireReadActionLock();
    try {
      final Document document = FileDocumentManager.getInstance().getDocument(file);
      final Project project = getSession().getProject();
      if (document != null) {
        if (file.getFileType() == PythonFileType.INSTANCE) {
          PsiElement psiElement = XDebuggerUtil.getInstance().
            findContextElement(file, document.getLineStartOffset(breakpoint.getSourcePosition().getLine()), project, false);
          PyFunction function = PsiTreeUtil.getParentOfType(psiElement, PyFunction.class);
          if (function != null) {
            return function.getName();
          }
        }
      }
      return null;
    }
    finally {
      lock.finish();
    }
  }

  /**
   * Registers a line breakpoint and, when connected, forwards it to the debugger together with
   * its condition, log expression, enclosing function name and suspend policy. Types that do not
   * support per-thread suspension are sent with policy NONE.
   */
  public void addBreakpoint(final PySourcePosition position, final XLineBreakpoint breakpoint) {
    myRegisteredBreakpoints.put(position, breakpoint);
    if (isConnected()) {
      final String conditionExpression = breakpoint.getConditionExpression() == null
                                         ? null
                                         : breakpoint.getConditionExpression().getExpression();
      final String logExpression = breakpoint.getLogExpressionObject() == null
                                   ? null
                                   : breakpoint.getLogExpressionObject().getExpression();
      SuspendPolicy policy = breakpoint.getType().isSuspendThreadSupported() ? breakpoint.getSuspendPolicy() : SuspendPolicy.NONE;
      myDebugger.setBreakpoint(breakpoint.getType().getId(), position.getFile(), position.getLine(),
                               conditionExpression, logExpression, getFunctionName(breakpoint), policy);
    }
  }

  public void addTemporaryBreakpoint(String typeId, String file, int line) {
    if (isConnected()) {
      myDebugger.setTempBreakpoint(typeId, file, line);
    }
  }

  public void removeBreakpoint(final PySourcePosition position) {
    XLineBreakpoint breakpoint = myRegisteredBreakpoints.get(position);
    if (breakpoint != null) {
      myRegisteredBreakpoints.remove(position);
      if (isConnected()) {
        myDebugger.removeBreakpoint(breakpoint.getType().getId(), position.getFile(), position.getLine());
      }
    }
  }

  public void addExceptionBreakpoint(XBreakpoint<? extends ExceptionBreakpointProperties> breakpoint) {
    myRegisteredExceptionBreakpoints.put(breakpoint.getProperties().getException(), breakpoint);
    if (isConnected()) {
      myDebugger.addExceptionBreakpoint(breakpoint.getProperties());
    }
  }

  public void removeExceptionBreakpoint(XBreakpoint<? extends ExceptionBreakpointProperties> breakpoint) {
    myRegisteredExceptionBreakpoints.remove(breakpoint.getProperties().getException());
    if (isConnected()) {
      myDebugger.removeExceptionBreakpoint(breakpoint.getProperties());
    }
  }

  public Collection<PyThreadInfo> getThreads() {
    return myDebugger.getThreads();
  }

  /**
   * Called when a debuggee thread suspends. Matches the stop against a registered breakpoint
   * (removing stale temporary breakpoints), suspends other threads if the breakpoint's policy is
   * "suspend all", and notifies the session so the UI can show the position.
   */
  @Override
  public void threadSuspended(final PyThreadInfo threadInfo, boolean updateSourcePosition) {
    if (!mySuspendedThreads.contains(threadInfo)) {
      mySuspendedThreads.add(threadInfo);

      final List<PyStackFrameInfo> frames = threadInfo.getFrames();
      if (frames != null) {
        final PySuspendContext suspendContext = createSuspendContext(threadInfo);

        XBreakpoint<?> breakpoint = null;
        if (threadInfo.isStopOnBreakpoint()) {
          final PySourcePosition position = frames.get(0).getPosition();
          breakpoint = myRegisteredBreakpoints.get(position);
          if (breakpoint == null) {
            // Not a registered breakpoint — must have been a temporary one (run-to-cursor).
            myDebugger.removeTempBreakpoint(position.getFile(), position.getLine());
          }
        }
        else if (threadInfo.isExceptionBreak()) {
          String exceptionName = threadInfo.getMessage();
          // Consume the message so it is not shown as a breakpoint message below.
          threadInfo.setMessage(null);
          if (exceptionName != null) {
            breakpoint = myRegisteredExceptionBreakpoints.get(exceptionName);
          }
        }

        if (breakpoint != null) {
          if ((breakpoint.getType().isSuspendThreadSupported()) && (breakpoint.getSuspendPolicy() == SuspendPolicy.ALL)) {
            suspendAllOtherThreads(threadInfo);
          }
        }

        if (updateSourcePosition) {
          if (breakpoint != null) {
            // If the session rejects the breakpoint hit (e.g. condition in UI), resume the thread.
            if (!getSession().breakpointReached(breakpoint, threadInfo.getMessage(), suspendContext)) {
              resume(suspendContext);
            }
          }
          else {
            getSession().positionReached(suspendContext);
          }
        }
      }
    }
  }

  @NotNull
  protected PySuspendContext createSuspendContext(PyThreadInfo threadInfo) {
    return new PySuspendContext(this, threadInfo);
  }

  @Override
  public void threadResumed(final PyThreadInfo threadInfo) {
    mySuspendedThreads.remove(threadInfo);
  }

  /** Invalidates cached frame variables and pending value changes (called before resume/step). */
  private void dropFrameCaches() {
    myStackFrameCache.clear();
    myNewVariableValue.clear();
  }

  /** Completion variants for the debug console prompt; empty when not connected. */
  @NotNull
  public List<PydevCompletionVariant> getCompletions(String prefix) throws Exception {
    if (isConnected()) {
      dropFrameCaches();
      final PyStackFrame frame = currentFrame();
      return myDebugger.getCompletions(frame.getThreadId(), frame.getFrameId(), prefix);
    }
    return Lists.newArrayList();
  }

  @NotNull
  public String getDescription(String prefix) throws Exception {
    if (isConnected()) {
      dropFrameCaches();
      final PyStackFrame frame = currentFrame();
      return myDebugger.getDescription(frame.getThreadId(), frame.getFrameId(), prefix);
    }
    return "";
  }

  // ProcessListener implementation: track the debuggee process lifecycle.
  @Override
  public void startNotified(ProcessEvent event) {
  }

  @Override
  public void processTerminated(ProcessEvent event) {
    myDebugger.close();
  }

  @Override
  public void processWillTerminate(ProcessEvent event, boolean willBeDestroyed) {
    // Suppresses the connection-error dialog in waitForConnection() during shutdown.
    myClosing = true;
  }

  @Override
  public void onTextAvailable(ProcessEvent event, Key outputType) {
  }

  public PyStackFrame createStackFrame(PyStackFrameInfo frameInfo) {
    return new PyStackFrame(getSession().getProject(), this, frameInfo,
                            getPositionConverter().convertFromPython(frameInfo.getPosition()));
  }

  @Override
  public String getCurrentStateMessage() {
    if (getSession().isStopped()) {
      return XDebuggerBundle.message("debugger.state.message.disconnected");
    }
    else if (isConnected()) {
      return XDebuggerBundle.message("debugger.state.message.connected");
    }
    else {
      return getConnectionMessage();
    }
  }

  public void addProcessListener(ProcessListener listener) {
    ProcessHandler handler = doGetProcessHandler();
    if (handler != null) {
      handler.addProcessListener(listener);
    }
  }

  public boolean isWaitingForConnection() {
    return myWaitingForConnection;
  }

  public void setWaitingForConnection(boolean waitingForConnection) {
    myWaitingForConnection = waitingForConnection;
  }

  public int getConnectTimeout() {
    return CONNECTION_TIMEOUT;
  }

  @Nullable
  private XSourcePosition getCurrentFrameSourcePosition() {
    try {
      PyStackFrame frame = currentFrame();
      return frame.getSourcePosition();
    }
    catch (PyDebuggerException e) {
      // No current frame (disconnected/running) — treat as "no position".
      return null;
    }
  }

  public Project getProject() {
    return getSession().getProject();
  }

  /**
   * Resolves a name (optionally qualified by a parent type) to a source position for navigation.
   * Without a parent type the name is resolved by crawling scopes up from the current frame's
   * position; with one, the member is resolved on the parsed parent type, falling back to the
   * parent type's own position.
   */
  @Nullable
  @Override
  public XSourcePosition getSourcePositionForName(String name, String parentType) {
    if (name == null) return null;
    XSourcePosition currentPosition = getCurrentFrameSourcePosition();

    final PsiFile file = getPsiFile(currentPosition);

    if (file == null) return null;

    if (Strings.isNullOrEmpty(parentType)) {
      final Ref<PsiElement> elementRef = resolveInCurrentFrame(name, currentPosition, file);
      return elementRef.isNull() ? null : XDebuggerUtil.getInstance().createPositionByElement(elementRef.get());
    }
    else {
      final PyType parentDef = resolveTypeFromString(parentType, file);
      if (parentDef == null) {
        return null;
      }
      List<? extends RatedResolveResult> results =
        parentDef.resolveMember(name, null, AccessDirection.READ, PyResolveContext.noImplicits());
      if (results != null && !results.isEmpty()) {
        return XDebuggerUtil.getInstance().createPositionByElement(results.get(0).getElement());
      }
      else {
        return typeToPosition(parentDef); // at least try to return parent
      }
    }
  }

  /**
   * Crawls scopes upward from the element at the current position, capturing the first
   * declaration of {@code name}; for import elements only a matching visible name stops the crawl.
   */
  @NotNull
  private static Ref<PsiElement> resolveInCurrentFrame(final String name, XSourcePosition currentPosition, PsiFile file) {
    final Ref<PsiElement> elementRef = Ref.create();
    PsiElement currentElement = file.findElementAt(currentPosition.getOffset());

    if (currentElement == null) {
      return elementRef;
    }

    PyResolveUtil.scopeCrawlUp(new PsiScopeProcessor() {
      @Override
      public boolean execute(@NotNull PsiElement element, @NotNull ResolveState state) {
        if ((element instanceof PyImportElement)) {
          PyImportElement importElement = (PyImportElement)element;
          if (name.equals(importElement.getVisibleName())) {
            if (elementRef.isNull()) {
              elementRef.set(element);
            }
            return false;
          }
          return true;
        }
        else {
          if (elementRef.isNull()) {
            elementRef.set(element);
          }
          return false;
        }
      }

      @Nullable
      @Override
      public <T> T getHint(@NotNull Key<T> hintKey) {
        return null;
      }

      @Override
      public void handleEvent(@NotNull Event event, @Nullable Object associated) {
      }
    }, currentElement, name, null);
    return elementRef;
  }

  @Nullable
  private PsiFile getPsiFile(XSourcePosition currentPosition) {
    if (currentPosition == null) {
      return null;
    }
    return PsiManager.getInstance(getProject()).findFile(currentPosition.getFile());
  }

  /** Resolves a Python type name (relative to the current frame's file) to its source position. */
  @Nullable
  @Override
  public XSourcePosition getSourcePositionForType(String typeName) {
    XSourcePosition currentPosition = getCurrentFrameSourcePosition();
    final PsiFile file = getPsiFile(currentPosition);

    if (file == null || typeName == null || !(file instanceof PyFile)) return null;

    final PyType pyType = resolveTypeFromString(typeName, file);
    return pyType == null ? null : typeToPosition(pyType);
  }

  /** Maps a class type to its class declaration, a module type to its module file; else null. */
  @Nullable
  private static XSourcePosition typeToPosition(PyType pyType) {
    final PyClassType classType = PyUtil.as(pyType, PyClassType.class);

    if (classType != null) {
      return XDebuggerUtil.getInstance().createPositionByElement(classType.getPyClass());
    }

    final PyModuleType moduleType = PyUtil.as(pyType, PyModuleType.class);
    if (moduleType != null) {
      return XDebuggerUtil.getInstance().createPositionByElement(moduleType.getModule());
    }
    return null;
  }

  /**
   * Parses a type name into a {@link PyType}. Unqualified names are tried by simple lookup first;
   * otherwise (or on failure) the name is parsed as a type annotation in a dummy file associated
   * with the current file's module. The "__builtin__." prefix (Python 2) is stripped up front.
   */
  private PyType resolveTypeFromString(String typeName, PsiFile file) {
    typeName = typeName.replace("__builtin__.", "");
    PyType pyType = null;
    if (!typeName.contains(".")) {
      pyType = PyTypeParser.getTypeByName(file, typeName);
    }
    if (pyType == null) {
      PyElementGenerator generator = PyElementGenerator.getInstance(getProject());
      PyPsiFacade psiFacade = PyPsiFacade.getInstance(getProject());
      PsiFile dummyFile = generator.createDummyFile(((PyFile)file).getLanguageLevel(), "");
      Module moduleForFile = ModuleUtilCore.findModuleForPsiElement(file);
      dummyFile.putUserData(ModuleUtilCore.KEY_MODULE, moduleForFile);
      pyType = psiFacade.parseTypeAnnotation(typeName, dummyFile);
    }
    return pyType;
  }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.customize;

import com.intellij.ide.WelcomeWizardUtil;
import com.intellij.ide.cloudConfig.CloudConfigProvider;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.ide.plugins.PluginManagerCore;
import com.intellij.ide.plugins.RepositoryHelper;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Pair;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import icons.PlatformImplIcons;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.io.File;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;

/**
 * Model behind the "customize IDE" / welcome-wizard plugin pages: the tree of bundled
 * plugin groups, the curated list of featured plugins, and the enabled/disabled state of
 * plugin ids (including dependency propagation).
 * <p>
 * NOTE(review): not thread-safe; repository plugins are loaded on a background
 * {@link SwingWorker} and delivered on the EDT via {@link #setLoadingCallback}.
 */
public class PluginGroups {
  static final String CORE = "Core";
  // Max length (chars) of the auto-built group description before it is elided with "...".
  private static final int MAX_DESCR_LENGTH = 55;
  static final String IDEA_VIM_PLUGIN_ID = "IdeaVIM";

  // Ordered tree of plugin groups shown in the wizard.
  private final List<Group> myTree = new ArrayList<>();
  // Featured plugins: display name -> "category:description:pluginId" descriptor string.
  private final Map<String, String> myFeaturedPlugins = new LinkedHashMap<>();
  // Lazily built in initIfNeeded(): group name -> id sets belonging to it.
  private final Map<String, List<IdSet>> myGroups = new LinkedHashMap<>();
  // Lazily built in initIfNeeded(): group name -> HTML description.
  private final Map<String, String> myDescriptions = new LinkedHashMap<>();
  private final List<IdeaPluginDescriptor> myPluginsFromRepository = new ArrayList<>();
  private final Collection<PluginId> myDisabledPluginIds = new HashSet<>();
  private final List<? extends IdeaPluginDescriptor> myAllPlugins;
  private boolean myInitialized;
  // Featured plugin ids the user has ticked for installation.
  private final Set<String> myFeaturedIds = new HashSet<>();
  private Runnable myLoadingCallback;

  public PluginGroups() {
    myAllPlugins = PluginManagerCore.loadUncachedDescriptors();
    // Fetch the marketplace plugin list in the background; failures (offline) degrade to empty.
    SwingWorker worker = new SwingWorker<List<IdeaPluginDescriptor>, Object>() {
      @Override
      protected List<IdeaPluginDescriptor> doInBackground() {
        try {
          return RepositoryHelper.loadPlugins(null);
        }
        catch (Exception e) {
          //OK, it's offline
          return Collections.emptyList();
        }
      }

      @Override
      protected void done() {
        try {
          myPluginsFromRepository.addAll(get());
          if (myLoadingCallback != null) myLoadingCallback.run();
        }
        catch (InterruptedException | ExecutionException e) {
          // Still notify so the UI leaves its loading state.
          if (myLoadingCallback != null) myLoadingCallback.run();
        }
      }
    };
    worker.execute();
    PluginManagerCore.loadDisabledPlugins(new File(PathManager.getConfigPath()).getPath(), myDisabledPluginIds);

    // New-style initialization, then the deprecated map-based overload for subclasses that
    // still override it; its results are converted into Group objects and appended.
    initGroups(myTree, myFeaturedPlugins);
    Map<String, Pair<Icon, List<String>>> treeMap = new LinkedHashMap<>();
    initGroups(treeMap, myFeaturedPlugins);
    for (Entry<String, Pair<Icon, List<String>>> entry : treeMap.entrySet()) {
      myTree.add(new Group(entry.getKey(), entry.getValue().getFirst(), null, entry.getValue().getSecond()));
    }
    initCloudPlugins();
  }

  /** Registers a callback run once repository plugins are available (or immediately if already loaded). */
  void setLoadingCallback(Runnable loadingCallback) {
    myLoadingCallback = loadingCallback;
    if (!myPluginsFromRepository.isEmpty()) {
      myLoadingCallback.run();
    }
  }

  /**
   * Merges plugins already installed through the cloud-config provider into the featured
   * list: drops featured entries that point to an installed plugin, then re-adds each
   * installed plugin under a "#Cloud" descriptor.
   */
  private void initCloudPlugins() {
    CloudConfigProvider provider = CloudConfigProvider.getProvider();
    if (provider == null) {
      return;
    }
    List<PluginId> plugins = provider.getInstalledPlugins();
    if (plugins.isEmpty()) {
      return;
    }
    for (Iterator<Entry<String, String>> I = myFeaturedPlugins.entrySet().iterator(); I.hasNext(); ) {
      String value = I.next().getValue();
      // Descriptor strings end with ":<pluginId>" — see initFeaturedPlugins().
      if (ContainerUtil.find(plugins, plugin -> value.endsWith(":" + plugin)) != null) {
        I.remove();
      }
    }
    for (PluginId plugin : plugins) {
      myFeaturedPlugins.put(plugin.getIdString(), "#Cloud:#Cloud:" + plugin);
    }
  }

  /**
   * @deprecated use {@link #initGroups(List, Map)} instead
   */
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2020.3")
  protected void initGroups(Map<String, Pair<Icon, List<String>>> tree, Map<String, String> featuredPlugins) { }

  /**
   * Populates the default plugin groups. Each list entry is either a bare plugin id or a
   * "Title:id1,id2,..." set that is shown as one toggle (parsed later by {@code IdSet}).
   */
  protected void initGroups(@NotNull List<Group> groups, @NotNull Map<String, String> featuredPlugins) {
    groups.add(new Group(CORE, null, null, Arrays.asList(
      "com.intellij.copyright",
      "com.intellij.java-i18n",
      "org.intellij.intelliLang",
      "com.intellij.properties",
      "Refactor-X",//?
      "Type Migration",
      "ZKM"
    )));
    groups.add(new Group("Java Frameworks", PlatformImplIcons.JavaFrameworks, null, Arrays.asList(
      "Spring:com.intellij.spring.batch," +
      "com.intellij.spring.data," +
      "com.intellij.spring.integration," +
      "com.intellij.spring.osgi," +
      "com.intellij.spring.security," +
      "com.intellij.spring," +
      "com.intellij.spring.webflow," +
      "com.intellij.spring.ws,com.intellij.aop",

      "Java EE:com.intellij.javaee.batch," +
      "com.intellij.beanValidation," +
      "com.intellij.cdi," +
      "com.intellij.javaee," +
      "com.intellij.jsf," +
      "com.intellij.javaee.extensions," +
      "com.jetbrains.restWebServices," +
      "Web Services (JAX-WS)," +
      "com.intellij.javaee.webSocket," +
      "com.intellij.jsp," +
      "com.intellij.persistence",

      "com.intellij.hibernate",
      "com.intellij.reactivestreams",
      "com.intellij.frameworks.java.sql",
      // preview ends

      "org.intellij.grails",

      "com.intellij.micronaut",
      "com.intellij.quarkus",
      "com.intellij.helidon",

      "com.intellij.appengine",
      "com.intellij.gwt",
      "JBoss Seam:com.intellij.seam,com.intellij.seam.pages,com.intellij.seam.pageflow",
      "JBoss jBPM:JBPM",
      "Struts:com.intellij.struts2",
      "com.intellij.tapestry",
      "com.intellij.guice",

      "com.intellij.freemarker",
      "com.intellij.velocity",
      "com.intellij.aspectj",
      "Osmorc"
    )));
    groups.add(new Group("Build Tools", PlatformImplIcons.BuildTools, null, Arrays.asList(
      "AntSupport",
      "Maven:org.jetbrains.idea.maven,org.jetbrains.idea.maven.ext",
      "org.jetbrains.plugins.gradle"
    )));
    groups.add(new Group("JavaScript Development", PlatformImplIcons.WebDevelopment,
                         "HTML, style sheets, JavaScript, TypeScript, Node.js...", Arrays.asList(
      "HTML:HtmlTools,W3Validators",
      "JavaScript and TypeScript:JavaScript,JavaScriptDebugger,JSIntentionPowerPack",
      "Node.js:NodeJS",
      "com.intellij.css",
      "org.jetbrains.plugins.less",
      "org.jetbrains.plugins.sass",
      "org.jetbrains.plugins.stylus",
      "org.jetbrains.plugins.haml",
      "AngularJS",
      "org.coffeescript",
      "com.intellij.flex",
      "com.jetbrains.restClient",
      "com.intellij.swagger"
    )));

    addVcsGroup(groups);

    groups.add(new Group("Test Tools", PlatformImplIcons.TestTools, null, Arrays.asList(
      "JUnit",
      "TestNG-J",
      "cucumber-java",
      "cucumber",
      "Coverage:Coverage,Emma"
    )));
    groups.add(new Group("Application Servers", PlatformImplIcons.ApplicationServers, null, Arrays.asList(
      "com.intellij.javaee.view",
      "Geronimo",
      "GlassFish",
      "JBoss",
      "Jetty",
      "Resin",
      "Tomcat",
      "Weblogic",
      "WebSphere",
      "com.intellij.dmserver",
      "JSR45Plugin"
    )));
    groups.add(new Group("Clouds", PlatformImplIcons.Clouds, null, Collections.singletonList(
      "CloudFoundry"
    )));
    //myTree.put("Groovy", Arrays.asList("org.intellij.grails"));
    //TODO Scala -> Play 2.x (Play 2.0 Support)
    groups.add(new Group("Swing", PlatformImplIcons.Swing, null, Collections.singletonList(
      "com.intellij.uiDesigner"//TODO JavaFX?
    )));
    groups.add(new Group("Android", PlatformImplIcons.Android, null, Arrays.asList(
      "org.jetbrains.android",
      "com.intellij.android-designer")));
    groups.add(new Group("Database Tools", PlatformImplIcons.DatabaseTools, null, Collections.singletonList(
      "com.intellij.database"
    )));
    groups.add(new Group("Other Tools", PlatformImplIcons.OtherTools, null, Arrays.asList(
      "ByteCodeViewer",
      "com.intellij.dsm",
      "org.jetbrains.idea.eclipse",
      "org.jetbrains.debugger.streams",
      "Remote Access:com.jetbrains.plugins.webDeployment,org.jetbrains.plugins.remote-run",
      "Task Management:com.intellij.tasks,com.intellij.tasks.timeTracking",
      "org.jetbrains.plugins.terminal",
      "com.intellij.diagram",
      "org.jetbrains.plugins.yaml",
      "XSLT and XPath:XPathView,XSLT-Debugger"
    )));
    groups.add(new Group("Plugin Development", PlatformImplIcons.PluginDevelopment, null, Collections.singletonList("DevKit")));

    initFeaturedPlugins(featuredPlugins);
  }

  /** Populates the default featured plugins ("Category:Description:pluginId" descriptors). */
  protected void initFeaturedPlugins(@NotNull Map<String, String> featuredPlugins) {
    featuredPlugins.put("Scala", "Custom Languages:Plugin for Scala language support:org.intellij.scala");
    featuredPlugins.put("Live Edit Tool",
                        "Web Development:Provides live edit HTML/CSS/JavaScript:com.intellij.plugins.html.instantEditing");
    addVimPlugin(featuredPlugins);
    featuredPlugins.put("Atlassian Connector",
                        "Tools Integration:Integration for Atlassian JIRA, Bamboo, Crucible, FishEye:atlassian-idea-plugin");
    addTrainingPlugin(featuredPlugins);
  }

  /** Adds the common "Version Controls" group; kept as a helper so product-specific subclasses can reuse it. */
  protected static void addVcsGroup(@NotNull List<Group> groups) {
    groups.add(new Group("Version Controls", PlatformImplIcons.VersionControls, null, Arrays.asList(
      "CVS",
      "Git4Idea",
      "org.jetbrains.plugins.github",
      "hg4idea",
      "PerforceDirectPlugin",
      "Subversion",
      "TFS"
    )));
  }

  public static void addVimPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("IdeaVim", "Editor:Emulates Vim editor:" + IDEA_VIM_PLUGIN_ID);
  }

  public static void addAwsPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("AWS Toolkit",
                        "Cloud Support:Create, test, and debug serverless applications built using the AWS Serverless Application Model:aws.toolkit");
  }

  public static void addTrainingPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("IDE Features Trainer", "Code tools:Learn basic shortcuts and essential features interactively:training");
  }

  protected static void addLuaPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("Lua", "Custom Languages:Lua language support:Lua");
  }

  public static void addRustPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("Rust", "Custom Languages:Rust language support:org.rust.lang");
  }

  public static void addMarkdownPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("Markdown", "Custom Languages:Markdown language support:org.intellij.plugins.markdown");
  }

  public static void addRPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("R", "Custom Languages:R language support:R4Intellij");
  }

  protected static void addConfigurationServerPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("Configuration Server",
                        "Team Work:Supports sharing settings between installations of IntelliJ Platform based products used by the same developer on different computers:IdeaServerPlugin");
  }

  public static void addTeamCityPlugin(Map<String, String> featuredPlugins) {
    featuredPlugins.put("TeamCity Integration",
                        "Tools Integration:Integration with JetBrains TeamCity - innovative solution for continuous integration and build management:JetBrains TeamCity Plugin");
  }

  /**
   * Lazily derives myGroups/myDescriptions from the group tree: parses each group's id-set
   * strings, concatenates their titles into a description (elided past MAX_DESCR_LENGTH),
   * and prefers an explicit group description when one is set.
   */
  private void initIfNeeded() {
    if (myInitialized) return;
    myInitialized = true;
    for (Group g : myTree) {
      final String group = g.getName();
      if (CORE.equals(group)) continue;

      List<IdSet> idSets = new ArrayList<>();
      StringBuilder description = new StringBuilder();
      for (String idDescription : g.getPluginIdDescription()) {
        IdSet idSet = new IdSet(this, idDescription);
        String idSetTitle = idSet.getTitle();
        if (idSetTitle == null) continue;
        idSets.add(idSet);
        if (description.length() > 0) {
          description.append(", ");
        }
        description.append(idSetTitle);
      }
      myGroups.put(group, idSets);

      if (description.length() > MAX_DESCR_LENGTH) {
        // Cut at the last comma before the limit so we don't break mid-title.
        int lastWord = description.lastIndexOf(",", MAX_DESCR_LENGTH);
        description.delete(lastWord, description.length()).append("...");
      }
      String groupDescription = g.getDescription();
      if (groupDescription != null) {
        // An explicit description replaces the auto-built title list entirely.
        description = new StringBuilder(groupDescription);
      }
      description.insert(0, "<html><body><center><i>");
      myDescriptions.put(group, description.toString());
    }
  }

  @NotNull
  List<Group> getTree() {
    initIfNeeded();
    return myTree;
  }

  Map<String, String> getFeaturedPlugins() {
    initIfNeeded();
    return myFeaturedPlugins;
  }

  public String getDescription(String group) {
    initIfNeeded();
    return myDescriptions.get(group);
  }

  public List<IdSet> getSets(String group) {
    initIfNeeded();
    return myGroups.get(group);
  }

  /** Linear scan over all loaded descriptors; PluginId identity comparison (ids are interned by the platform). */
  @Nullable
  IdeaPluginDescriptor findPlugin(@NotNull PluginId id) {
    for (IdeaPluginDescriptor pluginDescriptor : myAllPlugins) {
      if (pluginDescriptor.getPluginId() == id) {
        return pluginDescriptor;
      }
    }
    return null;
  }

  boolean isIdSetAllEnabled(IdSet set) {
    for (PluginId id : set.getIds()) {
      if (!isPluginEnabled(id)) {
        return false;
      }
    }
    return true;
  }

  void setIdSetEnabled(@NotNull IdSet set, boolean enabled) {
    for (PluginId id : set.getIds()) {
      setPluginEnabledWithDependencies(id, enabled);
    }
  }

  @NotNull
  Collection<PluginId> getDisabledPluginIds() {
    return Collections.unmodifiableCollection(myDisabledPluginIds);
  }

  List<IdeaPluginDescriptor> getPluginsFromRepository() {
    return myPluginsFromRepository;
  }

  boolean isPluginEnabled(@NotNull PluginId pluginId) {
    initIfNeeded();
    return !myDisabledPluginIds.contains(pluginId);
  }

  /** Finds the IdSet containing the given id, or null; identity comparison as in findPlugin(). */
  private IdSet getSet(@NotNull PluginId pluginId) {
    initIfNeeded();
    for (List<IdSet> sets : myGroups.values()) {
      for (IdSet set : sets) {
        for (PluginId id : set.getIds()) {
          if (id == pluginId) {
            return set;
          }
        }
      }
    }
    return null;
  }

  /** Records a featured-plugin checkbox change and pushes the full selection to the wizard state. */
  void setFeaturedPluginEnabled(String pluginId, boolean enabled) {
    if (enabled) {
      myFeaturedIds.add(pluginId);
    }
    else {
      myFeaturedIds.remove(pluginId);
    }
    WelcomeWizardUtil.setFeaturedPluginsToInstall(myFeaturedIds);
  }

  /**
   * Enables/disables a plugin together with its dependency closure: when enabling, all
   * required dependencies are enabled too; when disabling, all plugins that require it are
   * disabled. The affected set is then widened to whole IdSets so a set never ends up
   * partially toggled.
   */
  void setPluginEnabledWithDependencies(@NotNull PluginId pluginId, boolean enabled) {
    initIfNeeded();
    Set<PluginId> ids = new HashSet<>();
    collectInvolvedIds(pluginId, enabled, ids);
    Set<IdSet> sets = new HashSet<>();
    for (PluginId id : ids) {
      IdSet set = getSet(id);
      if (set != null) {
        sets.add(set);
      }
    }
    for (IdSet set : sets) {
      ids.addAll(set.getIds());
    }
    for (PluginId id : ids) {
      if (enabled) {
        myDisabledPluginIds.remove(id);
      }
      else {
        myDisabledPluginIds.add(id);
      }
    }
  }

  /**
   * Recursively collects ids affected by toggling {@code pluginId}: required dependencies
   * when enabling, non-optional dependents when disabling.
   */
  private void collectInvolvedIds(PluginId pluginId, boolean toEnable, Set<PluginId> ids) {
    ids.add(pluginId);
    if (toEnable) {
      for (PluginId id : getNonOptionalDependencies(pluginId)) {
        collectInvolvedIds(id, true, ids);
      }
    }
    else {
      Condition<PluginId> condition = id -> pluginId == id;
      for (final IdeaPluginDescriptor plugin : myAllPlugins) {
        // Depends on pluginId, and not merely optionally — must be disabled along with it.
        if (null != ContainerUtil.find(plugin.getDependentPluginIds(), condition) &&
            null == ContainerUtil.find(plugin.getOptionalDependentPluginIds(), condition)) {
          collectInvolvedIds(plugin.getPluginId(), false, ids);
        }
      }
    }
  }

  /** Required (non-optional, non-core) dependency ids of the given plugin; empty when the plugin is unknown. */
  @NotNull
  private List<PluginId> getNonOptionalDependencies(PluginId id) {
    List<PluginId> result = new ArrayList<>();
    IdeaPluginDescriptor descriptor = findPlugin(id);
    if (descriptor != null) {
      for (PluginId pluginId : descriptor.getDependentPluginIds()) {
        if (pluginId == PluginManagerCore.CORE_ID) {
          continue;
        }
        if (!ArrayUtil.contains(pluginId, descriptor.getOptionalDependentPluginIds())) {
          result.add(pluginId);
        }
      }
    }
    return result;
  }

  /** Immutable value describing one wizard group: name, icon, optional description, and its id-set descriptor strings. */
  public static class Group {
    private final String myName;
    private final Icon myIcon;
    private final String myDescription;
    private final List<String> myPluginIdDescription;

    public Group(@NotNull String name, @Nullable Icon icon, @Nullable String description, @NotNull List<String> pluginIdDescription) {
      myName = name;
      myIcon = icon;
      myDescription = description;
      myPluginIdDescription = pluginIdDescription;
    }

    @NotNull
    public String getName() {
      return myName;
    }

    @Nullable
    public Icon getIcon() {
      return myIcon;
    }

    @Nullable
    public String getDescription() {
      return myDescription;
    }

    @NotNull
    public List<String> getPluginIdDescription() {
      return myPluginIdDescription;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.olingo.ext.proxy.context;

import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.olingo.ext.proxy.commons.EntityInvocationHandler;

/**
 * Entity context: tracks the entities attached to the current transaction together with
 * their {@link AttachedEntityStatus}, plus a list of extra delete requests.
 */
public class EntityContext implements Iterable<AttachedEntity> {

  /**
   * Entities that carry a non-null key, indexed by UUID for key-based lookup.
   */
  private final Map<EntityUUID, EntityInvocationHandler> searchableEntities =
          new HashMap<EntityUUID, EntityInvocationHandler>();

  /**
   * Every attached entity (including new ones) mapped to its status; a LinkedHashMap so
   * that attachment order is preserved for iteration.
   */
  private final Map<EntityInvocationHandler, AttachedEntityStatus> allAttachedEntities =
          new LinkedHashMap<EntityInvocationHandler, AttachedEntityStatus>();

  /**
   * Delete requests to be performed that do not correspond to attached entities,
   * in insertion order.
   */
  private final List<URI> furtherDeletes = new ArrayList<URI>();

  /**
   * Attaches a freshly created entity with status <tt>NEW</tt>.
   *
   * @see AttachedEntityStatus
   * @param entity entity to be attached.
   */
  public void attachNew(final EntityInvocationHandler entity) {
    if (allAttachedEntities.containsKey(entity)) {
      throw new IllegalStateException("An entity with the same key has already been attached");
    }
    allAttachedEntities.put(entity, AttachedEntityStatus.NEW);
  }

  /**
   * Attaches an existing entity with status <tt>ATTACHED</tt>.
   *
   * @see AttachedEntityStatus
   * @param entity entity to be attached.
   */
  public void attach(final EntityInvocationHandler entity) {
    attach(entity, AttachedEntityStatus.ATTACHED);
  }

  /**
   * Attaches an existing entity with the given status.
   *
   * @see AttachedEntityStatus
   * @param entity entity to be attached.
   * @param status status.
   */
  public void attach(final EntityInvocationHandler entity, final AttachedEntityStatus status) {
    attach(entity, status, false);
  }

  /**
   * Attaches an existing entity with the given status, optionally even when it has no
   * entity-set URI.
   *
   * @param entity entity to be attached.
   * @param status status.
   * @param force force attach.
   */
  public void attach(final EntityInvocationHandler entity, final AttachedEntityStatus status, final boolean force) {
    if (isAttached(entity)) {
      throw new IllegalStateException("An entity with the same profile has already been attached");
    }

    // Without an entity-set URI the entity cannot be tracked unless forced.
    final boolean trackable = force || entity.getUUID().getEntitySetURI() != null;
    if (!trackable) {
      return;
    }

    allAttachedEntities.put(entity, status);
    if (entity.getUUID().getKey() != null) {
      searchableEntities.put(entity.getUUID(), entity);
    }
  }

  /**
   * Detaches the given entity from both indexes.
   *
   * @param entity entity to be detached.
   */
  public void detach(final EntityInvocationHandler entity) {
    searchableEntities.remove(entity.getUUID());
    allAttachedEntities.remove(entity);
  }

  /**
   * Clears the whole context: attached entities, key index and pending deletes.
   */
  public void detachAll() {
    allAttachedEntities.clear();
    searchableEntities.clear();
    furtherDeletes.clear();
  }

  /**
   * Looks up an attached entity by key.
   *
   * @param uuid entity key.
   * @return retrieved entity, or {@code null} when no entity has that key.
   */
  public EntityInvocationHandler getEntity(final EntityUUID uuid) {
    return searchableEntities.get(uuid);
  }

  /**
   * Returns the status of an attached entity.
   *
   * @param entity entity to be retrieved.
   * @return attached entity status.
   */
  public AttachedEntityStatus getStatus(final EntityInvocationHandler entity) {
    if (!isAttached(entity)) {
      throw new IllegalStateException("Entity is not in the context");
    }
    return allAttachedEntities.get(entity);
  }

  /**
   * Moves an attached entity to a new status, enforcing the legal transitions:
   * ATTACHED→LINKED, ATTACHED→CHANGED, LINKED→CHANGED and anything→DELETED.
   * Illegal transitions are silently ignored.
   *
   * @param entity attached entity to be modified.
   * @param status new status.
   */
  public void setStatus(final EntityInvocationHandler entity, final AttachedEntityStatus status) {
    if (!isAttached(entity)) {
      throw new IllegalStateException("Entity is not in the context");
    }

    final AttachedEntityStatus current = allAttachedEntities.get(entity);

    // A deleted entity is frozen: no further transitions allowed.
    if (current == AttachedEntityStatus.DELETED) {
      throw new IllegalStateException("Entity has been previously deleted");
    }

    // NEW and ATTACHED are initial states only; they cannot be re-assigned.
    if (status == AttachedEntityStatus.NEW || status == AttachedEntityStatus.ATTACHED) {
      throw new IllegalStateException("Entity status has already been initialized");
    }

    final boolean legalTransition =
            status == AttachedEntityStatus.DELETED
            || (current == AttachedEntityStatus.ATTACHED
                && (status == AttachedEntityStatus.LINKED || status == AttachedEntityStatus.CHANGED))
            || (current == AttachedEntityStatus.LINKED && status == AttachedEntityStatus.CHANGED);

    if (legalTransition) {
      allAttachedEntities.put(entity, status);
    }
  }

  /**
   * Tells whether an entity is already attached to this context.
   *
   * @param entity entity.
   * @return <tt>true</tt> if is attached; <tt>false</tt> otherwise.
   */
  public boolean isAttached(final EntityInvocationHandler entity) {
    // avoid attach for null entities (coming from complexes created from container ...)
    if (entity == null) {
      return true;
    }
    if (allAttachedEntities.containsKey(entity)) {
      return true;
    }
    return entity.getUUID().getKey() != null && searchableEntities.containsKey(entity.getUUID());
  }

  /**
   * Iterates attached entities in attachment order.
   *
   * @return attached entities iterator.
   */
  @Override
  public Iterator<AttachedEntity> iterator() {
    final List<AttachedEntity> snapshot = new ArrayList<AttachedEntity>(allAttachedEntities.size());
    for (Map.Entry<EntityInvocationHandler, AttachedEntityStatus> e : allAttachedEntities.entrySet()) {
      snapshot.add(new AttachedEntity(e.getKey(), e.getValue()));
    }
    return snapshot.iterator();
  }

  /**
   * Pending standalone delete requests, in insertion order.
   */
  public List<URI> getFurtherDeletes() {
    return furtherDeletes;
  }

  /**
   * Queues a standalone delete request.
   */
  public void addFurtherDeletes(final URI uri) {
    furtherDeletes.add(uri);
  }
}
//Copyright 2014 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.adwords.awreporting.server.kratu; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import com.google.api.ads.adwords.awreporting.model.entities.Report; import com.google.api.ads.adwords.awreporting.model.entities.ReportAccount; import com.google.api.ads.adwords.awreporting.model.entities.ReportAd; import com.google.api.ads.adwords.awreporting.model.entities.ReportAdGroup; import com.google.api.ads.adwords.awreporting.model.entities.ReportCampaign; import com.google.api.ads.adwords.awreporting.model.entities.ReportKeywords; import com.google.api.ads.adwords.awreporting.model.entities.ReportPlaceholderFeedItem; import com.google.api.ads.adwords.awreporting.model.util.DateUtil; import com.google.api.ads.adwords.awreporting.server.entities.Account; import com.google.api.ads.adwords.awreporting.server.entities.Kratu; import com.google.api.ads.adwords.awreporting.server.util.StorageHelper; import com.google.api.client.util.Lists; import com.google.common.collect.ImmutableList; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import 
java.math.BigDecimal; import java.math.RoundingMode; import java.util.Calendar; import java.util.Date; import java.util.List; /** * Test case for the {@code KratuCompute} class. * * @author jtoledo@google.com (Julian Toledo) */ public class KratuComputeTest { private static final RoundingMode ROUNDING = RoundingMode.HALF_UP; private static final int SCALE = 2; private final List<Kratu> dailyKratus = Lists.newArrayList(); private final Calendar day1 = Calendar.getInstance(); private final Calendar day1LastMinute = Calendar.getInstance(); private final Calendar day2 = Calendar.getInstance(); private Kratu kratu1; private Kratu kratu2; private Account account; @Mock private StorageHelper storageHelper; @Captor ArgumentCaptor<Class<? extends Report>> classReportCaptor; @Before public <R extends Report> void setUp() { day1.setTime(DateUtil.parseDateTime("20140601").toDate()); day1.set(Calendar.HOUR_OF_DAY, 0); day1.set(Calendar.MINUTE, 0); day1.set(Calendar.SECOND, 0); day1.add(Calendar.HOUR, 12); day1LastMinute.setTime(DateUtil.parseDateTime("20140601").toDate()); day1LastMinute.set(Calendar.HOUR_OF_DAY, 23); day1LastMinute.set(Calendar.MINUTE, 59); day1LastMinute.set(Calendar.SECOND, 59); day1LastMinute.add(Calendar.HOUR, 12); day2.setTime(DateUtil.parseDateTime("20140602").toDate()); day2.set(Calendar.HOUR_OF_DAY, 0); day2.set(Calendar.MINUTE, 0); day2.set(Calendar.SECOND, 0); day2.add(Calendar.HOUR, 12); account = new Account(); account.setCurrencyCode("EUR"); account.setDateTimeZone("Europe/Paris"); account.setId("777"); account.setName("Account1"); kratu1 = new Kratu(123L, account, day1.getTime()); kratu2 = new Kratu(123L, account, day2.getTime()); kratu1.setAccountActive("Yes"); kratu1.setAccountSuspended(false); kratu1.setAverageCpcDisplay(new BigDecimal(1L)); kratu1.setAverageCpcSearch(new BigDecimal(2L)); kratu1.setAverageCpmDisplay(new BigDecimal(3L)); kratu1.setAverageCpmSearch(new BigDecimal(4L)); kratu1.setAveragePositionDisplay(new BigDecimal(5L)); 
kratu1.setAveragePositionSearch(new BigDecimal(6L)); kratu1.setConversions(7L); kratu1.setCtrDisplay(new BigDecimal(8L)); kratu1.setCtrSearch(new BigDecimal(9L)); kratu1.setElegibleImpressionsDisplay(new BigDecimal(10L)); kratu1.setElegibleImpressionsSearch(new BigDecimal(11L)); kratu1.setImpressionsDisplay(12L); kratu1.setImpressionsSearch(13L); kratu1.setLostImpressionsDueToBidAdRankDisplay(new BigDecimal(14L)); kratu1.setLostImpressionsDueToBidAdRankSearch(new BigDecimal(15L)); kratu1.setLostImpressionsDueToBudgetDisplay(new BigDecimal(16L)); kratu1.setLostImpressionsDueToBudgetSearch(new BigDecimal(17L)); kratu1.setNumberOfActiveAdGroups(new BigDecimal(18L)); kratu1.setNumberOfActiveAds(new BigDecimal(19L)); kratu1.setNumberOfActiveAverageQualityScoreKeywords(new BigDecimal(20L)); kratu1.setNumberOfActiveBroadMatchingKeywords(new BigDecimal(21L)); kratu1.setNumberOfActiveCampaigns(new BigDecimal(22L)); kratu1.setNumberOfActiveExactMatchingKeywords(new BigDecimal(23L)); kratu1.setNumberOfActiveGoodQualityScoreKeywords(new BigDecimal(24L)); kratu1.setNumberOfActivePhraseMatchingKeywords(new BigDecimal(25L)); kratu1.setNumberOfActivePoorQualityScoreKeywords(new BigDecimal(26L)); kratu1.setNumberOfAdGroupNegativeActiveKeywords(new BigDecimal(27L)); kratu1.setNumberOfAdgroupsWithoneActiveAd(new BigDecimal(28L)); kratu1.setNumberOfAdgroupsWithTwoActiveAds(new BigDecimal(29L)); kratu1.setNumberOfCampaignNegativeActiveKeywords(new BigDecimal(30L)); kratu1.setNumberOfCampaignsWithCallExtensionEnabled(new BigDecimal(31L)); kratu1.setNumberOfCampaignsWithLocationExtensionEnabled(new BigDecimal(32L)); kratu1.setNumberOfCampaignsWithSiteLinksEnabled(new BigDecimal(33L)); kratu1.setNumberOfDisapprovedAds(new BigDecimal(34L)); kratu1.setNumberOfNegativeActiveKeywords(new BigDecimal(35L)); kratu1.setNumberOfPositiveActiveKeywords(new BigDecimal(36L)); kratu1.setSpend(new BigDecimal(37L)); kratu1.setSumBudget(new BigDecimal(38L)); kratu1.setTotalClicksDisplay(39L); 
kratu1.setTotalClicksSearch(40L); kratu1.setWeightedAverageKeywordPosition(new BigDecimal(41L)); kratu1.setWeightedAverageQualityScore(new BigDecimal(42L)); kratu2.setAccountActive("Yes"); kratu2.setAccountSuspended(false); kratu2.setAverageCpcDisplay(new BigDecimal(99.99)); kratu2.setAverageCpcSearch(new BigDecimal(199.99)); kratu2.setAverageCpmDisplay(new BigDecimal(299.99)); kratu2.setAverageCpmSearch(new BigDecimal(399.99)); kratu2.setAveragePositionDisplay(new BigDecimal(99.99)); kratu2.setAveragePositionSearch(new BigDecimal(99.99)); kratu2.setConversions(33L); kratu2.setCtrDisplay(new BigDecimal(99.99)); kratu2.setCtrSearch(new BigDecimal(99.99)); kratu2.setElegibleImpressionsDisplay(new BigDecimal(99.99)); kratu2.setElegibleImpressionsSearch(new BigDecimal(99.99)); kratu2.setImpressionsDisplay(3000L); kratu2.setImpressionsSearch(5000L); kratu2.setLostImpressionsDueToBidAdRankDisplay(new BigDecimal(99.99)); kratu2.setLostImpressionsDueToBidAdRankSearch(new BigDecimal(99.99)); kratu2.setLostImpressionsDueToBudgetDisplay(new BigDecimal(99.99)); kratu2.setLostImpressionsDueToBudgetSearch(new BigDecimal(99.99)); kratu2.setNumberOfActiveAdGroups(new BigDecimal(99.99)); kratu2.setNumberOfActiveAds(new BigDecimal(99.99)); kratu2.setNumberOfActiveAverageQualityScoreKeywords(new BigDecimal(99.99)); kratu2.setNumberOfActiveBroadMatchingKeywords(new BigDecimal(99.99)); kratu2.setNumberOfActiveCampaigns(new BigDecimal(99.99)); kratu2.setNumberOfActiveExactMatchingKeywords(new BigDecimal(99.99)); kratu2.setNumberOfActiveGoodQualityScoreKeywords(new BigDecimal(99.99)); kratu2.setNumberOfActivePhraseMatchingKeywords(new BigDecimal(99.99)); kratu2.setNumberOfActivePoorQualityScoreKeywords(new BigDecimal(99.99)); kratu2.setNumberOfAdGroupNegativeActiveKeywords(new BigDecimal(99.99)); kratu2.setNumberOfAdgroupsWithoneActiveAd(new BigDecimal(99.99)); kratu2.setNumberOfAdgroupsWithTwoActiveAds(new BigDecimal(99.99)); kratu2.setNumberOfCampaignNegativeActiveKeywords(new 
BigDecimal(99.99)); kratu2.setNumberOfCampaignsWithCallExtensionEnabled(new BigDecimal(99.99)); kratu2.setNumberOfCampaignsWithLocationExtensionEnabled(new BigDecimal(99.99)); kratu2.setNumberOfCampaignsWithSiteLinksEnabled(new BigDecimal(99.99)); kratu2.setNumberOfDisapprovedAds(new BigDecimal(99.99)); kratu2.setNumberOfNegativeActiveKeywords(new BigDecimal(99.99)); kratu2.setNumberOfPositiveActiveKeywords(new BigDecimal(99.99)); kratu2.setSpend(new BigDecimal(99.99)); kratu2.setSumBudget(new BigDecimal(99.99)); kratu2.setTotalClicksDisplay(60L); kratu2.setTotalClicksSearch(888L); kratu2.setWeightedAverageKeywordPosition(new BigDecimal(99.99)); kratu2.setWeightedAverageQualityScore(new BigDecimal(99.99)); dailyKratus.add(kratu1); dailyKratus.add(kratu2); MockitoAnnotations.initMocks(this); Mockito.doAnswer(new Answer<List<? extends Report>>() { @Override public List<? extends Report> answer(InvocationOnMock invocation) throws Throwable { if (invocation.getArguments()[0].equals(ReportAccount.class)) { ReportAccount reportAccount = new ReportAccount(); reportAccount.setCost(new BigDecimal(9.99)); reportAccount.setConvertedClicks(100L); return ImmutableList.of(reportAccount); } else { return Lists.newArrayList(); } } }).when(storageHelper) .getReportByAccountId(Mockito.<Class<? 
extends Report>>anyObject(), Mockito.any(Long.class),Mockito.any(Date.class), Mockito.any(Date.class)); } /* * Test for createKratuSummary */ @Test public void test_createKratuSummary() { Kratu kratuSummarized = KratuCompute.createKratuSummary(dailyKratus, day1.getTime(), day2.getTime()); // From Account assertEquals(kratuSummarized.getTopAccountId(), new Long(123)); assertEquals(kratuSummarized.getAccountName(), "Account1"); assertEquals(kratuSummarized.getExternalCustomerId(), new Long(777)); assertEquals(kratuSummarized.getCurrencyCode(), "EUR"); assertEquals(kratuSummarized.getDateTimeZone(), "Europe/Paris"); assertEquals(kratuSummarized.getAccountSuspended(), false); // General assertEquals(kratuSummarized.getSpend(), kratu1.getSpend().add(kratu2.getSpend()).setScale(SCALE, ROUNDING)); assertEquals(kratuSummarized.getSumBudget(), kratu1.getSumBudget().add(kratu2.getSumBudget())); assertEquals(kratuSummarized.getConversions(), new Long(kratu1.getConversions() + kratu2.getConversions())); assertEquals(kratuSummarized.getAccountActive(), "Yes"); // Search Info assertEquals(kratuSummarized.getTotalClicksSearch(), new Long(kratu1.getTotalClicksSearch() + kratu2.getTotalClicksSearch())); assertEquals(kratuSummarized.getImpressionsSearch(), new Long(kratu1.getImpressionsSearch() + kratu2.getImpressionsSearch())); assertEquals(kratuSummarized.getElegibleImpressionsSearch(), kratu1.getElegibleImpressionsSearch().add(kratu2.getElegibleImpressionsSearch())); assertEquals(kratuSummarized.getLostImpressionsDueToBudgetSearch(), kratu1.getLostImpressionsDueToBudgetSearch().add(kratu2.getLostImpressionsDueToBudgetSearch())); assertEquals(kratuSummarized.getLostImpressionsDueToBidAdRankSearch(), kratu1.getLostImpressionsDueToBidAdRankSearch().add(kratu2.getLostImpressionsDueToBidAdRankSearch())); assertEquals(kratuSummarized.getCtrSearch(), dailyAverage(kratu1.getCtrSearch(), kratu2.getCtrSearch())); assertEquals(kratuSummarized.getAverageCpcSearch(), 
dailyAverage(kratu1.getAverageCpcSearch(), kratu2.getAverageCpcSearch())); assertEquals(kratuSummarized.getAverageCpmSearch(), dailyAverage(kratu1.getAverageCpmSearch(), kratu2.getAverageCpmSearch())); assertEquals(kratuSummarized.getAveragePositionSearch(), dailyAverage(kratu1.getAveragePositionSearch(), kratu2.getAveragePositionSearch())); // Display Info assertEquals(kratuSummarized.getTotalClicksDisplay(), new Long(kratu1.getTotalClicksDisplay() + kratu2.getTotalClicksDisplay())); assertEquals(kratuSummarized.getImpressionsDisplay(), new Long(kratu1.getImpressionsDisplay() + kratu2.getImpressionsDisplay())); assertEquals(kratuSummarized.getElegibleImpressionsDisplay(), kratu1.getElegibleImpressionsDisplay().add(kratu2.getElegibleImpressionsDisplay())); assertEquals(kratuSummarized.getLostImpressionsDueToBudgetDisplay(), kratu1.getLostImpressionsDueToBudgetDisplay().add(kratu2.getLostImpressionsDueToBudgetDisplay())); assertEquals(kratuSummarized.getLostImpressionsDueToBidAdRankDisplay(), kratu1.getLostImpressionsDueToBidAdRankDisplay().add(kratu2.getLostImpressionsDueToBidAdRankDisplay())); assertEquals(kratuSummarized.getCtrDisplay(), dailyAverage(kratu1.getCtrDisplay(), kratu2.getCtrDisplay())); assertEquals(kratuSummarized.getAverageCpcDisplay(), dailyAverage(kratu1.getAverageCpcDisplay(), kratu2.getAverageCpcDisplay())); assertEquals(kratuSummarized.getAverageCpmDisplay(), dailyAverage(kratu1.getAverageCpmDisplay(), kratu2.getAverageCpmDisplay())); assertEquals(kratuSummarized.getAveragePositionDisplay(), dailyAverage(kratu1.getAveragePositionDisplay(), kratu2.getAveragePositionDisplay())); // Structural Info assertEquals(kratuSummarized.getNumberOfActiveCampaigns(), dailyAverage(kratu1.getNumberOfActiveCampaigns(), kratu2.getNumberOfActiveCampaigns())); assertEquals(kratuSummarized.getNumberOfActiveAdGroups(), dailyAverage(kratu1.getNumberOfActiveAdGroups(), kratu2.getNumberOfActiveAdGroups())); assertEquals(kratuSummarized.getNumberOfActiveAds(), 
dailyAverage(kratu1.getNumberOfActiveAds(), kratu2.getNumberOfActiveAds())); assertEquals(kratuSummarized.getNumberOfPositiveActiveKeywords(), dailyAverage(kratu1.getNumberOfPositiveActiveKeywords(), kratu2.getNumberOfPositiveActiveKeywords())); assertEquals(kratuSummarized.getNumberOfActiveBroadMatchingKeywords(), dailyAverage(kratu1.getNumberOfActiveBroadMatchingKeywords(), kratu2.getNumberOfActiveBroadMatchingKeywords())); assertEquals(kratuSummarized.getNumberOfActivePhraseMatchingKeywords(), dailyAverage(kratu1.getNumberOfActivePhraseMatchingKeywords(), kratu2.getNumberOfActivePhraseMatchingKeywords())); assertEquals(kratuSummarized.getNumberOfAdGroupNegativeActiveKeywords(), dailyAverage(kratu1.getNumberOfAdGroupNegativeActiveKeywords(), kratu2.getNumberOfAdGroupNegativeActiveKeywords())); assertEquals(kratuSummarized.getNumberOfActiveGoodQualityScoreKeywords(), dailyAverage(kratu1.getNumberOfActiveGoodQualityScoreKeywords(), kratu2.getNumberOfActiveGoodQualityScoreKeywords())); assertEquals(kratuSummarized.getNumberOfActiveAverageQualityScoreKeywords(), dailyAverage(kratu1.getNumberOfActiveAverageQualityScoreKeywords(), kratu2.getNumberOfActiveAverageQualityScoreKeywords())); assertEquals(kratuSummarized.getNumberOfActivePoorQualityScoreKeywords(), dailyAverage(kratu1.getNumberOfActivePoorQualityScoreKeywords(), kratu2.getNumberOfActivePoorQualityScoreKeywords())); assertEquals(kratuSummarized.getNumberOfCampaignsWithCallExtensionEnabled(), dailyAverage(kratu1.getNumberOfCampaignsWithCallExtensionEnabled(), kratu2.getNumberOfCampaignsWithCallExtensionEnabled())); assertEquals(kratuSummarized.getNumberOfCampaignsWithLocationExtensionEnabled(), dailyAverage(kratu1.getNumberOfCampaignsWithLocationExtensionEnabled(), kratu2.getNumberOfCampaignsWithLocationExtensionEnabled())); assertEquals(kratuSummarized.getNumberOfCampaignsWithSiteLinksEnabled(), dailyAverage(kratu1.getNumberOfCampaignsWithSiteLinksEnabled(), 
kratu2.getNumberOfCampaignsWithSiteLinksEnabled())); assertEquals(kratuSummarized.getNumberOfAdgroupsWithoneActiveAd(), dailyAverage(kratu1.getNumberOfAdgroupsWithoneActiveAd(), kratu2.getNumberOfAdgroupsWithoneActiveAd())); assertEquals(kratuSummarized.getNumberOfAdgroupsWithTwoActiveAds(), dailyAverage(kratu1.getNumberOfAdgroupsWithTwoActiveAds(), kratu2.getNumberOfAdgroupsWithTwoActiveAds())); assertEquals(kratuSummarized.getNumberOfDisapprovedAds(), dailyAverage(kratu1.getNumberOfDisapprovedAds(), kratu2.getNumberOfDisapprovedAds())); assertEquals(kratuSummarized.getWeightedAverageKeywordPosition(), dailyAverage(kratu1.getWeightedAverageKeywordPosition(), kratu2.getWeightedAverageKeywordPosition())); assertEquals(kratuSummarized.getWeightedAverageQualityScore(), dailyAverage(kratu1.getWeightedAverageQualityScore(), kratu2.getWeightedAverageQualityScore())); } /* * Test for createDailyKratuFromDB */ @Test public void test_createDailyKratuFromDB() { KratuCompute.createDailyKratuFromDB(storageHelper, 1L, account, day1.getTime()); ArgumentCaptor<Long> accountIdCaptor = ArgumentCaptor.forClass(Long.class); ArgumentCaptor<Date> date1Captor = ArgumentCaptor.forClass(Date.class); ArgumentCaptor<Date> date2Captor = ArgumentCaptor.forClass(Date.class); verify(storageHelper, times(6)).getReportByAccountId( classReportCaptor.capture(), accountIdCaptor.capture(), date1Captor.capture(), date2Captor.capture()); List<Class<? 
extends Report>> classes = classReportCaptor.getAllValues(); assertEquals(classes.get(0), ReportAccount.class); assertEquals(classes.get(1), ReportCampaign.class); assertEquals(classes.get(2), ReportAdGroup.class); assertEquals(classes.get(3), ReportAd.class); assertEquals(classes.get(4), ReportKeywords.class); assertEquals(classes.get(5), ReportPlaceholderFeedItem.class); List<Long> accountIds = accountIdCaptor.getAllValues(); assertEquals(accountIds.get(0), new Long(777)); assertEquals(accountIds.get(1), new Long(777)); assertEquals(accountIds.get(2), new Long(777)); assertEquals(accountIds.get(3), new Long(777)); assertEquals(accountIds.get(4), new Long(777)); assertEquals(accountIds.get(5), new Long(777)); List<Date> date1Captors = date1Captor.getAllValues(); assertEquals(date1Captors.get(0), day1.getTime()); assertEquals(date1Captors.get(1), day1.getTime()); assertEquals(date1Captors.get(2), day1.getTime()); assertEquals(date1Captors.get(3), day1.getTime()); assertEquals(date1Captors.get(4), day1.getTime()); assertEquals(date1Captors.get(5), day1.getTime()); List<Date> date2Captors = date2Captor.getAllValues(); assertEquals(date2Captors.get(0), day1LastMinute.getTime()); assertEquals(date2Captors.get(1), day1LastMinute.getTime()); assertEquals(date2Captors.get(2), day1LastMinute.getTime()); assertEquals(date2Captors.get(3), day1LastMinute.getTime()); assertEquals(date2Captors.get(4), day1LastMinute.getTime()); assertEquals(date2Captors.get(5), day1LastMinute.getTime()); } private BigDecimal dailyAverage(BigDecimal bigDecimal1, BigDecimal bigDecimal2) { BigDecimal daysInRange = new BigDecimal(2); return bigDecimal1.divide(daysInRange, SCALE, ROUNDING).add(bigDecimal2.divide(daysInRange, SCALE, ROUNDING)); } }
/*
 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package com.sun.jndi.ldap;

import javax.naming.NamingException;
import javax.naming.directory.InvalidSearchFilterException;
import java.io.IOException;

/**
 * LDAP (RFC-1960) and LDAPv3 (RFC-2254) search filters.
 *
 * Parses a string search filter into its BER (ASN.1) wire encoding,
 * writing the result into a caller-supplied {@code BerEncoder}.
 *
 * @author Xuelei Fan
 * @author Vincent Ryan
 * @author Jagane Sundar
 * @author Rosanna Lee
 */
final class Filter {

    /**
     * First convert filter string into byte[].
     * For LDAP v3, the conversion uses Unicode -> UTF8
     * For LDAP v2, the conversion uses Unicode -> ISO 8859 (Latin-1)
     *
     * Then parse the byte[] as a filter, converting \hh to
     * a single byte, and encoding the resulting filter
     * into the supplied BER buffer
     *
     * @param ber       destination buffer for the encoded filter
     * @param filterStr the textual search filter; must be non-null, non-empty
     * @param isLdapv3  selects UTF-8 (v3) vs. Latin-1 (v2) byte conversion
     * @throws InvalidSearchFilterException if the filter is null/empty or malformed
     * @throws IOException on BER encoding errors
     */
    static void encodeFilterString(BerEncoder ber, String filterStr,
        boolean isLdapv3) throws IOException, NamingException {

        if ((filterStr == null) || (filterStr.equals(""))) {
            throw new InvalidSearchFilterException("Empty filter");
        }
        byte[] filter;
        int filterLen;
        if (isLdapv3) {
            filter = filterStr.getBytes("UTF8");
        } else {
            filter = filterStr.getBytes("8859_1");
        }
        filterLen = filter.length;
        if (dbg) {
            dbgIndent = 0;
            System.err.println("String filter: " + filterStr);
            System.err.println("size: " + filterLen);
            dprint("original: ", filter, 0, filterLen);
        }
        encodeFilter(ber, filter, 0, filterLen);
    }

    /**
     * Recursively encodes the filter bytes in [filterStart, filterEnd).
     * Dispatches on the first significant character: '(' introduces either a
     * complex filter (&, |, !) or a parenthesized simple filter; anything
     * else is treated as a bare "type=value" simple filter.
     */
    private static void encodeFilter(BerEncoder ber, byte[] filter,
        int filterStart, int filterEnd) throws IOException, NamingException {

        if (dbg) {
            dprint("encFilter: ", filter, filterStart, filterEnd);
            dbgIndent++;
        }

        if ((filterEnd - filterStart) <= 0) {
            throw new InvalidSearchFilterException("Empty filter");
        }

        int nextOffset;
        int parens, balance;
        boolean escape;

        parens = 0;

        // single-element array so callees can advance the shared cursor
        int filtOffset[] = new int[1];

        for (filtOffset[0] = filterStart; filtOffset[0] < filterEnd;) {
            switch (filter[filtOffset[0]]) {
            case '(':
                filtOffset[0]++;
                parens++;
                switch (filter[filtOffset[0]]) {
                case '&':
                    encodeComplexFilter(ber, filter,
                        LDAP_FILTER_AND, filtOffset, filterEnd);
                    // filtOffset[0] has pointed to char after right paren
                    parens--;
                    break;
                case '|':
                    encodeComplexFilter(ber, filter,
                        LDAP_FILTER_OR, filtOffset, filterEnd);
                    // filtOffset[0] has pointed to char after right paren
                    parens--;
                    break;
                case '!':
                    encodeComplexFilter(ber, filter,
                        LDAP_FILTER_NOT, filtOffset, filterEnd);
                    // filtOffset[0] has pointed to char after right paren
                    parens--;
                    break;
                default:
                    // parenthesized simple filter: scan to the matching ')',
                    // honoring '\' escapes so escaped parens don't count
                    balance = 1;
                    escape = false;
                    nextOffset = filtOffset[0];
                    while (nextOffset < filterEnd && balance > 0) {
                        if (!escape) {
                            if (filter[nextOffset] == '(')
                                balance++;
                            else if (filter[nextOffset] == ')')
                                balance--;
                        }
                        if (filter[nextOffset] == '\\' && !escape)
                            escape = true;
                        else
                            escape = false;
                        if (balance > 0)
                            nextOffset++;
                    }
                    if (balance != 0)
                        throw new InvalidSearchFilterException(
                            "Unbalanced parenthesis");

                    encodeSimpleFilter(ber, filter, filtOffset[0], nextOffset);

                    // points to the char after right paren.
                    filtOffset[0] = nextOffset + 1;
                    parens--;
                    break;
                }
                break;

            case ')':
                //
                // End of sequence
                //
                ber.endSeq();
                filtOffset[0]++;
                parens--;
                break;

            case ' ':
                filtOffset[0]++;
                break;

            default:    // assume simple type=value filter
                encodeSimpleFilter(ber, filter, filtOffset[0], filterEnd);
                filtOffset[0] = filterEnd; // force break from outer
                break;
            }
            if (parens < 0) {
                throw new InvalidSearchFilterException(
                    "Unbalanced parenthesis");
            }
        }
        if (parens != 0) {
            throw new InvalidSearchFilterException("Unbalanced parenthesis");
        }
        if (dbg) {
            dbgIndent--;
        }
    }

    /**
     * convert character 'c' that represents a hexadecimal digit to an integer.
     * if 'c' is not a hexadecimal digit [0-9A-Fa-f], -1 is returned.
     * otherwise the converted value is returned.
     */
    private static int hexchar2int( byte c ) {
        if ( c >= '0' && c <= '9' ) {
            return( c - '0' );
        }
        if ( c >= 'A' && c <= 'F' ) {
            return( c - 'A' + 10 );
        }
        if ( c >= 'a' && c <= 'f' ) {
            return( c - 'a' + 10 );
        }
        return( -1 );
    }

    // called by the LdapClient.compare method
    // Resolves both v3 "\hh" hex escapes and v2 "\c" literal escapes into
    // raw bytes; the result is at most (end - start) bytes long.
    static byte[] unescapeFilterValue(byte[] orig, int start, int end)
        throws NamingException {
        boolean escape = false, escStart = false;
        int ival;
        byte ch;

        if (dbg) {
            dprint("unescape: ", orig, start, end);
        }

        int len = end - start;
        byte tbuf[] = new byte[len];
        int j = 0;
        for (int i = start; i < end; i++) {
            ch = orig[i];
            if (escape) {
                // Try LDAP V3 escape (\xx)
                if ((ival = hexchar2int(ch)) < 0) {
                    /**
                     * If there is no hex char following a '\' when
                     * parsing a LDAP v3 filter (illegal by v3 way)
                     * we fallback to the way we unescape in v2.
                     */
                    if (escStart) {
                        // V2: \* \( \)
                        escape = false;
                        tbuf[j++] = ch;
                    } else {
                        // escaping already started but we can't find 2nd hex
                        throw new InvalidSearchFilterException(
                            "invalid escape sequence: " + orig);
                    }
                } else {
                    if (escStart) {
                        // first hex digit: store the high nibble
                        tbuf[j] = (byte)(ival<<4);
                        escStart = false;
                    } else {
                        // second hex digit: OR in the low nibble and commit
                        tbuf[j++] |= (byte)ival;
                        escape = false;
                    }
                }
            } else if (ch != '\\') {
                tbuf[j++] = ch;
                escape = false;
            } else {
                escStart = escape = true;
            }
        }
        byte[] answer = new byte[j];
        System.arraycopy(tbuf, 0, answer, 0, j);
        if (dbg) {
            Ber.dumpBER(System.err, "", answer, 0, j);
        }
        return answer;
    }

    /**
     * Returns the index of the first occurrence of {@code ch} in
     * {@code str} within [start, end), or -1 if absent.
     */
    private static int indexOf(byte[] str, char ch, int start, int end) {
        for (int i = start; i < end; i++) {
            if (str[i] == ch)
                return i;
        }
        return -1;
    }

    /**
     * Returns the index where {@code target} begins in {@code str}
     * (searching for its first character within [start, end)), or -1.
     *
     * NOTE(review): once the first character matches at {@code where}, the
     * remaining characters are compared at {@code str[where+i]} with no check
     * against {@code end} or {@code str.length} — a match starting near the
     * end of the buffer could read past {@code end} or throw
     * ArrayIndexOutOfBoundsException. TODO confirm all callers guarantee
     * enough trailing bytes.
     */
    private static int indexOf(byte[] str, String target, int start, int end) {
        int where = indexOf(str, target.charAt(0), start, end);
        if (where >= 0) {
            for (int i = 1; i < target.length(); i++) {
                if (str[where+i] != target.charAt(i)) {
                    return -1;
                }
            }
        }
        return where;
    }

    /**
     * Returns the index of the first unescaped occurrence of {@code ch}
     * in [start, end), or -1 if every occurrence is '\'-escaped or absent.
     */
    private static int findUnescaped(byte[] str, char ch, int start, int end) {
        while (start < end) {
            int where = indexOf(str, ch, start, end);
            /*
             * Count the immediate preceding '\' to find out if
             * this is an escaped '*'. This is a made-up way for
             * parsing an escaped '*' in v2. This is how the other leading
             * SDK vendors interpret v2.
             * For v3 we fallback to the way we parse "\*" in v2.
             * It's not legal in v3 to use "\*" to escape '*'; the right
             * way is to use "\2a" instead.
             */
            int backSlashPos;
            int backSlashCnt = 0;
            for (backSlashPos = where - 1;
                    ((backSlashPos >= start) &&
                    (str[backSlashPos] == '\\'));
                    backSlashPos--, backSlashCnt++);

            // if at start of string, or not there at all, or if not escaped
            if (where == start || where == -1 ||
                ((backSlashCnt % 2) == 0)) {
                return where;
            }

            // start search after escaped star
            start = where + 1;
        }
        return -1;
    }

    /**
     * Encodes one "attr OP value" filter component. Determines the operator
     * from the character before '=', validates the attribute description per
     * RFC 4512/4515, then emits an equality/ge/le/approx/present/substring/
     * extensible-match BER element as appropriate.
     */
    private static void encodeSimpleFilter(BerEncoder ber, byte[] filter,
        int filtStart, int filtEnd) throws IOException, NamingException {

        if (dbg) {
            dprint("encSimpleFilter: ", filter, filtStart, filtEnd);
            dbgIndent++;
        }

        String type, value;
        int valueStart, valueEnd, typeStart, typeEnd;

        int eq;
        if ((eq = indexOf(filter, '=', filtStart, filtEnd)) == -1) {
            throw new InvalidSearchFilterException("Missing 'equals'");
        }

        valueStart = eq + 1;        // value starts after equal sign
        valueEnd = filtEnd;
        typeStart = filtStart;      // beginning of string

        int ftype;

        switch (filter[eq - 1]) {
        case '<':
            ftype = LDAP_FILTER_LE;
            typeEnd = eq - 1;
            break;
        case '>':
            ftype = LDAP_FILTER_GE;
            typeEnd = eq - 1;
            break;
        case '~':
            ftype = LDAP_FILTER_APPROX;
            typeEnd = eq - 1;
            break;
        case ':':
            ftype = LDAP_FILTER_EXT;
            typeEnd = eq - 1;
            break;
        default:
            typeEnd = eq;
            //initializing ftype to make the compiler happy
            ftype = 0x00;
            break;
        }

        if (dbg) {
            System.err.println("type: " + typeStart + ", " + typeEnd);
            System.err.println("value: " + valueStart + ", " + valueEnd);
        }

        // check validity of type
        //
        // RFC4512 defines the type as the following ABNF:
        //     attr = attributedescription
        //     attributedescription = attributetype options
        //     attributetype = oid
        //     oid = descr / numericoid
        //     descr = keystring
        //     keystring = leadkeychar *keychar
        //     leadkeychar = ALPHA
        //     keychar = ALPHA / DIGIT / HYPHEN
        //     numericoid = number 1*( DOT number )
        //     number  = DIGIT / ( LDIGIT 1*DIGIT )
        //     options = *( SEMI option )
        //     option = 1*keychar
        //
        // And RFC4515 defines the extensible type as the following ABNF:
        //     attr [dnattrs] [matchingrule] / [dnattrs] matchingrule
        int optionsStart = -1;
        int extensibleStart = -1;
        if ((filter[typeStart] >= '0' && filter[typeStart] <= '9') ||
            (filter[typeStart] >= 'A' && filter[typeStart] <= 'Z') ||
            (filter[typeStart] >= 'a' && filter[typeStart] <= 'z')) {

            boolean isNumericOid =
                filter[typeStart] >= '0' && filter[typeStart] <= '9';
            for (int i = typeStart + 1; i < typeEnd; i++) {
                // ';' is an indicator of attribute options
                if (filter[i] == ';') {
                    if (isNumericOid && filter[i - 1] == '.') {
                        throw new InvalidSearchFilterException(
                            "invalid attribute description");
                    }

                    // attribute options
                    optionsStart = i;
                    break;
                }

                // ':' is an indicator of extensible rules
                if (filter[i] == ':' && ftype == LDAP_FILTER_EXT) {
                    if (isNumericOid && filter[i - 1] == '.') {
                        throw new InvalidSearchFilterException(
                            "invalid attribute description");
                    }

                    // extensible matching
                    extensibleStart = i;
                    break;
                }

                if (isNumericOid) {
                    // numeric object identifier
                    if ((filter[i] == '.' && filter[i - 1] == '.') ||
                        (filter[i] != '.' &&
                            !(filter[i] >= '0' && filter[i] <= '9'))) {
                        throw new InvalidSearchFilterException(
                            "invalid attribute description");
                    }
                } else {
                    // descriptor
                    // The underscore ("_") character is not allowed by
                    // the LDAP specification. We allow it here to
                    // tolerate the incorrect use in practice.
                    if (filter[i] != '-' && filter[i] != '_' &&
                        !(filter[i] >= '0' && filter[i] <= '9') &&
                        !(filter[i] >= 'A' && filter[i] <= 'Z') &&
                        !(filter[i] >= 'a' && filter[i] <= 'z')) {
                        throw new InvalidSearchFilterException(
                            "invalid attribute description");
                    }
                }
            }
        } else if (ftype == LDAP_FILTER_EXT && filter[typeStart] == ':') {
            // extensible matching
            extensibleStart = typeStart;
        } else {
            throw new InvalidSearchFilterException(
                "invalid attribute description");
        }

        // check attribute options
        if (optionsStart > 0) {
            for (int i = optionsStart + 1; i < typeEnd; i++) {
                if (filter[i] == ';') {
                    if (filter[i - 1] == ';') {
                        throw new InvalidSearchFilterException(
                            "invalid attribute description");
                    }
                    continue;
                }

                // ':' is an indicator of extensible rules
                if (filter[i] == ':' && ftype == LDAP_FILTER_EXT) {
                    if (filter[i - 1] == ';') {
                        throw new InvalidSearchFilterException(
                            "invalid attribute description");
                    }

                    // extensible matching
                    extensibleStart = i;
                    break;
                }

                // The underscore ("_") character is not allowed by
                // the LDAP specification. We allow it here to
                // tolerate the incorrect use in practice.
                if (filter[i] != '-' && filter[i] != '_' &&
                    !(filter[i] >= '0' && filter[i] <= '9') &&
                    !(filter[i] >= 'A' && filter[i] <= 'Z') &&
                    !(filter[i] >= 'a' && filter[i] <= 'z')) {
                    throw new InvalidSearchFilterException(
                        "invalid attribute description");
                }
            }
        }

        // check extensible matching
        if (extensibleStart > 0) {
            boolean isMatchingRule = false;
            for (int i = extensibleStart + 1; i < typeEnd; i++) {
                if (filter[i] == ':') {
                    throw new InvalidSearchFilterException(
                        "invalid attribute description");
                } else if ((filter[i] >= '0' && filter[i] <= '9') ||
                           (filter[i] >= 'A' && filter[i] <= 'Z') ||
                           (filter[i] >= 'a' && filter[i] <= 'z')) {
                    boolean isNumericOid =
                        filter[i] >= '0' && filter[i] <= '9';
                    i++;
                    for (int j = i; j < typeEnd; j++, i++) {
                        // allows no more than two extensible rules
                        if (filter[j] == ':') {
                            if (isMatchingRule) {
                                throw new InvalidSearchFilterException(
                                    "invalid attribute description");
                            }
                            if (isNumericOid && filter[j - 1] == '.') {
                                throw new InvalidSearchFilterException(
                                    "invalid attribute description");
                            }
                            isMatchingRule = true;
                            break;
                        }

                        if (isNumericOid) {
                            // numeric object identifier
                            if ((filter[j] == '.' && filter[j - 1] == '.') ||
                                (filter[j] != '.' &&
                                    !(filter[j] >= '0' && filter[j] <= '9'))) {
                                throw new InvalidSearchFilterException(
                                    "invalid attribute description");
                            }
                        } else {
                            // descriptor
                            // The underscore ("_") character is not allowed by
                            // the LDAP specification. We allow it here to
                            // tolerate the incorrect use in practice.
                            if (filter[j] != '-' && filter[j] != '_' &&
                                !(filter[j] >= '0' && filter[j] <= '9') &&
                                !(filter[j] >= 'A' && filter[j] <= 'Z') &&
                                !(filter[j] >= 'a' && filter[j] <= 'z')) {
                                throw new InvalidSearchFilterException(
                                    "invalid attribute description");
                            }
                        }
                    }
                } else {
                    throw new InvalidSearchFilterException(
                        "invalid attribute description");
                }
            }
        }

        // ensure the latest byte is not isolated
        if (filter[typeEnd - 1] == '.' ||
            filter[typeEnd - 1] == ';' ||
            filter[typeEnd - 1] == ':') {
            throw new InvalidSearchFilterException(
                "invalid attribute description");
        }

        if (typeEnd == eq) { // filter type is of "equal"
            if (findUnescaped(filter, '*', valueStart, valueEnd) == -1) {
                ftype = LDAP_FILTER_EQUALITY;
            } else if (filter[valueStart] == '*' &&
                       valueStart == (valueEnd - 1)) {
                ftype = LDAP_FILTER_PRESENT;
            } else {
                encodeSubstringFilter(ber, filter,
                    typeStart, typeEnd, valueStart, valueEnd);
                return;
            }
        }

        if (ftype == LDAP_FILTER_PRESENT) {
            ber.encodeOctetString(filter, ftype, typeStart,
                typeEnd-typeStart);
        } else if (ftype == LDAP_FILTER_EXT) {
            encodeExtensibleMatch(ber, filter,
                typeStart, typeEnd, valueStart, valueEnd);
        } else {
            ber.beginSeq(ftype);
            ber.encodeOctetString(filter, Ber.ASN_OCTET_STR,
                typeStart, typeEnd - typeStart);
            ber.encodeOctetString(
                unescapeFilterValue(filter, valueStart, valueEnd),
                Ber.ASN_OCTET_STR);
            ber.endSeq();
        }
        if (dbg) {
            dbgIndent--;
        }
    }

    /**
     * Encodes a substring filter ("type=ini*any*fin"). The value is split
     * on unescaped '*' into initial/any/final components, each unescaped
     * and emitted with its own substring tag.
     */
    private static void encodeSubstringFilter(BerEncoder ber, byte[] filter,
        int typeStart, int typeEnd, int valueStart, int valueEnd)
        throws IOException, NamingException {

        if (dbg) {
            dprint("encSubstringFilter: type ", filter, typeStart, typeEnd);
            dprint(", val : ", filter, valueStart, valueEnd);
            dbgIndent++;
        }

        ber.beginSeq(LDAP_FILTER_SUBSTRINGS);
            ber.encodeOctetString(filter, Ber.ASN_OCTET_STR,
                typeStart, typeEnd-typeStart);
            ber.beginSeq(LdapClient.LBER_SEQUENCE);
                int index;
                int previndex = valueStart;
                while ((index = findUnescaped(filter, '*', previndex,
                        valueEnd)) != -1) {
                    if (previndex == valueStart) {
                        if (previndex < index) {
                            if (dbg)
                                System.err.println(
                                    "initial: " + previndex + "," + index);
                            ber.encodeOctetString(
                                unescapeFilterValue(filter, previndex, index),
                                LDAP_SUBSTRING_INITIAL);
                        }
                    } else {
                        if (previndex < index) {
                            if (dbg)
                                System.err.println(
                                    "any: " + previndex + "," + index);
                            ber.encodeOctetString(
                                unescapeFilterValue(filter, previndex, index),
                                LDAP_SUBSTRING_ANY);
                        }
                    }
                    previndex = index + 1;
                }
                if (previndex < valueEnd) {
                    if (dbg)
                        System.err.println(
                            "final: " + previndex + "," + valueEnd);
                    ber.encodeOctetString(
                        unescapeFilterValue(filter, previndex, valueEnd),
                        LDAP_SUBSTRING_FINAL);
                }
            ber.endSeq();
        ber.endSeq();
        if (dbg) {
            dbgIndent--;
        }
    }

    // The complex filter types look like:
    //     "&(type=val)(type=val)"
    //     "|(type=val)(type=val)"
    //     "!(type=val)"
    //
    // The filtOffset[0] pointing to the '&', '|', or '!'.
    //
    private static void encodeComplexFilter(BerEncoder ber, byte[] filter,
        int filterType, int filtOffset[], int filtEnd)
        throws IOException, NamingException {

        if (dbg) {
            dprint("encComplexFilter: ", filter, filtOffset[0], filtEnd);
            dprint(", type: " + Integer.toString(filterType, 16));
            dbgIndent++;
        }

        filtOffset[0]++;

        ber.beginSeq(filterType);

            int[] parens = findRightParen(filter, filtOffset, filtEnd);
            encodeFilterList(ber, filter, filterType, parens[0], parens[1]);

        ber.endSeq();

        if (dbg) {
            dbgIndent--;
        }
    }

    //
    // filter at filtOffset[0] - 1 points to a (. Find ) that matches it
    // and return substring between the parens. Adjust filtOffset[0] to
    // point to char after right paren
    //
    private static int[] findRightParen(byte[] filter, int filtOffset[],
        int end) throws IOException, NamingException {

        int balance = 1;
        boolean escape = false;
        int nextOffset = filtOffset[0];

        while (nextOffset < end && balance > 0) {
            if (!escape) {
                if (filter[nextOffset] == '(')
                    balance++;
                else if (filter[nextOffset] == ')')
                    balance--;
            }
            if (filter[nextOffset] == '\\' && !escape)
                escape = true;
            else
                escape = false;
            if (balance > 0)
                nextOffset++;
        }
        if (balance != 0) {
            throw new InvalidSearchFilterException("Unbalanced parenthesis");
        }

        // String tmp = filter.substring(filtOffset[0], nextOffset);
        int[] tmp = new int[] {filtOffset[0], nextOffset};
        filtOffset[0] = nextOffset + 1;
        return tmp;
    }

    //
    // Encode filter list of type "(filter1)(filter2)..."
    // Each component is copied into a fresh "(...)"-wrapped buffer and
    // encoded recursively; a NOT filter may contain only one component.
    //
    private static void encodeFilterList(BerEncoder ber, byte[] filter,
        int filterType, int start, int end)
        throws IOException, NamingException {

        if (dbg) {
            dprint("encFilterList: ", filter, start, end);
            dbgIndent++;
        }

        int filtOffset[] = new int[1];
        int listNumber = 0;
        for (filtOffset[0] = start; filtOffset[0] < end; filtOffset[0]++) {
            if (Character.isSpaceChar((char)filter[filtOffset[0]]))
                continue;

            if ((filterType == LDAP_FILTER_NOT) && (listNumber > 0)) {
                throw new InvalidSearchFilterException(
                    "Filter (!) cannot be followed by more than one filters");
            }

            if (filter[filtOffset[0]] == '(') {
                continue;
            }

            int[] parens = findRightParen(filter, filtOffset, end);

            // add enclosing parens
            int len = parens[1]-parens[0];
            byte[] newfilter = new byte[len+2];
            System.arraycopy(filter, parens[0], newfilter, 1, len);
            newfilter[0] = (byte)'(';
            newfilter[len+1] = (byte)')';
            encodeFilter(ber, newfilter, 0, newfilter.length);

            listNumber++;
        }

        if (dbg) {
            dbgIndent--;
        }
    }

    //
    // Encode extensible match
    //
    private static void encodeExtensibleMatch(BerEncoder ber, byte[] filter,
        int matchStart, int matchEnd, int valueStart, int valueEnd)
        throws IOException, NamingException {

        boolean matchDN = false;
        int colon;
        int colon2;
        int i;

        ber.beginSeq(LDAP_FILTER_EXT);

            // test for colon separator
            if ((colon = indexOf(filter, ':', matchStart, matchEnd)) >= 0) {

                // test for match DN
                if ((i = indexOf(filter, ":dn", colon, matchEnd)) >= 0) {
                    matchDN = true;
                }

                // test for matching rule
                if (((colon2 = indexOf(filter, ':', colon + 1, matchEnd)) >= 0)
                    || (i == -1)) {

                    if (i == colon) {
                        ber.encodeOctetString(filter, LDAP_FILTER_EXT_RULE,
                            colon2 + 1, matchEnd - (colon2 + 1));

                    } else if ((i == colon2) && (i >= 0)) {
                        ber.encodeOctetString(filter, LDAP_FILTER_EXT_RULE,
                            colon + 1, colon2 - (colon + 1));

                    } else {
                        ber.encodeOctetString(filter, LDAP_FILTER_EXT_RULE,
                            colon + 1, matchEnd - (colon + 1));
                    }
                }

                // test for attribute type
                if (colon > matchStart) {
                    ber.encodeOctetString(filter, LDAP_FILTER_EXT_TYPE,
                        matchStart, colon - matchStart);
                }
            } else {
                ber.encodeOctetString(filter, LDAP_FILTER_EXT_TYPE,
                    matchStart, matchEnd - matchStart);
            }

            ber.encodeOctetString(
                unescapeFilterValue(filter, valueStart, valueEnd),
                LDAP_FILTER_EXT_VAL);

            /*
             * This element is defined in RFC-2251 with an ASN.1 DEFAULT tag.
             * However, for Active Directory interoperability it is transmitted
             * even when FALSE.
             */
            ber.encodeBoolean(matchDN, LDAP_FILTER_EXT_DN);

        ber.endSeq();
    }

    ////////////////////////////////////////////////////////////////////////////
    //
    // some debug print code that does indenting. Useful for debugging
    // the filter generation code
    //
    ////////////////////////////////////////////////////////////////////////////

    // dbg is a compile-time constant: all dprint/System.err paths are dead
    // code unless this is flipped to true and the class rebuilt.
    private static final boolean dbg = false;
    private static int dbgIndent = 0;

    private static void dprint(String msg) {
        dprint(msg, new byte[0], 0, 0);
    }

    private static void dprint(String msg, byte[] str) {
        dprint(msg, str, 0, str.length);
    }

    private static void dprint(String msg, byte[] str, int start, int end) {
        String dstr = " ";
        int i = dbgIndent;
        while (i-- > 0) {
            dstr += " ";
        }
        dstr += msg;

        System.err.print(dstr);
        for (int j = start; j < end; j++) {
            System.err.print((char)str[j]);
        }
        System.err.println();
    }

    /////////////// Constants used for encoding filter //////////////

    static final int LDAP_FILTER_AND = 0xa0;
    static final int LDAP_FILTER_OR = 0xa1;
    static final int LDAP_FILTER_NOT = 0xa2;
    static final int LDAP_FILTER_EQUALITY = 0xa3;
    static final int LDAP_FILTER_SUBSTRINGS = 0xa4;
    static final int LDAP_FILTER_GE = 0xa5;
    static final int LDAP_FILTER_LE = 0xa6;
    static final int LDAP_FILTER_PRESENT = 0x87;
    static final int LDAP_FILTER_APPROX = 0xa8;
    static final int LDAP_FILTER_EXT = 0xa9;        // LDAPv3
    static final int LDAP_FILTER_EXT_RULE = 0x81;   // LDAPv3
    static final int LDAP_FILTER_EXT_TYPE = 0x82;   // LDAPv3
    static final int LDAP_FILTER_EXT_VAL = 0x83;    // LDAPv3
    static final int LDAP_FILTER_EXT_DN = 0x84;     // LDAPv3

    static final int LDAP_SUBSTRING_INITIAL = 0x80;
    static final int LDAP_SUBSTRING_ANY = 0x81;
    static final int LDAP_SUBSTRING_FINAL = 0x82;
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.openxml4j.opc.internal.marshallers;

import java.io.OutputStream;

import javax.xml.XMLConstants;
import javax.xml.stream.XMLEventFactory;
import javax.xml.stream.events.Namespace;

import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.poi.openxml4j.opc.PackagePart;
import org.apache.poi.openxml4j.opc.internal.PackagePropertiesPart;
import org.apache.poi.openxml4j.opc.internal.PartMarshaller;
import org.apache.poi.openxml4j.util.Nullable;
import org.apache.poi.util.DocumentHelper;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * Package properties marshaller.
 */
public class PackagePropertiesMarshaller implements PartMarshaller {

    // Namespaces shared by every marshalled property element.
    private final static Namespace namespaceDC;
    private final static Namespace namespaceCoreProperties;
    private final static Namespace namespaceDcTerms;
    private final static Namespace namespaceXSI;

    static {
        final XMLEventFactory eventFactory = XMLEventFactory.newInstance();
        namespaceDC = eventFactory.createNamespace("dc",
                PackagePropertiesPart.NAMESPACE_DC_URI);
        namespaceCoreProperties = eventFactory.createNamespace("cp",
                PackagePropertiesPart.NAMESPACE_CP_URI);
        namespaceDcTerms = eventFactory.createNamespace("dcterms",
                PackagePropertiesPart.NAMESPACE_DCTERMS_URI);
        namespaceXSI = eventFactory.createNamespace("xsi",
                XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI);
    }

    // Local names of the core-property elements.
    protected static final String KEYWORD_CATEGORY = "category";
    protected static final String KEYWORD_CONTENT_STATUS = "contentStatus";
    protected static final String KEYWORD_CONTENT_TYPE = "contentType";
    protected static final String KEYWORD_CREATED = "created";
    protected static final String KEYWORD_CREATOR = "creator";
    protected static final String KEYWORD_DESCRIPTION = "description";
    protected static final String KEYWORD_IDENTIFIER = "identifier";
    protected static final String KEYWORD_KEYWORDS = "keywords";
    protected static final String KEYWORD_LANGUAGE = "language";
    protected static final String KEYWORD_LAST_MODIFIED_BY = "lastModifiedBy";
    protected static final String KEYWORD_LAST_PRINTED = "lastPrinted";
    protected static final String KEYWORD_MODIFIED = "modified";
    protected static final String KEYWORD_REVISION = "revision";
    protected static final String KEYWORD_SUBJECT = "subject";
    protected static final String KEYWORD_TITLE = "title";
    protected static final String KEYWORD_VERSION = "version";

    // The properties part being marshalled (set by marshall()).
    PackagePropertiesPart propsPart;

    // The DOM document being built (set by marshall()).
    Document xmlDoc = null;

    /**
     * Marshall package core properties to an XML document. Always return
     * <code>true</code>.
     */
    @Override
    public boolean marshall(PackagePart part, OutputStream out)
            throws OpenXML4JException {
        if (!(part instanceof PackagePropertiesPart))
            throw new IllegalArgumentException(
                    "'part' must be a PackagePropertiesPart instance.");
        propsPart = (PackagePropertiesPart) part;

        // Build a fresh document whose root declares every namespace the
        // property elements below will use.
        xmlDoc = DocumentHelper.createDocument();
        Element root = xmlDoc.createElementNS(
                namespaceCoreProperties.getNamespaceURI(),
                getQName("coreProperties", namespaceCoreProperties));
        DocumentHelper.addNamespaceDeclaration(root, namespaceCoreProperties);
        DocumentHelper.addNamespaceDeclaration(root, namespaceDC);
        DocumentHelper.addNamespaceDeclaration(root, namespaceDcTerms);
        DocumentHelper.addNamespaceDeclaration(root, namespaceXSI);
        xmlDoc.appendChild(root);

        // Append each property (only those that actually have a value).
        addCategory();
        addContentStatus();
        addContentType();
        addCreated();
        addCreator();
        addDescription();
        addIdentifier();
        addKeywords();
        addLanguage();
        addLastModifiedBy();
        addLastPrinted();
        addModified();
        addRevision();
        addSubject();
        addTitle();
        addVersion();

        return true;
    }

    /**
     * Sets the given element's text content, creating it if necessary.
     */
    private Element setElementTextContent(String localName, Namespace namespace,
            Nullable<String> property) {
        return setElementTextContent(localName, namespace, property,
                property.getValue());
    }

    /**
     * Returns the element name qualified with the namespace prefix, or the
     * bare local name when the namespace has no prefix.
     */
    private String getQName(String localName, Namespace namespace) {
        final String prefix = namespace.getPrefix();
        if (prefix.isEmpty()) {
            return localName;
        }
        return prefix + ':' + localName;
    }

    /**
     * Sets the text content of the named child of the document root,
     * creating that child first when it does not exist yet. Does nothing
     * and returns null when the property carries no value.
     */
    private Element setElementTextContent(String localName, Namespace namespace,
            Nullable<?> property, String propertyValue) {
        if (!property.hasValue())
            return null;

        Element docRoot = xmlDoc.getDocumentElement();
        Element node = (Element) docRoot
                .getElementsByTagNameNS(namespace.getNamespaceURI(), localName)
                .item(0);
        if (node == null) {
            // missing, we add it
            node = xmlDoc.createElementNS(namespace.getNamespaceURI(),
                    getQName(localName, namespace));
            docRoot.appendChild(node);
        }
        node.setTextContent(propertyValue);
        return node;
    }

    /**
     * Same as the four-argument overload, additionally stamping the created
     * element with an xsi:type attribute (used for W3CDTF date values).
     */
    private Element setElementTextContent(String localName, Namespace namespace,
            Nullable<?> property, String propertyValue, String xsiType) {
        Element node = setElementTextContent(localName, namespace, property,
                propertyValue);
        if (node != null) {
            node.setAttributeNS(namespaceXSI.getNamespaceURI(),
                    getQName("type", namespaceXSI), xsiType);
        }
        return node;
    }

    /**
     * Add category property element if needed.
     */
    private void addCategory() {
        setElementTextContent(KEYWORD_CATEGORY, namespaceCoreProperties,
                propsPart.getCategoryProperty());
    }

    /**
     * Add content status property element if needed.
     */
    private void addContentStatus() {
        setElementTextContent(KEYWORD_CONTENT_STATUS, namespaceCoreProperties,
                propsPart.getContentStatusProperty());
    }

    /**
     * Add content type property element if needed.
     */
    private void addContentType() {
        setElementTextContent(KEYWORD_CONTENT_TYPE, namespaceCoreProperties,
                propsPart.getContentTypeProperty());
    }

    /**
     * Add created property element if needed.
     */
    private void addCreated() {
        setElementTextContent(KEYWORD_CREATED, namespaceDcTerms,
                propsPart.getCreatedProperty(),
                propsPart.getCreatedPropertyString(), "dcterms:W3CDTF");
    }

    /**
     * Add creator property element if needed.
     */
    private void addCreator() {
        setElementTextContent(KEYWORD_CREATOR, namespaceDC,
                propsPart.getCreatorProperty());
    }

    /**
     * Add description property element if needed.
     */
    private void addDescription() {
        setElementTextContent(KEYWORD_DESCRIPTION, namespaceDC,
                propsPart.getDescriptionProperty());
    }

    /**
     * Add identifier property element if needed.
     */
    private void addIdentifier() {
        setElementTextContent(KEYWORD_IDENTIFIER, namespaceDC,
                propsPart.getIdentifierProperty());
    }

    /**
     * Add keywords property element if needed.
     */
    private void addKeywords() {
        setElementTextContent(KEYWORD_KEYWORDS, namespaceCoreProperties,
                propsPart.getKeywordsProperty());
    }

    /**
     * Add language property element if needed.
     */
    private void addLanguage() {
        setElementTextContent(KEYWORD_LANGUAGE, namespaceDC,
                propsPart.getLanguageProperty());
    }

    /**
     * Add 'last modified by' property if needed.
     */
    private void addLastModifiedBy() {
        setElementTextContent(KEYWORD_LAST_MODIFIED_BY,
                namespaceCoreProperties,
                propsPart.getLastModifiedByProperty());
    }

    /**
     * Add 'last printed' property if needed.
     */
    private void addLastPrinted() {
        setElementTextContent(KEYWORD_LAST_PRINTED, namespaceCoreProperties,
                propsPart.getLastPrintedProperty(),
                propsPart.getLastPrintedPropertyString());
    }

    /**
     * Add modified property element if needed.
     */
    private void addModified() {
        setElementTextContent(KEYWORD_MODIFIED, namespaceDcTerms,
                propsPart.getModifiedProperty(),
                propsPart.getModifiedPropertyString(), "dcterms:W3CDTF");
    }

    /**
     * Add revision property if needed.
     */
    private void addRevision() {
        setElementTextContent(KEYWORD_REVISION, namespaceCoreProperties,
                propsPart.getRevisionProperty());
    }

    /**
     * Add subject property if needed.
     */
    private void addSubject() {
        setElementTextContent(KEYWORD_SUBJECT, namespaceDC,
                propsPart.getSubjectProperty());
    }

    /**
     * Add title property if needed.
     */
    private void addTitle() {
        setElementTextContent(KEYWORD_TITLE, namespaceDC,
                propsPart.getTitleProperty());
    }

    /**
     * Add version property if needed.
     */
    private void addVersion() {
        setElementTextContent(KEYWORD_VERSION, namespaceCoreProperties,
                propsPart.getVersionProperty());
    }
}
/** * Point.java * Copyright 2020 Innovatics Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package com.pdfjet; /** * Used to create point objects with different shapes and draw them on a page. * Please note: When we are mentioning (x, y) coordinates of a point - we are talking about the coordinates of the center of the point. * * Please see Example_05. 
*/ public class Point implements Drawable { public static final int INVISIBLE = -1; public static final int CIRCLE = 0; public static final int DIAMOND = 1; public static final int BOX = 2; public static final int PLUS = 3; public static final int H_DASH = 4; public static final int V_DASH = 5; public static final int MULTIPLY = 6; public static final int STAR = 7; public static final int X_MARK = 8; public static final int UP_ARROW = 9; public static final int DOWN_ARROW = 10; public static final int LEFT_ARROW = 11; public static final int RIGHT_ARROW = 12; public static final boolean CONTROL_POINT = true; protected float x; protected float y; protected float r = 2f; protected int shape = Point.CIRCLE; protected int color = Color.black; protected int align = Align.RIGHT; protected float lineWidth = 0.3f; protected String linePattern = "[] 0"; protected boolean fillShape = false; protected boolean isControlPoint = false; protected boolean drawPath = false; private String text; private int textColor; private int textDirection; private String uri; private float xBox; private float yBox; /** * The default constructor. */ public Point() { } /** * Constructor for creating point objects. * * @param x the x coordinate of this point when drawn on the page. * @param y the y coordinate of this point when drawn on the page. */ public Point(double x, double y) { this((float) x, (float) y); } /** * Constructor for creating point objects. * * @param x the x coordinate of this point when drawn on the page. * @param y the y coordinate of this point when drawn on the page. */ public Point(float x, float y) { this.x = x; this.y = y; } /** * Constructor for creating point objects. * * @param x the x coordinate of this point when drawn on the page. * @param y the y coordinate of this point when drawn on the page. * @param isControlPoint true if this point is one of the points specifying a curve. 
*/ public Point(double x, double y, boolean isControlPoint) { this((float) x, (float) y, isControlPoint); } /** * Constructor for creating point objects. * * @param x the x coordinate of this point when drawn on the page. * @param y the y coordinate of this point when drawn on the page. * @param isControlPoint true if this point is one of the points specifying a curve. */ public Point(float x, float y, boolean isControlPoint) { this.x = x; this.y = y; this.isControlPoint = isControlPoint; } /** * Sets the position (x, y) of this point. * * @param x the x coordinate of this point when drawn on the page. * @param y the y coordinate of this point when drawn on the page. */ public void setPosition(float x, float y) { setLocation(x, y); } /** * Sets the position (x, y) of this point. * * @param x the x coordinate of this point when drawn on the page. * @param y the y coordinate of this point when drawn on the page. */ public void setPosition(double x, double y) { setLocation(x, y); } public void setXY(float x, float y) { setLocation(x, y); } public void setXY(double x, double y) { setLocation(x, y); } /** * Sets the location (x, y) of this point. * * @param x the x coordinate of this point when drawn on the page. * @param y the y coordinate of this point when drawn on the page. * @return the location of the point. */ public Point setLocation(float x, float y) { this.x = x; this.y = y; return this; } /** * Sets the location (x, y) of this point. * * @param x the x coordinate of this point when drawn on the page. * @param y the y coordinate of this point when drawn on the page. * @return the location of the point. */ public Point setLocation(double x, double y) { return setLocation((float) x, (float) y); } /** * Sets the x coordinate of this point. * * @param x the x coordinate of this point when drawn on the page. */ public void setX(double x) { this.x = (float) x; } /** * Sets the x coordinate of this point. 
* * @param x the x coordinate of this point when drawn on the page. */ public void setX(float x) { this.x = x; } /** * Returns the x coordinate of this point. * * @return the x coordinate of this point. */ public float getX() { return x; } /** * Sets the y coordinate of this point. * * @param y the y coordinate of this point when drawn on the page. */ public void setY(double y) { this.y = (float) y; } /** * Sets the y coordinate of this point. * * @param y the y coordinate of this point when drawn on the page. */ public void setY(float y) { this.y = y; } /** * Returns the y coordinate of this point. * * @return the y coordinate of this point. */ public float getY() { return y; } /** * Sets the radius of this point. * * @param r the radius. */ public void setRadius(double r) { this.r = (float) r; } /** * Sets the radius of this point. * * @param r the radius. */ public void setRadius(float r) { this.r = r; } /** * Returns the radius of this point. * * @return the radius of this point. */ public float getRadius() { return r; } /** * Sets the shape of this point. * * @param shape the shape of this point. Supported values: * <pre> * Point.INVISIBLE * Point.CIRCLE * Point.DIAMOND * Point.BOX * Point.PLUS * Point.H_DASH * Point.V_DASH * Point.MULTIPLY * Point.STAR * Point.X_MARK * Point.UP_ARROW * Point.DOWN_ARROW * Point.LEFT_ARROW * Point.RIGHT_ARROW * </pre> */ public void setShape(int shape) { this.shape = shape; } /** * Returns the point shape code value. * * @return the shape code value. */ public int getShape() { return shape; } /** * Sets the private fillShape variable. * * @param fillShape if true - fill the point with the specified brush color. */ public void setFillShape(boolean fillShape) { this.fillShape = fillShape; } /** * Returns the value of the fillShape private variable. * * @return the value of the private fillShape variable. */ public boolean getFillShape() { return this.fillShape; } /** * Sets the pen color for this point. 
* * @param color the color specified as an integer. * @return the point. */ public Point setColor(int color) { this.color = color; return this; } /** * Returns the point color as an integer. * * @return the color. */ public int getColor() { return this.color; } /** * Sets the width of the lines of this point. * * @param lineWidth the line width. */ public void setLineWidth(double lineWidth) { this.lineWidth = (float) lineWidth; } /** * Sets the width of the lines of this point. * * @param lineWidth the line width. */ public void setLineWidth(float lineWidth) { this.lineWidth = lineWidth; } /** * Returns the width of the lines used to draw this point. * * @return the width of the lines used to draw this point. */ public float getLineWidth() { return lineWidth; } /** * * The line dash pattern controls the pattern of dashes and gaps used to stroke paths. * It is specified by a dash array and a dash phase. * The elements of the dash array are positive numbers that specify the lengths of * alternating dashes and gaps. * The dash phase specifies the distance into the dash pattern at which to start the dash. * The elements of both the dash array and the dash phase are expressed in user space units. * <pre> * Examples of line dash patterns: * * "[Array] Phase" Appearance Description * _______________ _________________ ____________________________________ * * "[] 0" ----------------- Solid line * "[3] 0" --- --- --- 3 units on, 3 units off, ... * "[2] 1" - -- -- -- -- 1 on, 2 off, 2 on, 2 off, ... * "[2 1] 0" -- -- -- -- -- -- 2 on, 1 off, 2 on, 1 off, ... * "[3 5] 6" --- --- 2 off, 3 on, 5 off, 3 on, 5 off, ... * "[2 3] 11" - -- -- -- 1 on, 3 off, 2 on, 3 off, 2 on, ... * </pre> * * @param linePattern the line dash pattern. */ public void setLinePattern(String linePattern) { this.linePattern = linePattern; } /** * Returns the line dash pattern. * * @return the line dash pattern. 
*/ public String getLinePattern() { return linePattern; } /** * Sets this point as the start of a path that will be drawn on the chart. * * @return the point. */ public Point setDrawPath() { this.drawPath = true; return this; } /** * Sets the URI for the "click point" action. * * @param uri the URI */ public void setURIAction(String uri) { this.uri = uri; } /** * Returns the URI for the "click point" action. * * @return the URI for the "click point" action. */ public String getURIAction() { return uri; } /** * Sets the point text. * * @param text the text. */ public void setText(String text) { this.text = text; } /** * Returns the text associated with this point. * * @return the text. */ public String getText() { return this.text; } /** * Sets the point's text color. * * @param textColor the text color. */ public void setTextColor(int textColor) { this.textColor = textColor; } /** * Returns the point's text color. * * @return the text color. */ public int getTextColor() { return this.textColor; } /** * Sets the point's text direction. * * @param textDirection the text direction. */ public void setTextDirection(int textDirection) { this.textDirection = textDirection; } /** * Returns the point's text direction. * * @return the text direction. */ public int getTextDirection() { return this.textDirection; } /** * Sets the point alignment inside table cell. * * @param align the alignment value. */ public void setAlignment(int align) { this.align = align; } /** * Returns the point alignment. * * @return align the alignment value. */ public int getAlignment() { return this.align; } /** * Places this point in the specified box at position (0f, 0f). * * @param box the specified box. */ public void placeIn(Box box) { placeIn(box, 0f, 0f); } /** * Places this point in the specified box. * * @param box the specified box. * @param xOffset the x offset from the top left corner of the box. * @param yOffset the y offset from the top left corner of the box. 
*/ public void placeIn( Box box, double xOffset, double yOffset) { placeIn(box, (float) xOffset, (float) yOffset); } /** * Places this point in the specified box. * * @param box the specified box. * @param xOffset the x offset from the top left corner of the box. * @param yOffset the y offset from the top left corner of the box. */ public void placeIn( Box box, float xOffset, float yOffset) { xBox = box.x + xOffset; yBox = box.y + yOffset; } /** * Draws this point on the specified page. * * @param page the page to draw this point on. * @return x and y coordinates of the bottom right corner of this component. * @throws Exception If an input or output exception occurred */ public float[] drawOn(Page page) throws Exception { page.setPenWidth(lineWidth); page.setLinePattern(linePattern); if (fillShape) { page.setBrushColor(color); } else { page.setPenColor(color); } x += xBox; y += yBox; page.drawPoint(this); x -= xBox; y -= yBox; return new float[] {x + xBox + r, y + yBox + r}; } } // End of Point.java
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis.skylark; import com.google.common.base.Joiner; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Interner; import com.google.devtools.build.lib.actions.ActionKeyContext; import com.google.devtools.build.lib.actions.CommandLine; import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.actions.CommandLineItem; import com.google.devtools.build.lib.actions.SingleStringArgFormatter; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.concurrent.BlazeInterners; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec; import com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter; import com.google.devtools.build.lib.syntax.BaseFunction; import com.google.devtools.build.lib.syntax.Environment; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.Mutability; import com.google.devtools.build.lib.syntax.Printer; import com.google.devtools.build.lib.syntax.Runtime; import 
com.google.devtools.build.lib.syntax.SkylarkList; import com.google.devtools.build.lib.syntax.SkylarkSemantics; import com.google.devtools.build.lib.util.Fingerprint; import java.util.ArrayList; import java.util.HashSet; import java.util.IllegalFormatException; import java.util.List; import java.util.UUID; import java.util.function.Consumer; import javax.annotation.Nullable; /** Supports ctx.actions.args() from Skylark. */ @AutoCodec public class SkylarkCustomCommandLine extends CommandLine { private final SkylarkSemantics skylarkSemantics; private final ImmutableList<Object> arguments; private static final Joiner LINE_JOINER = Joiner.on("\n").skipNulls(); private static final Joiner FIELD_JOINER = Joiner.on(": ").skipNulls(); @AutoCodec static final class VectorArg { private static final Interner<VectorArg> interner = BlazeInterners.newStrongInterner(); private static final int HAS_LOCATION = 1; private static final int HAS_MAP_ALL = 1 << 1; private static final int HAS_MAP_EACH = 1 << 2; private static final int IS_NESTED_SET = 1 << 3; private static final int UNIQUIFY = 1 << 4; private static final int OMIT_IF_EMPTY = 1 << 5; private static final int HAS_ARG_NAME = 1 << 6; private static final int HAS_FORMAT_EACH = 1 << 7; private static final int HAS_BEFORE_EACH = 1 << 8; private static final int HAS_JOIN_WITH = 1 << 9; private static final int HAS_FORMAT_JOINED = 1 << 10; private static final int HAS_TERMINATE_WITH = 1 << 11; private static final UUID UNIQUIFY_UUID = UUID.fromString("7f494c3e-faea-4498-a521-5d3bc6ee19eb"); private static final UUID OMIT_IF_EMPTY_UUID = UUID.fromString("923206f1-6474-4a8f-b30f-4dd3143622e6"); private static final UUID ARG_NAME_UUID = UUID.fromString("2bc00382-7199-46ec-ad52-1556577cde1a"); private static final UUID FORMAT_EACH_UUID = UUID.fromString("8e974aec-df07-4a51-9418-f4c1172b4045"); private static final UUID BEFORE_EACH_UUID = UUID.fromString("f7e101bc-644d-4277-8562-6515ad55a988"); private static final UUID 
JOIN_WITH_UUID = UUID.fromString("c227dbd3-edad-454e-bc8a-c9b5ba1c38a3"); private static final UUID FORMAT_JOINED_UUID = UUID.fromString("528af376-4233-4c27-be4d-b0ff24ed68db"); private static final UUID TERMINATE_WITH_UUID = UUID.fromString("a4e5e090-0dbd-4d41-899a-77cfbba58655"); private final int features; private VectorArg(int features) { this.features = features; } @AutoCodec.VisibleForSerialization @AutoCodec.Instantiator static VectorArg create(int features) { return interner.intern(new VectorArg(features)); } private static void push(ImmutableList.Builder<Object> arguments, Builder arg) { int features = 0; features |= arg.mapAll != null ? HAS_MAP_ALL : 0; features |= arg.mapEach != null ? HAS_MAP_EACH : 0; features |= arg.nestedSet != null ? IS_NESTED_SET : 0; features |= arg.uniquify ? UNIQUIFY : 0; features |= arg.omitIfEmpty ? OMIT_IF_EMPTY : 0; features |= arg.argName != null ? HAS_ARG_NAME : 0; features |= arg.formatEach != null ? HAS_FORMAT_EACH : 0; features |= arg.beforeEach != null ? HAS_BEFORE_EACH : 0; features |= arg.joinWith != null ? HAS_JOIN_WITH : 0; features |= arg.formatJoined != null ? HAS_FORMAT_JOINED : 0; features |= arg.terminateWith != null ? HAS_TERMINATE_WITH : 0; boolean hasLocation = arg.location != null && (features & (HAS_FORMAT_EACH | HAS_FORMAT_JOINED | HAS_MAP_ALL | HAS_MAP_EACH)) != 0; features |= hasLocation ? 
HAS_LOCATION : 0; Preconditions.checkState( (features & (HAS_MAP_ALL | HAS_MAP_EACH)) != (HAS_MAP_ALL | HAS_MAP_EACH), "Cannot use both map_all and map_each"); VectorArg vectorArg = VectorArg.create(features); arguments.add(vectorArg); if (hasLocation) { arguments.add(arg.location); } if (arg.mapAll != null) { arguments.add(arg.mapAll); } if (arg.mapEach != null) { arguments.add(arg.mapEach); } if (arg.nestedSet != null) { arguments.add(arg.nestedSet); } else { ImmutableList<?> list = arg.list.getImmutableList(); int count = list.size(); arguments.add(count); for (int i = 0; i < count; ++i) { arguments.add(list.get(i)); } } if (arg.argName != null) { arguments.add(arg.argName); } if (arg.formatEach != null) { arguments.add(arg.formatEach); } if (arg.beforeEach != null) { arguments.add(arg.beforeEach); } if (arg.joinWith != null) { arguments.add(arg.joinWith); } if (arg.formatJoined != null) { arguments.add(arg.formatJoined); } if (arg.terminateWith != null) { arguments.add(arg.terminateWith); } } private int eval( List<Object> arguments, int argi, ImmutableList.Builder<String> builder, SkylarkSemantics skylarkSemantics) throws CommandLineExpansionException { final Location location = ((features & HAS_LOCATION) != 0) ? (Location) arguments.get(argi++) : null; final List<Object> originalValues; BaseFunction mapAll = ((features & HAS_MAP_ALL) != 0) ? (BaseFunction) arguments.get(argi++) : null; BaseFunction mapEach = ((features & HAS_MAP_EACH) != 0) ? 
(BaseFunction) arguments.get(argi++) : null; if ((features & IS_NESTED_SET) != 0) { NestedSet<Object> nestedSet = (NestedSet<Object>) arguments.get(argi++); originalValues = nestedSet.toList(); } else { int count = (Integer) arguments.get(argi++); originalValues = arguments.subList(argi, argi + count); argi += count; } List<String> stringValues; if (mapEach != null) { stringValues = new ArrayList<>(originalValues.size()); applyMapEach(mapEach, originalValues, stringValues::add, location, skylarkSemantics); } else if (mapAll != null) { Object result = applyMapFn(mapAll, originalValues, location, skylarkSemantics); if (!(result instanceof List)) { throw new CommandLineExpansionException( errorMessage( "map_fn must return a list, got " + result.getClass().getSimpleName(), location, null)); } List resultAsList = (List) result; if (resultAsList.size() != originalValues.size()) { throw new CommandLineExpansionException( errorMessage( String.format( "map_fn must return a list of the same length as the input. 
" + "Found list of length %d, expected %d.", resultAsList.size(), originalValues.size()), location, null)); } int count = resultAsList.size(); stringValues = new ArrayList<>(count); // map_fn contract doesn't guarantee that the values returned are strings, // so convert here for (int i = 0; i < count; ++i) { stringValues.add(CommandLineItem.expandToCommandLine(resultAsList.get(i))); } } else { int count = originalValues.size(); stringValues = new ArrayList<>(originalValues.size()); for (int i = 0; i < count; ++i) { stringValues.add(CommandLineItem.expandToCommandLine(originalValues.get(i))); } } // It's safe to uniquify at this stage, any transformations after this // will ensure continued uniqueness of the values if ((features & UNIQUIFY) != 0) { HashSet<String> seen = new HashSet<>(stringValues.size()); int count = stringValues.size(); int addIndex = 0; for (int i = 0; i < count; ++i) { String val = stringValues.get(i); if (seen.add(val)) { stringValues.set(addIndex++, val); } } stringValues = stringValues.subList(0, addIndex); } boolean isEmptyAndShouldOmit = stringValues.isEmpty() && (features & OMIT_IF_EMPTY) != 0; if ((features & HAS_ARG_NAME) != 0) { String argName = (String) arguments.get(argi++); if (!isEmptyAndShouldOmit) { builder.add(argName); } } if ((features & HAS_FORMAT_EACH) != 0) { String formatStr = (String) arguments.get(argi++); Formatter formatter = Formatter.get(location, skylarkSemantics); try { int count = stringValues.size(); for (int i = 0; i < count; ++i) { stringValues.set(i, formatter.format(formatStr, stringValues.get(i))); } } catch (IllegalFormatException e) { throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, null)); } } if ((features & HAS_BEFORE_EACH) != 0) { String beforeEach = (String) arguments.get(argi++); int count = stringValues.size(); for (int i = 0; i < count; ++i) { builder.add(beforeEach); builder.add(stringValues.get(i)); } } else if ((features & HAS_JOIN_WITH) != 0) { String joinWith = 
(String) arguments.get(argi++); String formatJoined = ((features & HAS_FORMAT_JOINED) != 0) ? (String) arguments.get(argi++) : null; if (!isEmptyAndShouldOmit) { String result = Joiner.on(joinWith).join(stringValues); if (formatJoined != null) { Formatter formatter = Formatter.get(location, skylarkSemantics); try { result = formatter.format(formatJoined, result); } catch (IllegalFormatException e) { throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, null)); } } builder.add(result); } } else { builder.addAll(stringValues); } if ((features & HAS_TERMINATE_WITH) != 0) { String terminateWith = (String) arguments.get(argi++); if (!isEmptyAndShouldOmit) { builder.add(terminateWith); } } return argi; } private int addToFingerprint( List<Object> arguments, int argi, ActionKeyContext actionKeyContext, Fingerprint fingerprint, SkylarkSemantics skylarkSemantics) throws CommandLineExpansionException { if ((features & HAS_MAP_ALL) != 0) { return addToFingerprintLegacy(arguments, argi, fingerprint, skylarkSemantics); } final Location location = ((features & HAS_LOCATION) != 0) ? (Location) arguments.get(argi++) : null; BaseFunction mapEach = ((features & HAS_MAP_EACH) != 0) ? 
(BaseFunction) arguments.get(argi++) : null; if ((features & IS_NESTED_SET) != 0) { NestedSet<Object> values = (NestedSet<Object>) arguments.get(argi++); if (mapEach != null) { CommandLineItem.MapFn<Object> commandLineItemMapFn = new CommandLineItemMapEachAdaptor(mapEach, location, skylarkSemantics); try { actionKeyContext.addNestedSetToFingerprint(commandLineItemMapFn, fingerprint, values); } catch (UncheckedCommandLineExpansionException e) { // We wrap the CommandLineExpansionException below, unwrap here throw e.cause; } } else { actionKeyContext.addNestedSetToFingerprint(fingerprint, values); } } else { int count = (Integer) arguments.get(argi++); final List<Object> originalValues = arguments.subList(argi, argi + count); argi += count; if (mapEach != null) { List<String> stringValues = new ArrayList<>(count); applyMapEach(mapEach, originalValues, stringValues::add, location, skylarkSemantics); for (String s : stringValues) { fingerprint.addString(s); } } else { for (int i = 0; i < count; ++i) { fingerprint.addString(CommandLineItem.expandToCommandLine(originalValues.get(i))); } } } if ((features & UNIQUIFY) != 0) { fingerprint.addUUID(UNIQUIFY_UUID); } if ((features & OMIT_IF_EMPTY) != 0) { fingerprint.addUUID(OMIT_IF_EMPTY_UUID); } if ((features & HAS_ARG_NAME) != 0) { String argName = (String) arguments.get(argi++); fingerprint.addUUID(ARG_NAME_UUID); fingerprint.addString(argName); } if ((features & HAS_FORMAT_EACH) != 0) { String formatStr = (String) arguments.get(argi++); fingerprint.addUUID(FORMAT_EACH_UUID); fingerprint.addString(formatStr); } if ((features & HAS_BEFORE_EACH) != 0) { String beforeEach = (String) arguments.get(argi++); fingerprint.addUUID(BEFORE_EACH_UUID); fingerprint.addString(beforeEach); } else if ((features & HAS_JOIN_WITH) != 0) { String joinWith = (String) arguments.get(argi++); fingerprint.addUUID(JOIN_WITH_UUID); fingerprint.addString(joinWith); if ((features & HAS_FORMAT_JOINED) != 0) { String formatJoined = (String) 
// NOTE(review): continuation of VectorArg#addToFingerprint — the previous line ends with
// "String formatJoined = (String)", completed by the first statement below.
arguments.get(argi++);
          fingerprint.addUUID(FORMAT_JOINED_UUID);
          fingerprint.addString(formatJoined);
        }
      }
      if ((features & HAS_TERMINATE_WITH) != 0) {
        String terminateWith = (String) arguments.get(argi++);
        fingerprint.addUUID(TERMINATE_WITH_UUID);
        fingerprint.addString(terminateWith);
      }
      // Returns the index of the first slot after this VectorArg's payload, so the caller's
      // scan can continue. The slot order here must exactly mirror VectorArg#push.
      return argi;
    }

    /**
     * Fallback fingerprinting used when a map_all/map_each function is present: eagerly expands
     * this arg to its final strings via {@link #eval} and fingerprints those strings directly.
     */
    private int addToFingerprintLegacy(
        List<Object> arguments, int argi, Fingerprint fingerprint,
        SkylarkSemantics skylarkSemantics)
        throws CommandLineExpansionException {
      ImmutableList.Builder<String> builder = ImmutableList.builder();
      argi = eval(arguments, argi, builder, skylarkSemantics);
      for (String s : builder.build()) {
        fingerprint.addString(s);
      }
      return argi;
    }

    /**
     * Mutable builder for a vector (list/depset) argument. Exactly one of {@code list} or
     * {@code nestedSet} is non-null, chosen by the constructor used.
     */
    static class Builder {
      @Nullable private final SkylarkList<?> list;
      @Nullable private final NestedSet<?> nestedSet;
      private Location location;
      public String argName;
      private BaseFunction mapAll;
      private BaseFunction mapEach;
      private String formatEach;
      private String beforeEach;
      private String joinWith;
      private String formatJoined;
      private boolean omitIfEmpty;
      private boolean uniquify;
      private String terminateWith;

      Builder(SkylarkList<?> list) {
        this.list = list;
        this.nestedSet = null;
      }

      Builder(NestedSet<?> nestedSet) {
        this.list = null;
        this.nestedSet = nestedSet;
      }

      Builder setLocation(Location location) {
        this.location = location;
        return this;
      }

      Builder setArgName(String argName) {
        this.argName = argName;
        return this;
      }

      Builder setMapAll(BaseFunction mapAll) {
        this.mapAll = mapAll;
        return this;
      }

      Builder setMapEach(BaseFunction mapEach) {
        this.mapEach = mapEach;
        return this;
      }

      Builder setFormatEach(String format) {
        this.formatEach = format;
        return this;
      }

      Builder setBeforeEach(String beforeEach) {
        this.beforeEach = beforeEach;
        return this;
      }

      Builder setJoinWith(String joinWith) {
        this.joinWith = joinWith;
        return this;
      }

      Builder setFormatJoined(String formatJoined) {
        this.formatJoined = formatJoined;
        return this;
      }

      Builder omitIfEmpty(boolean omitIfEmpty) {
        this.omitIfEmpty = omitIfEmpty;
        return this;
      }

      Builder uniquify(boolean uniquify) {
        this.uniquify = uniquify;
        return this;
      }

      Builder setTerminateWith(String terminateWith) {
        this.terminateWith = terminateWith;
        return this;
      }
    }

    // Interned instances are distinguished only by their feature bitmask; the variable payload
    // lives in the shared arguments list, not in the VectorArg itself.
    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      VectorArg vectorArg = (VectorArg) o;
      return features == vectorArg.features;
    }

    @Override
    public int hashCode() {
      return Objects.hashCode(features);
    }
  }

  /**
   * An interned descriptor for a single scalar argument. Like VectorArg, the instance only
   * records which optional slots (format / map_fn / location) follow it in the arguments list;
   * the actual values are stored positionally by {@link #push}.
   */
  @AutoCodec
  static final class ScalarArg {
    private static final Interner<ScalarArg> interner = BlazeInterners.newStrongInterner();
    private static final UUID FORMAT_UUID = UUID.fromString("8cb96642-a235-4fe0-b3ed-ebfdae8a0bd9");
    private final boolean hasFormat;
    private final boolean hasMapFn;
    private final boolean hasLocation;

    private ScalarArg(boolean hasFormat, boolean hasMapFn, boolean hasLocation) {
      this.hasFormat = hasFormat;
      this.hasMapFn = hasMapFn;
      this.hasLocation = hasLocation;
    }

    @AutoCodec.VisibleForSerialization
    @AutoCodec.Instantiator
    static ScalarArg create(boolean hasFormat, boolean hasMapFn, boolean hasLocation) {
      return interner.intern(new ScalarArg(hasFormat, hasMapFn, hasLocation));
    }

    /**
     * Appends the descriptor followed by its payload slots. Slot order (object, location?,
     * mapFn?, format?) must match the read order in {@link #eval} and {@link #addToFingerprint}.
     */
    private static void push(ImmutableList.Builder<Object> arguments, Builder arg) {
      // The location is only stored if something (format or map_fn) can fail and needs it
      // for error reporting.
      boolean wantsLocation = arg.format != null || arg.mapFn != null;
      boolean hasLocation = arg.location != null && wantsLocation;
      ScalarArg scalarArg = ScalarArg.create(arg.format != null, arg.mapFn != null, hasLocation);
      arguments.add(scalarArg);
      arguments.add(arg.object);
      if (hasLocation) {
        arguments.add(arg.location);
      }
      if (scalarArg.hasMapFn) {
        arguments.add(arg.mapFn);
      }
      if (scalarArg.hasFormat) {
        arguments.add(arg.format);
      }
    }

    /** Expands this scalar to a single command-line string, consuming its payload slots. */
    private int eval(
        List<Object> arguments, int argi, ImmutableList.Builder<String> builder,
        SkylarkSemantics skylarkSemantics)
        throws CommandLineExpansionException {
      Object object = arguments.get(argi++);
      final Location location = hasLocation ? (Location) arguments.get(argi++) : null;
      if (hasMapFn) {
        BaseFunction mapFn = (BaseFunction) arguments.get(argi++);
        object = applyMapFn(mapFn, object, location, skylarkSemantics);
      }
      String stringValue = CommandLineItem.expandToCommandLine(object);
      if (hasFormat) {
        String formatStr = (String) arguments.get(argi++);
        Formatter formatter = Formatter.get(location, skylarkSemantics);
        stringValue = formatter.format(formatStr, stringValue);
      }
      builder.add(stringValue);
      return argi;
    }

    /**
     * Adds this scalar to the action key fingerprint without full expansion when possible.
     * Falls back to eager expansion if a map_fn is present (its output can't be predicted).
     */
    private int addToFingerprint(
        List<Object> arguments, int argi, Fingerprint fingerprint,
        SkylarkSemantics skylarkSemantics)
        throws CommandLineExpansionException {
      if (hasMapFn) {
        return addToFingerprintLegacy(arguments, argi, fingerprint, skylarkSemantics);
      }
      Object object = arguments.get(argi++);
      String stringValue = CommandLineItem.expandToCommandLine(object);
      fingerprint.addString(stringValue);
      if (hasLocation) {
        argi++; // Skip past location slot
      }
      if (hasFormat) {
        String formatStr = (String) arguments.get(argi++);
        fingerprint.addUUID(FORMAT_UUID);
        fingerprint.addString(formatStr);
      }
      return argi;
    }

    /** Eagerly expands to the (single) final string and fingerprints it. */
    private int addToFingerprintLegacy(
        List<Object> arguments, int argi, Fingerprint fingerprint,
        SkylarkSemantics skylarkSemantics)
        throws CommandLineExpansionException {
      ImmutableList.Builder<String> builder = ImmutableList.builderWithExpectedSize(1);
      argi = eval(arguments, argi, builder, skylarkSemantics);
      for (String s : builder.build()) {
        fingerprint.addString(s);
      }
      return argi;
    }

    /** Mutable builder for a scalar argument. */
    static class Builder {
      private Object object;
      private String format;
      private BaseFunction mapFn;
      private Location location;

      Builder(Object object) {
        this.object = object;
      }

      Builder setLocation(Location location) {
        this.location = location;
        return this;
      }

      Builder setFormat(String format) {
        this.format = format;
        return this;
      }

      Builder setMapFn(BaseFunction mapFn) {
        this.mapFn = mapFn;
        return this;
      }
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      ScalarArg scalarArg = (ScalarArg) o;
      return hasFormat == scalarArg.hasFormat
          && hasMapFn == scalarArg.hasMapFn
          && hasLocation == scalarArg.hasLocation;
    }

    @Override
    public int hashCode() {
      return Objects.hashCode(hasFormat, hasMapFn, hasLocation);
    }
  }

  /** Builder for the whole command line: a flat list of plain objects and arg descriptors. */
  static class Builder {
    private final SkylarkSemantics skylarkSemantics;
    private final ImmutableList.Builder<Object> arguments = ImmutableList.builder();

    public Builder(SkylarkSemantics skylarkSemantics) {
      this.skylarkSemantics = skylarkSemantics;
    }

    Builder add(Object object) {
      arguments.add(object);
      return this;
    }

    Builder add(VectorArg.Builder vectorArg) {
      VectorArg.push(arguments, vectorArg);
      return this;
    }

    Builder add(ScalarArg.Builder scalarArg) {
      ScalarArg.push(arguments, scalarArg);
      return this;
    }

    SkylarkCustomCommandLine build() {
      return new SkylarkCustomCommandLine(skylarkSemantics, arguments.build());
    }
  }

  @AutoCodec.VisibleForSerialization
  @AutoCodec.Instantiator
  SkylarkCustomCommandLine(SkylarkSemantics skylarkSemantics, ImmutableList<Object> arguments) {
    this.arguments = arguments;
    this.skylarkSemantics = skylarkSemantics;
  }

  /**
   * Expands the stored argument list to the final command-line strings. Descriptor entries
   * (VectorArg/ScalarArg) consume their trailing payload slots and return the next index.
   */
  @Override
  public Iterable<String> arguments() throws CommandLineExpansionException {
    ImmutableList.Builder<String> result = ImmutableList.builder();
    for (int argi = 0; argi < arguments.size(); ) {
      Object arg = arguments.get(argi++);
      if (arg instanceof VectorArg) {
        argi = ((VectorArg) arg).eval(arguments, argi, result, skylarkSemantics);
      } else if (arg instanceof ScalarArg) {
        argi = ((ScalarArg) arg).eval(arguments, argi, result, skylarkSemantics);
      } else {
        result.add(CommandLineItem.expandToCommandLine(arg));
      }
    }
    return result.build();
  }

  /** Same walk as {@link #arguments()}, but feeding the action-key fingerprint. */
  @Override
  public void addToFingerprint(ActionKeyContext actionKeyContext, Fingerprint fingerprint)
      throws CommandLineExpansionException {
    for (int argi = 0; argi < arguments.size(); ) {
      Object arg = arguments.get(argi++);
      if (arg instanceof VectorArg) {
        argi = ((VectorArg) arg)
            .addToFingerprint(arguments, argi, actionKeyContext, fingerprint, skylarkSemantics);
      } else if (arg instanceof ScalarArg) {
        argi = ((ScalarArg) arg).addToFingerprint(arguments, argi, fingerprint, skylarkSemantics);
      } else {
        fingerprint.addString(CommandLineItem.expandToCommandLine(arg));
      }
    }
  }

  /** Applies a user-supplied format string to one expanded value. */
  private interface Formatter {
    String format(String formatStr, String subject) throws CommandLineExpansionException;

    // Semantics flag selects strict single-"%s" formatting vs. the legacy Skylark printer.
    static Formatter get(Location location, SkylarkSemantics skylarkSemantics) {
      return skylarkSemantics.incompatibleDisallowOldStyleArgsAdd()
          ? SingleStringArgFormatter::format
          : new LegacyFormatter(location);
    }
  }

  /** Legacy formatter delegating to the Skylark printer; reuses one arg list to reduce GC. */
  private static class LegacyFormatter implements Formatter {
    @Nullable private final Location location;
    private final ArrayList<Object> args;

    public LegacyFormatter(Location location) {
      this.location = location;
      this.args = new ArrayList<>(1); // Reused arg list to reduce GC
      this.args.add(null);
    }

    @Override
    public String format(String formatStr, String subject) throws CommandLineExpansionException {
      try {
        args.set(0, subject);
        SkylarkPrinter printer = Printer.getPrinter();
        return printer.formatWithList(formatStr, args).toString();
      } catch (IllegalFormatException e) {
        throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, null));
      }
    }
  }

  /**
   * Invokes a Skylark map_fn in a fresh, short-lived Environment, converting Skylark
   * evaluation failures (and interruption) into CommandLineExpansionException.
   */
  private static Object applyMapFn(
      BaseFunction mapFn, Object arg, Location location, SkylarkSemantics skylarkSemantics)
      throws CommandLineExpansionException {
    ImmutableList<Object> args = ImmutableList.of(arg);
    try (Mutability mutability = Mutability.create("map_fn")) {
      Environment env =
          Environment.builder(mutability)
              .setSemantics(skylarkSemantics)
              .setEventHandler(NullEventHandler.INSTANCE)
              .build();
      return mapFn.call(args, ImmutableMap.of(), null, env);
    } catch (EvalException e) {
      throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, e.getCause()));
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new CommandLineExpansionException(
          errorMessage("Thread was interrupted", location, null));
    }
  }
  /**
   * Invokes a Skylark map_each function once per value, feeding each resulting string to
   * {@code consumer}. A return value may be a string, a list of strings, or None (skipped);
   * anything else is an error.
   */
  private static void applyMapEach(
      BaseFunction mapFn,
      List<Object> originalValues,
      Consumer<String> consumer,
      Location location,
      SkylarkSemantics skylarkSemantics)
      throws CommandLineExpansionException {
    try (Mutability mutability = Mutability.create("map_each")) {
      Environment env =
          Environment.builder(mutability)
              .setSemantics(skylarkSemantics)
              // TODO(b/77140311): Error if we issue print statements
              .setEventHandler(NullEventHandler.INSTANCE)
              .build();
      Object[] args = new Object[1];
      int count = originalValues.size();
      for (int i = 0; i < count; ++i) {
        args[0] = originalValues.get(i); // args array reused across calls to avoid allocation
        Object ret = mapFn.callWithArgArray(args, null, env, location);
        if (ret instanceof String) {
          consumer.accept((String) ret);
        } else if (ret instanceof SkylarkList) {
          for (Object val : ((SkylarkList) ret)) {
            if (!(val instanceof String)) {
              throw new CommandLineExpansionException(
                  "Expected map_each to return string, None, or list of strings, "
                      + "found list containing "
                      + val.getClass().getSimpleName());
            }
            consumer.accept((String) val);
          }
        } else if (ret != Runtime.NONE) {
          throw new CommandLineExpansionException(
              "Expected map_each to return string, None, or list of strings, found "
                  + ret.getClass().getSimpleName());
        }
      }
    } catch (EvalException e) {
      throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, e.getCause()));
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new CommandLineExpansionException(
          errorMessage("Thread was interrupted", location, null));
    }
  }

  /**
   * Adapts a Skylark map_each function to the CommandLineItem map-fn interface. Equality is
   * deliberately identity-based on the wrapped function (see comments below).
   */
  private static class CommandLineItemMapEachAdaptor
      extends CommandLineItem.ParametrizedMapFn<Object> {
    private final BaseFunction mapFn;
    private final Location location;
    private final SkylarkSemantics skylarkSemantics;

    CommandLineItemMapEachAdaptor(
        BaseFunction mapFn, Location location, SkylarkSemantics skylarkSemantics) {
      this.mapFn = mapFn;
      this.location = location;
      this.skylarkSemantics = skylarkSemantics;
    }

    @Override
    public void expandToCommandLine(Object object, Consumer<String> args) {
      try {
        applyMapEach(mapFn, ImmutableList.of(object), args, location, skylarkSemantics);
      } catch (CommandLineExpansionException e) {
        // Rather than update CommandLineItem#expandToCommandLine and the numerous callers,
        // we wrap this in a runtime exception and handle it above
        throw new UncheckedCommandLineExpansionException(e);
      }
    }

    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof CommandLineItemMapEachAdaptor)) {
        return false;
      }
      CommandLineItemMapEachAdaptor other = (CommandLineItemMapEachAdaptor) obj;
      // Instance compare intentional
      // The normal implementation uses location + name of function,
      // which can conceivably conflict in tests
      return mapFn == other.mapFn;
    }

    @Override
    public int hashCode() {
      // identity hashcode intentional
      return System.identityHashCode(mapFn);
    }

    @Override
    public int maxInstancesAllowed() {
      // No limit to these, as this is just a wrapper for Skylark functions, which are
      // always static
      return Integer.MAX_VALUE;
    }
  }

  /** Formats a location-prefixed error message, appending a non-redundant cause message. */
  private static String errorMessage(
      String message, @Nullable Location location, @Nullable Throwable cause) {
    return LINE_JOINER.join(
        "\n", FIELD_JOINER.join(location, message), getCauseMessage(cause, message));
  }

  /** Returns the cause's message, or null if absent or already contained in {@code message}. */
  private static String getCauseMessage(@Nullable Throwable cause, String message) {
    if (cause == null) {
      return null;
    }
    String causeMessage = cause.getMessage();
    if (causeMessage == null) {
      return null;
    }
    if (message == null) {
      return causeMessage;
    }
    // Skip the cause if it is redundant with the message so far.
    if (message.contains(causeMessage)) {
      return null;
    }
    return causeMessage;
  }

  /** Unchecked carrier for CommandLineExpansionException thrown inside non-throwing callbacks. */
  private static class UncheckedCommandLineExpansionException extends RuntimeException {
    final CommandLineExpansionException cause;

    UncheckedCommandLineExpansionException(CommandLineExpansionException cause) {
      this.cause = cause;
    }
  }
}
/*
Copyright (c) Microsoft Open Technologies, Inc.
All Rights Reserved
Apache 2.0 License

Licensed under the Apache License, Version 2.0 (the "License"); you
may not use this file except in compliance with the License. You may
obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.

See the Apache Version 2.0 License for specific language governing
permissions and limitations under the License.
*/
package com.microsoft.windowsazure.mobileservices.zumoe2etestapp.framework;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Hashtable;
import java.util.List;
import java.util.Locale;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.TimeZone;

import org.apache.http.Header;

import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import com.microsoft.windowsazure.mobileservices.MobileServiceClient;
import com.microsoft.windowsazure.mobileservices.http.ServiceFilterResponse;
import com.microsoft.windowsazure.mobileservices.table.serialization.DateSerializer;

/**
 * Miscellaneous helpers for the E2E test framework: random-string generation,
 * deep comparisons (arrays, lists, JSON), UTC date construction/formatting, and
 * HTTP header lookup on service responses.
 */
public class Util {

    /** Simple predicate used by {@link #filter(List, IPredicate)}. */
    public interface IPredicate<T> {
        boolean evaluate(T type);
    }

    /** Timestamp format used by the test log. */
    public final static String LogTimeFormat = "yyyy-MM-dd HH:mm:ss'.'SSS";

    // Shared mutable parameter bag for cross-test state.
    private final static Hashtable<String, String> globalTestParameters = new Hashtable<String, String>();

    public static Hashtable<String, String> getGlobalTestParameters() {
        return globalTestParameters;
    }

    /**
     * Returns a random string: 2/3 of the time a simple printable-ASCII string,
     * otherwise one drawn from the full BMP (excluding surrogates).
     */
    public static String createComplexRandomString(Random rndGen, int size) {
        if (rndGen.nextInt(3) > 0) {
            return createSimpleRandomString(rndGen, size);
        } else {
            return createSimpleRandomString(rndGen, size, ' ', 0xfffe);
        }
    }

    /** Returns a random string of printable ASCII characters (' ' .. '}'). */
    public static String createSimpleRandomString(Random rndGen, int size) {
        int minChar = ' ';
        int maxChar = '~';
        return createSimpleRandomString(rndGen, size, minChar, maxChar);
    }

    /**
     * Returns a random string of {@code size} characters in [minChar, maxChar)
     * (upper bound exclusive), skipping UTF-16 surrogate code units so the
     * result is always well-formed.
     */
    public static String createSimpleRandomString(Random rndGen, int size, int minChar, int maxChar) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < size; i++) {
            int charRand;
            char c;
            do {
                charRand = rndGen.nextInt(maxChar - minChar);
                c = (char) (minChar + charRand);
            } while (Character.isLowSurrogate(c) || Character.isHighSurrogate(c));
            sb.append(c);
        }
        return sb.toString();
    }

    /** Element-wise comparison of two lists via {@link #compareArrays}. */
    public static <E> boolean compareLists(List<E> l1, List<E> l2) {
        return compareArrays(l1.toArray(), l2.toArray());
    }

    /** Null-safe, element-wise comparison of two arrays using {@link #compare}. */
    public static boolean compareArrays(Object[] arr1, Object[] arr2) {
        if (arr1 == null && arr2 == null) {
            return true;
        }
        if (arr1 == null || arr2 == null) {
            return false;
        }
        if (arr1.length != arr2.length) {
            return false;
        }
        for (int i = 0; i < arr1.length; i++) {
            Object o1 = arr1[i];
            Object o2 = arr2[i];
            if (!compare(o1, o2)) {
                return false;
            }
        }
        return true;
    }

    public static <E> String listToString(List<E> list) {
        return arrayToString(list.toArray());
    }

    /**
     * Renders an array as "[a, b, c]"; a null array renders as "<<NULL>>".
     * Null elements render as "null" (previously this threw NullPointerException).
     */
    public static String arrayToString(Object[] arr) {
        if (arr == null) {
            return "<<NULL>>";
        } else {
            StringBuilder sb = new StringBuilder();
            sb.append("[");
            for (int i = 0; i < arr.length; i++) {
                // FIX: String.valueOf is null-safe; elem.toString() NPE'd on null entries.
                sb.append(String.valueOf(arr[i]));
                if (i != arr.length - 1) {
                    sb.append(", ");
                }
            }
            sb.append("]");
            return sb.toString();
        }
    }

    /** Formats a date as ISO-8601 with milliseconds, UTC ("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"). */
    public static String dateToString(Date date) {
        return dateToString(date, "yyyy-MM-dd'T'HH:mm:ss'.'SSS'Z'");
    }

    /** Formats a date with the given pattern in UTC; null renders as "NULL". */
    public static String dateToString(Date date, String dateFormatStr) {
        if (date == null) {
            return "NULL";
        }
        SimpleDateFormat dateFormat = new SimpleDateFormat(dateFormatStr, Locale.getDefault());
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        String formatted = dateFormat.format(date);
        return formatted;
    }

    /** Null-safe equals. */
    public static boolean compare(Object o1, Object o2) {
        if (o1 == null && o2 == null) {
            return true;
        }
        if (o1 == null || o2 == null) {
            return false;
        }
        return o1.equals(o2);
    }

    /** Creates a no-op test case that always passes, used as a visual separator in reports. */
    public static TestCase createSeparatorTest(String testName) {
        return new TestCase(testName) {

            @Override
            protected void executeTest(MobileServiceClient client, TestExecutionCallback callback) {
                TestResult testResult = new TestResult();
                testResult.setTestCase(this);
                testResult.setStatus(TestStatus.Passed);
                callback.onTestComplete(this, testResult);
            }
        };
    }

    /**
     * Deep comparison of JSON trees. Property names are matched case-insensitively,
     * "id" properties are ignored, and string primitives that parse as dates are
     * compared as dates.
     */
    public static boolean compareJson(JsonElement e1, JsonElement e2) {
        // NOTE: if every property defined in e1 is in e2, the objects are
        // considered equal.
        if (e1 == null && e2 == null) {
            return true;
        }
        if (e1 == null || e2 == null) {
            return false;
        }
        if (e1.getClass() != e2.getClass()) {
            return false;
        }
        if (e1 instanceof JsonPrimitive) {
            JsonPrimitive p1 = (JsonPrimitive) e1;
            JsonPrimitive p2 = (JsonPrimitive) e2;
            if (p1.isString()) {
                try {
                    // If both parse as dates, compare the parsed values; otherwise fall back
                    // to plain equality (deliberate best-effort, hence the broad catch).
                    Date d1 = DateSerializer.deserialize(p1.getAsString());
                    Date d2 = DateSerializer.deserialize(p2.getAsString());
                    if (!d1.equals(d2) && !e1.equals(e2)) {
                        return false;
                    }
                } catch (Throwable t) {
                    if (!e1.equals(e2)) {
                        return false;
                    }
                }
            } else if (!e1.equals(e2)) {
                return false;
            }
        } else if (e1 instanceof JsonArray) {
            JsonArray a1 = (JsonArray) e1;
            JsonArray a2 = (JsonArray) e2;
            if (a1.size() != a2.size()) {
                return false;
            }
            for (int i = 0; i < a1.size(); i++) {
                if (!compareJson(a1.get(i), a2.get(i))) {
                    return false;
                }
            }
        } else if (e1 instanceof JsonObject) {
            JsonObject o1 = (JsonObject) e1;
            JsonObject o2 = (JsonObject) e2;
            Set<Entry<String, JsonElement>> entrySet1 = o1.entrySet();
            for (Entry<String, JsonElement> entry : entrySet1) {
                if (entry.getKey().toLowerCase(Locale.getDefault()).equals("id")) {
                    continue; // server-assigned ids are not compared
                }
                String propertyName1 = entry.getKey();
                String propertyName2 = null;
                // Case-insensitive property lookup in o2.
                for (Entry<String, JsonElement> entry2 : o2.entrySet()) {
                    if (propertyName1.toLowerCase(Locale.getDefault())
                            .equals(entry2.getKey().toLowerCase(Locale.getDefault()))) {
                        propertyName2 = entry2.getKey();
                    }
                }
                if (propertyName2 == null) {
                    return false;
                }
                if (!compareJson(entry.getValue(), o2.get(propertyName2))) {
                    return false;
                }
            }
        }
        return true;
    }

    /** Current time in UTC. */
    public static Date getUTCNow() {
        // FIX: time-zone IDs are case-sensitive; "utc" was unrecognized and silently
        // fell back to GMT (same offset, but accidental). Use the canonical "UTC" id.
        return new GregorianCalendar(TimeZone.getTimeZone("UTC")).getTime();
    }

    /** UTC date at midnight; {@code month} is 1-based (January == 1). */
    public static Date getUTCDate(int year, int month, int day) {
        return getUTCDate(year, month, day, 0, 0, 0);
    }

    /** UTC date/time with milliseconds zeroed; {@code month} is 1-based. */
    public static Date getUTCDate(int year, int month, int day, int hour, int minute, int second) {
        GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        int dateMonth = month - 1; // Calendar months are 0-based
        calendar.set(year, dateMonth, day, hour, minute, second);
        calendar.set(Calendar.MILLISECOND, 0);
        return calendar.getTime();
    }

    /** Wraps a date in a UTC calendar. */
    public static Calendar getUTCCalendar(Date date) {
        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.getDefault());
        cal.setTime(date);
        return cal;
    }

    /** Returns true if the response carries a header with the given (exact) name. */
    public static boolean responseContainsHeader(ServiceFilterResponse response, String headerName) {
        for (Header header : response.getHeaders()) {
            if (header.getName().equals(headerName)) {
                return true;
            }
        }
        return false;
    }

    /** Returns the value of the first header with the given name, or null if absent. */
    public static String getHeaderValue(ServiceFilterResponse response, String headerName) {
        for (Header header : response.getHeaders()) {
            if (header.getName().equals(headerName)) {
                return header.getValue();
            }
        }
        return null;
    }

    /** Returns the elements of {@code list} matching {@code predicate}; null in, null out. */
    public static <T> List<T> filter(List<T> list, IPredicate<T> predicate) {
        if (list == null) {
            return null;
        } else {
            List<T> filteredList = new ArrayList<T>();
            for (T element : list) {
                if (predicate.evaluate(element)) {
                    filteredList.add(element);
                }
            }
            return filteredList;
        }
    }
}
/*
This class is a type of user
This class implements type AccountHolder
*/
package DataSource;

import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;

/**
 * Administrator account holder backed by the "Admin" table.
 *
 * <p>Fixes in this revision:
 * <ul>
 *   <li>{@code getDepartment()} returned {@code branch} instead of {@code department}.</li>
 *   <li>{@code getLast()} always returned {@code null} instead of {@code last}.</li>
 *   <li>{@code update()} had a stray comma before WHERE, bound the timestamp twice
 *       (shifting every later parameter index) and never bound the WHERE parameters,
 *       so it threw SQLException on every call.</li>
 *   <li>{@code viewAccount()} NPE'd on unknown AccountType values.</li>
 *   <li>JDBC statements/result sets are now closed via try-with-resources.</li>
 * </ul>
 */
public class Admin implements AccountHolder {

    private String loginID, workingID;
    private String passWord;
    private String first;
    private String last;
    private String middle;
    private String gender;
    private String branch;
    private String department;
    private String phone;
    Date createDate, timeStamp;
    private String email;
    // Timestamp captured at construction, written on create/update.
    private java.sql.Timestamp date = new java.sql.Timestamp(new java.util.Date().getTime());
    private ArrayList<Account> accounts = new ArrayList<Account>();

    // accessors and mutators

    @Override
    public String getLoginID() {
        return loginID;
    }

    @Override
    public void setLoginID(String input) {
        loginID = input;
    }

    @Override
    public String getGender() {
        return gender;
    }

    @Override
    public void setGender(String input) {
        gender = input;
    }

    @Override
    public String getPhone() {
        return phone;
    }

    @Override
    public void setPhone(String input) {
        phone = input;
    }

    @Override
    public String getPassword() {
        return passWord;
    }

    @Override
    public void setPassword(String input) {
        passWord = input;
    }

    @Override
    public Date getCreateDate() {
        return createDate;
    }

    @Override
    public void setcreateDate(Date date) {
        createDate = date;
    }

    @Override
    public Date getTimeStamp() {
        return timeStamp;
    }

    @Override
    public void setTimeStamp(Date date) {
        timeStamp = date;
    }

    @Override
    public void setAccounts(ArrayList<Account> accounts) {
        this.accounts = accounts;
    }

    @Override
    public ArrayList<Account> getAccounts() {
        return accounts;
    }

    public String getBranch() {
        return branch;
    }

    public void setBranch(String input) {
        branch = input;
    }

    public String getDepartment() {
        // FIX: previously returned the branch field.
        return department;
    }

    public void setDepartment(String input) {
        department = input;
    }

    public String getWorkingID() {
        return workingID;
    }

    public void setWorkingID(String input) {
        workingID = input;
    }

    @Override
    public String getFirst() {
        return first;
    }

    @Override
    public void setFirst(String input) {
        first = input;
    }

    @Override
    public String getMiddle() {
        return middle;
    }

    @Override
    public void setMiddle(String input) {
        middle = input;
    }

    @Override
    public String getLast() {
        // FIX: previously always returned null.
        return last;
    }

    @Override
    public void setLast(String input) {
        last = input;
    }

    /** Clears the cached account list. */
    public void accountsCleaner() {
        accounts.clear();
    }

    @Override
    public String getEmail() {
        return email;
    }

    @Override
    public void setEmail(String input) {
        email = input;
    }

    /**
     * Updates the row matching the current LoginID/Password with the current field values.
     *
     * @return true on success, false if the statement failed
     */
    @Override
    public boolean update(Connection conn) {
        // FIX: removed the stray comma before WHERE, dropped the duplicated timestamp
        // binding (which shifted all later indices), and bound the two WHERE parameters.
        String query = "UPDATE Admin SET FirstName = ?, MiddleName = ?, LastName = ?, Password = ?,"
                + " TimeStamp = ?, Gender = ?, Branch = ?, Department = ?, Phone = ?, WorkingID = ?"
                + " WHERE LoginID = ? AND Password = ?";
        try (PreparedStatement preparedStmt = conn.prepareStatement(query)) {
            preparedStmt.setString(1, this.getFirst());
            preparedStmt.setString(2, this.getMiddle());
            preparedStmt.setString(3, this.getLast());
            preparedStmt.setString(4, this.getPassword());
            preparedStmt.setTimestamp(5, date);
            preparedStmt.setString(6, this.getGender());
            preparedStmt.setString(7, this.getBranch());
            preparedStmt.setString(8, this.getDepartment());
            preparedStmt.setString(9, this.getPhone());
            preparedStmt.setString(10, this.getWorkingID());
            preparedStmt.setString(11, this.getLoginID());
            preparedStmt.setString(12, this.getPassword());
            preparedStmt.execute();
        } catch (SQLException e) {
            // FIX: old message ("Login ID has been registered") was misleading for updates.
            System.err.println("Failed to update Admin record: " + e.getMessage());
            return false;
        }
        return true;
    }

    /**
     * Inserts a new Admin row with the current field values.
     *
     * @return true on success, false if the insert failed (e.g. duplicate login ID)
     */
    @Override
    public boolean create(Connection conn) {
        String query = " insert into Admin (FirstName,MiddleName, LastName, LoginID,Password,CreateDate,TimeStamp,Gender,Branch,Department,Phone,WorkingID,Email)"
                + " values (?, ?, ?, ?, ?,?, ?,?,?, ?, ?, ?,?)";
        try (PreparedStatement preparedStmt = conn.prepareStatement(query)) {
            preparedStmt.setString(1, this.getFirst());
            preparedStmt.setString(2, this.getMiddle());
            preparedStmt.setString(3, this.getLast());
            preparedStmt.setString(4, this.getLoginID());
            preparedStmt.setString(5, this.getPassword());
            preparedStmt.setTimestamp(6, date); // CreateDate
            preparedStmt.setTimestamp(7, date); // TimeStamp
            preparedStmt.setString(8, this.getGender());
            preparedStmt.setString(9, this.getBranch());
            preparedStmt.setString(10, this.getDepartment());
            preparedStmt.setString(11, this.getPhone());
            preparedStmt.setString(12, this.getWorkingID());
            preparedStmt.setString(13, this.getEmail());
            preparedStmt.execute();
        } catch (SQLException e) {
            System.err.println("Login ID has been registered");
            return false;
        }
        return true;
    }

    /**
     * Loads this admin's details from the database using the current LoginID/Password.
     *
     * @return true if a matching row was found and the fields were populated
     */
    @Override
    public boolean view(Connection conn) {
        String query = "select * from Admin where LoginID = ? and Password =?";
        try (PreparedStatement statement = conn.prepareStatement(query)) {
            statement.setString(1, this.getLoginID());
            statement.setString(2, this.getPassword());
            try (ResultSet table = statement.executeQuery()) {
                if (table.next()) {
                    this.setFirst(table.getString("FirstName"));
                    this.setMiddle(table.getString("MiddleName"));
                    this.setLast(table.getString("LastName"));
                    this.setLoginID(table.getString("LoginID"));
                    this.setPassword(table.getString("Password"));
                    this.setcreateDate(table.getDate("CreateDate"));
                    this.setTimeStamp(table.getDate("TimeStamp"));
                    this.setGender(table.getString("Gender"));
                    this.setDepartment(table.getString("Department"));
                    this.setPhone(table.getString("Phone"));
                    this.setBranch(table.getString("Branch"));
                    this.setWorkingID(table.getString("WorkingID"));
                    this.setEmail(table.getString("Email"));
                    System.out.println("1"); // debug output kept for parity with existing logs
                    return true;
                } else {
                    System.out.println("2"); // debug output kept for parity with existing logs
                    return false;
                }
            }
        } catch (SQLException e) {
            System.out.println("3"); // debug output kept for parity with existing logs
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Loads all accounts from the Account table into this admin's account list.
     *
     * @return the number of cached accounts, or 0 if the query failed
     */
    public int viewAccount(Connection conn) {
        String query = "select * from Account ";
        try (PreparedStatement statement = conn.prepareStatement(query);
             ResultSet table = statement.executeQuery()) {
            while (table.next()) {
                int accountType = table.getInt("AccountType");
                Account account;
                if (accountType == 1) {
                    account = new Credit();
                } else if (accountType == 2) {
                    account = new Debit();
                } else {
                    // FIX: unknown types previously left account null and NPE'd below.
                    continue;
                }
                account.setAccountType(accountType);
                account.setLoginID(table.getString("UserLoginID"));
                account.setAccountNo(table.getInt("AccountNo"));
                account.setAccountStauts(table.getInt("AccountStatus"));
                account.setBalance(table.getDouble("Balance"));
                account.setcreateDate(table.getDate("OpenDate"));
                account.setTimeStamp(table.getDate("timeStamp"));
                accounts.add(account);
            }
        } catch (SQLException e) {
            e.printStackTrace();
            return 0;
        }
        return accounts.size();
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. */ package com.intellij.openapi.wm.impl; import com.intellij.ide.actions.ContextHelpAction; import com.intellij.ide.actions.ResizeToolWindowAction; import com.intellij.ide.actions.ToggleToolbarAction; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Queryable; import com.intellij.openapi.ui.Splitter; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.wm.*; import com.intellij.ui.Gray; import com.intellij.ui.JBColor; import com.intellij.ui.UIBundle; import com.intellij.ui.components.panels.NonOpaquePanel; import com.intellij.ui.content.Content; import com.intellij.util.EventDispatcher; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.accessibility.AccessibleContext; import javax.swing.*; import javax.swing.border.Border; import java.awt.*; import java.awt.event.*; import java.util.Map; /** * @author Eugene Belyaev * @author Vladimir Kondratyev */ public final class InternalDecorator extends JPanel implements Queryable, DataProvider { private Project myProject; private WindowInfoImpl myInfo; private final ToolWindowImpl myToolWindow; private final MyDivider myDivider; private final EventDispatcher<InternalDecoratorListener> myDispatcher = EventDispatcher.create(InternalDecoratorListener.class); /* * Actions */ private final TogglePinnedModeAction myToggleAutoHideModeAction; private final 
ToggleDockModeAction myToggleDockModeAction; private final ToggleFloatingModeAction myToggleFloatingModeAction; private final ToggleWindowedModeAction myToggleWindowedModeAction; private final ToggleSideModeAction myToggleSideModeAction; private final ToggleContentUiTypeAction myToggleContentUiTypeAction; private final RemoveStripeButtonAction myHideStripeButtonAction; private ActionGroup myAdditionalGearActions; /** * Catches all event from tool window and modifies decorator's appearance. */ @NonNls static final String HIDE_ACTIVE_WINDOW_ACTION_ID = "HideActiveWindow"; @NonNls public static final String TOGGLE_PINNED_MODE_ACTION_ID = "TogglePinnedMode"; @NonNls public static final String TOGGLE_DOCK_MODE_ACTION_ID = "ToggleDockMode"; @NonNls public static final String TOGGLE_FLOATING_MODE_ACTION_ID = "ToggleFloatingMode"; @NonNls public static final String TOGGLE_WINDOWED_MODE_ACTION_ID = "ToggleWindowedMode"; @NonNls public static final String TOGGLE_SIDE_MODE_ACTION_ID = "ToggleSideMode"; @NonNls private static final String TOGGLE_CONTENT_UI_TYPE_ACTION_ID = "ToggleContentUiTypeMode"; private ToolWindowHeader myHeader; private ActionGroup myToggleToolbarGroup; InternalDecorator(final Project project, @NotNull WindowInfoImpl info, final ToolWindowImpl toolWindow, boolean dumbAware) { super(new BorderLayout()); myProject = project; myToolWindow = toolWindow; myToolWindow.setDecorator(this); myDivider = new MyDivider(); myToggleFloatingModeAction = new ToggleFloatingModeAction(); myToggleWindowedModeAction = new ToggleWindowedModeAction(); myToggleSideModeAction = new ToggleSideModeAction(); myToggleDockModeAction = new ToggleDockModeAction(); myToggleAutoHideModeAction = new TogglePinnedModeAction(); myToggleContentUiTypeAction = new ToggleContentUiTypeAction(); myHideStripeButtonAction = new RemoveStripeButtonAction(); myToggleToolbarGroup = ToggleToolbarAction.createToggleToolbarGroup(myProject, myToolWindow); setFocusable(false); setFocusTraversalPolicy(new 
LayoutFocusTraversalPolicy()); myHeader = new ToolWindowHeader(toolWindow, info, () -> { return /*createGearPopupGroup()*/createPopupGroup(true); }) { @Override protected boolean isActive() { return myToolWindow.isActive(); } @Override protected void hideToolWindow() { fireHidden(); } @Override protected void toolWindowTypeChanged(@NotNull ToolWindowType type) { fireTypeChanged(type); } @Override protected void sideHidden() { fireHiddenSide(); } }; init(dumbAware); apply(info); } @Override public String toString() { return myToolWindow.getId(); } public boolean isFocused() { IdeFocusManager fm = IdeFocusManager.getInstance(myProject); Component component = fm.getFocusedDescendantFor(myToolWindow.getComponent()); if (component != null) return true; Component owner = fm.getLastFocusedFor(WindowManager.getInstance().getIdeFrame(myProject)); return owner != null && SwingUtilities.isDescendingFrom(owner, myToolWindow.getComponent()); } /** * Applies specified decoration. */ public final void apply(@NotNull WindowInfoImpl info) { if (Comparing.equal(myInfo, info) || myProject == null || myProject.isDisposed()) { return; } myInfo = info; // Anchor final ToolWindowAnchor anchor = myInfo.getAnchor(); if (info.isSliding()) { myDivider.invalidate(); if (ToolWindowAnchor.TOP == anchor) { add(myDivider, BorderLayout.SOUTH); } else if (ToolWindowAnchor.LEFT == anchor) { add(myDivider, BorderLayout.EAST); } else if (ToolWindowAnchor.BOTTOM == anchor) { add(myDivider, BorderLayout.NORTH); } else if (ToolWindowAnchor.RIGHT == anchor) { add(myDivider, BorderLayout.WEST); } myDivider.setPreferredSize(new Dimension(0, 0)); } else { // docked and floating windows don't have divider remove(myDivider); } validate(); repaint(); // Push "apply" request forward if (myInfo.isFloating() && myInfo.isVisible()) { final FloatingDecorator floatingDecorator = (FloatingDecorator)SwingUtilities.getAncestorOfClass(FloatingDecorator.class, this); if (floatingDecorator != null) { 
floatingDecorator.apply(myInfo); } } myToolWindow.getContentUI().setType(myInfo.getContentUiType()); setBorder(new InnerPanelBorder(myToolWindow)); } @Nullable @Override public Object getData(@NonNls String dataId) { if (PlatformDataKeys.TOOL_WINDOW.is(dataId)) { return myToolWindow; } return null; } final void addInternalDecoratorListener(InternalDecoratorListener l) { myDispatcher.addListener(l); } final void removeInternalDecoratorListener(InternalDecoratorListener l) { myDispatcher.removeListener(l); } final void dispose() { removeAll(); Disposer.dispose(myHeader); myHeader = null; myProject = null; } private void fireAnchorChanged(@NotNull ToolWindowAnchor anchor) { myDispatcher.getMulticaster().anchorChanged(this, anchor); } private void fireAutoHideChanged(boolean autoHide) { myDispatcher.getMulticaster().autoHideChanged(this, autoHide); } /** * Fires event that "hide" button has been pressed. */ final void fireHidden() { myDispatcher.getMulticaster().hidden(this); } /** * Fires event that "hide" button has been pressed. */ final void fireHiddenSide() { myDispatcher.getMulticaster().hiddenSide(this); } /** * Fires event that user performed click into the title bar area. 
*/ final void fireActivated() { myDispatcher.getMulticaster().activated(this); } private void fireTypeChanged(@NotNull ToolWindowType type) { myDispatcher.getMulticaster().typeChanged(this, type); } final void fireResized() { myDispatcher.getMulticaster().resized(this); } private void fireSideStatusChanged(boolean isSide) { myDispatcher.getMulticaster().sideStatusChanged(this, isSide); } private void fireContentUiTypeChanges(@NotNull ToolWindowContentUiType type) { myDispatcher.getMulticaster().contentUiTypeChanges(this, type); } private void fireVisibleOnPanelChanged(final boolean visibleOnPanel) { myDispatcher.getMulticaster().visibleStripeButtonChanged(this, visibleOnPanel); } private void init(boolean dumbAware) { enableEvents(AWTEvent.COMPONENT_EVENT_MASK); final JPanel contentPane = new JPanel(new BorderLayout()); contentPane.add(myHeader, BorderLayout.NORTH); JPanel innerPanel = new JPanel(new BorderLayout()); JComponent toolWindowComponent = myToolWindow.getComponent(); if (!dumbAware) { toolWindowComponent = DumbService.getInstance(myProject).wrapGently(toolWindowComponent, myProject); } innerPanel.add(toolWindowComponent, BorderLayout.CENTER); final NonOpaquePanel inner = new NonOpaquePanel(innerPanel); contentPane.add(inner, BorderLayout.CENTER); add(contentPane, BorderLayout.CENTER); if (SystemInfo.isMac) { setBackground(new JBColor(Gray._200, Gray._90)); } // Add listeners registerKeyboardAction(new ActionListener() { @Override public void actionPerformed(final ActionEvent e) { ToolWindowManager.getInstance(myProject).activateEditorComponent(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); } public void setTitleActions(AnAction[] actions) { myHeader.setAdditionalTitleActions(actions); } private class InnerPanelBorder implements Border { private final ToolWindow myWindow; private InnerPanelBorder(ToolWindow window) { myWindow = window; } @Override public void paintBorder(final Component c, final 
Graphics g, final int x, final int y, final int width, final int height) {
      if (UIUtil.isUnderDarcula()) {
        g.setColor(Gray._40);
        doPaintBorder(c, g, x, y, width, height);
      }
      else {
        g.setColor(SystemInfo.isMac && UIUtil.isUnderIntelliJLaF() ? Gray.xC9 : Gray._155);
        doPaintBorder(c, g, x, y, width, height);
      }
    }

    // Draws a two-pixel line along every edge whose inset is non-zero.
    private void doPaintBorder(Component c, Graphics g, int x, int y, int width, int height) {
      Insets insets = getBorderInsets(c);
      if (insets.top > 0) {
        UIUtil.drawLine(g, x, y + insets.top - 1, x + width - 1, y + insets.top - 1);
        UIUtil.drawLine(g, x, y + insets.top, x + width - 1, y + insets.top);
      }
      if (insets.left > 0) {
        UIUtil.drawLine(g, x, y, x, y + height);
        UIUtil.drawLine(g, x + 1, y, x + 1, y + height);
      }
      if (insets.right > 0) {
        UIUtil.drawLine(g, x + width - 1, y + insets.top, x + width - 1, y + height);
        UIUtil.drawLine(g, x + width, y + insets.top, x + width, y + height);
      }
      if (insets.bottom > 0) {
        UIUtil.drawLine(g, x, y + height - 1, x + width, y + height - 1);
        UIUtil.drawLine(g, x, y + height, x + width, y + height);
      }
    }

    @Override
    public Insets getBorderInsets(final Component c) {
      // No border for unregistered/floating/windowed tool windows.
      if (myProject == null) return new Insets(0, 0, 0, 0);
      ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
      if (!(toolWindowManager instanceof ToolWindowManagerImpl)
          || !((ToolWindowManagerImpl)toolWindowManager).isToolWindowRegistered(myInfo.getId())
          || myWindow.getType() == ToolWindowType.FLOATING
          || myWindow.getType() == ToolWindowType.WINDOWED) {
        return new Insets(0, 0, 0, 0);
      }
      ToolWindowAnchor anchor = myWindow.getAnchor();
      Component component = myWindow.getComponent();
      Container parent = component.getParent();
      boolean isSplitter = false;
      boolean isFirstInSplitter = false;
      boolean isVerticalSplitter = false;
      // Walk up to the nearest Splitter ancestor to learn this window's position inside it.
      while(parent != null) {
        if (parent instanceof Splitter) {
          Splitter splitter = (Splitter)parent;
          isSplitter = true;
          isFirstInSplitter = splitter.getFirstComponent() == component;
          isVerticalSplitter = splitter.isVertical();
          break;
        }
        component = parent;
        parent = component.getParent();
      }
      int top = isSplitter && (anchor == ToolWindowAnchor.RIGHT || anchor == ToolWindowAnchor.LEFT) && myInfo.isSplit() && isVerticalSplitter ? -1 : 0;
      int left = anchor == ToolWindowAnchor.RIGHT && (!isSplitter || isVerticalSplitter || isFirstInSplitter) ? 1 : 0;
      int bottom = 0;
      int right = anchor == ToolWindowAnchor.LEFT && (!isSplitter || isVerticalSplitter || !isFirstInSplitter) ? 1 : 0;
      return new Insets(top, left, bottom, right);
    }

    @Override
    public boolean isBorderOpaque() {
      return false;
    }
  }

  public final ActionGroup createPopupGroup() {
    return createPopupGroup(false);
  }

  /**
   * Builds the tool-window popup menu: gear actions, "Move to" anchors (all but the current one),
   * resize actions, optionally "Hide", and context help.
   *
   * @param skipHideAction when {@code true}, the Hide action is omitted from the menu
   */
  public final ActionGroup createPopupGroup(boolean skipHideAction) {
    final DefaultActionGroup group = createGearPopupGroup();
    if (!ToolWindowId.PREVIEW.equals(myInfo.getId())) {
      group.add(myToggleContentUiTypeAction);
    }
    final DefaultActionGroup moveGroup = new DefaultActionGroup(UIBundle.message("tool.window.move.to.action.group.name"), true);
    final ToolWindowAnchor anchor = myInfo.getAnchor();
    if (anchor != ToolWindowAnchor.TOP) {
      final AnAction topAction = new ChangeAnchorAction(UIBundle.message("tool.window.move.to.top.action.name"), ToolWindowAnchor.TOP);
      moveGroup.add(topAction);
    }
    if (anchor != ToolWindowAnchor.LEFT) {
      final AnAction leftAction = new ChangeAnchorAction(UIBundle.message("tool.window.move.to.left.action.name"), ToolWindowAnchor.LEFT);
      moveGroup.add(leftAction);
    }
    if (anchor != ToolWindowAnchor.BOTTOM) {
      final AnAction bottomAction = new ChangeAnchorAction(UIBundle.message("tool.window.move.to.bottom.action.name"), ToolWindowAnchor.BOTTOM);
      moveGroup.add(bottomAction);
    }
    if (anchor != ToolWindowAnchor.RIGHT) {
      final AnAction rightAction = new ChangeAnchorAction(UIBundle.message("tool.window.move.to.right.action.name"), ToolWindowAnchor.RIGHT);
      moveGroup.add(rightAction);
    }
    group.add(moveGroup);
    DefaultActionGroup resize = new DefaultActionGroup(ActionsBundle.groupText("ResizeToolWindowGroup"), true);
    resize.add(new ResizeToolWindowAction.Left(myToolWindow, this));
    resize.add(new ResizeToolWindowAction.Right(myToolWindow, this));
    resize.add(new ResizeToolWindowAction.Up(myToolWindow, this));
    resize.add(new ResizeToolWindowAction.Down(myToolWindow, this));
    resize.add(ActionManager.getInstance().getAction("MaximizeToolWindow"));
    group.add(resize);
    if (!skipHideAction) {
      group.addSeparator();
      group.add(new HideAction());
    }
    group.addSeparator();
    group.add(new ContextHelpAction() {
      @Nullable
      @Override
      protected String getHelpId(DataContext dataContext) {
        // Prefer the selected content's help id; fall back to the tool window's own.
        Content content = myToolWindow.getContentManager().getSelectedContent();
        if (content != null) {
          String helpId = content.getHelpId();
          if (helpId != null) {
            return helpId;
          }
        }
        return myToolWindow.getHelpId();
      }

      @Override
      public void update(AnActionEvent e) {
        super.update(e);
        e.getPresentation().setEnabledAndVisible(getHelpId(e.getDataContext()) != null);
      }
    });
    return group;
  }

  /**
   * Builds the "gear" group: additional client actions (if any), the toolbar toggle, and the
   * view-mode toggles appropriate for the window's current type (docked/floating/windowed/sliding).
   */
  private DefaultActionGroup createGearPopupGroup() {
    final DefaultActionGroup group = new DefaultActionGroup();
    if (myAdditionalGearActions != null) {
      addSorted(group, myAdditionalGearActions);
      group.addSeparator();
    }
    group.addAction(myToggleToolbarGroup).setAsSecondary(true);
    if (myInfo.isDocked()) {
      group.add(myToggleAutoHideModeAction);
      group.add(myToggleDockModeAction);
      group.add(myToggleFloatingModeAction);
      group.add(myToggleWindowedModeAction);
      group.add(myToggleSideModeAction);
    }
    else if (myInfo.isFloating()) {
      group.add(myToggleAutoHideModeAction);
      group.add(myToggleFloatingModeAction);
      group.add(myToggleWindowedModeAction);
    }
    else if (myInfo.isWindowed()) {
      group.add(myToggleFloatingModeAction);
      group.add(myToggleWindowedModeAction);
    }
    else if (myInfo.isSliding()) {
      if (!ToolWindowId.PREVIEW.equals(myInfo.getId())) {
        group.add(myToggleDockModeAction);
      }
      group.add(myToggleFloatingModeAction);
      group.add(myToggleWindowedModeAction);
      group.add(myToggleSideModeAction);
    }
    group.add(myHideStripeButtonAction);
    return group;
  }

  private static void
addSorted(DefaultActionGroup main, ActionGroup group) {
    // Adds group's primary children to main, then its secondary children after a separator,
    // and finally a named separator in front of the whole set.
    final AnAction[] children = group.getChildren(null);
    boolean hadSecondary = false;
    for (AnAction action : children) {
      if (group.isPrimary(action)) {
        main.add(action);
      }
      else {
        hadSecondary = true;
      }
    }
    if (hadSecondary) {
      main.addSeparator();
      for (AnAction action : children) {
        if (!group.isPrimary(action)) {
          main.addAction(action).setAsSecondary(true);
        }
      }
    }
    String separatorText = group.getTemplatePresentation().getText();
    if (children.length > 0 && !StringUtil.isEmpty(separatorText)) {
      main.addAction(new Separator(separatorText), Constraints.FIRST);
    }
  }

  /**
   * @return tool window associated with the decorator.
   */
  final ToolWindowImpl getToolWindow() {
    return myToolWindow;
  }

  /**
   * @return last window info applied to the decorator.
   */
  @NotNull
  final WindowInfoImpl getWindowInfo() {
    return myInfo;
  }

  public int getHeaderHeight() {
    return myHeader.getPreferredSize().height;
  }

  public void setHeaderVisible(boolean value) {
    myHeader.setVisible(value);
  }

  @Override
  protected final void processComponentEvent(final ComponentEvent e) {
    super.processComponentEvent(e);
    if (ComponentEvent.COMPONENT_RESIZED == e.getID()) {
      fireResized();
    }
  }

  /**
   * Menu action that moves the tool window to a fixed anchor by firing {@code anchorChanged}.
   */
  private final class ChangeAnchorAction extends AnAction implements DumbAware {
    @NotNull private final ToolWindowAnchor myAnchor;

    public ChangeAnchorAction(@NotNull String title, @NotNull ToolWindowAnchor anchor) {
      super(title);
      myAnchor = anchor;
    }

    @Override
    public final void actionPerformed(@NotNull final AnActionEvent e) {
      fireAnchorChanged(myAnchor);
    }
  }

  /**
   * Toggle for the "pinned" state; selected means NOT auto-hide.
   */
  private final class TogglePinnedModeAction extends ToggleAction implements DumbAware {
    public TogglePinnedModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_PINNED_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(final AnActionEvent event) {
      return !myInfo.isAutoHide();
    }

    @Override
    public final void setSelected(final AnActionEvent event, final boolean flag) {
      fireAutoHideChanged(!myInfo.isAutoHide());
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      super.update(e);
      // Pinning is meaningless for floating/windowed tool windows.
      e.getPresentation().setVisible(myInfo.getType() != ToolWindowType.FLOATING && myInfo.getType() != ToolWindowType.WINDOWED);
    }
  }

  /**
   * Toggle between DOCKED and SLIDING window types.
   */
  private final class ToggleDockModeAction extends ToggleAction implements DumbAware {
    public ToggleDockModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_DOCK_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(final AnActionEvent event) {
      return myInfo.isDocked();
    }

    @Override
    public final void setSelected(final AnActionEvent event, final boolean flag) {
      if (myInfo.isDocked()) {
        fireTypeChanged(ToolWindowType.SLIDING);
      }
      else if (myInfo.isSliding()) {
        fireTypeChanged(ToolWindowType.DOCKED);
      }
    }
  }

  /**
   * Toggle between FLOATING and the window's previous internal type.
   */
  private final class ToggleFloatingModeAction extends ToggleAction implements DumbAware {
    public ToggleFloatingModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_FLOATING_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(final AnActionEvent event) {
      return myInfo.isFloating();
    }

    @Override
    public final void setSelected(final AnActionEvent event, final boolean flag) {
      if (myInfo.isFloating()) {
        fireTypeChanged(myInfo.getInternalType());
      }
      else {
        fireTypeChanged(ToolWindowType.FLOATING);
      }
    }
  }

  /**
   * Toggle between WINDOWED and the window's previous internal type.
   */
  private final class ToggleWindowedModeAction extends ToggleAction implements DumbAware {
    public ToggleWindowedModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_WINDOWED_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(final AnActionEvent event) {
      return myInfo.isWindowed();
    }

    @Override
    public final void setSelected(final AnActionEvent event, final boolean flag) {
      if (myInfo.isWindowed()) {
        fireTypeChanged(myInfo.getInternalType());
      }
      else {
        fireTypeChanged(ToolWindowType.WINDOWED);
      }
    }
  }

  /**
   * Toggle for the split ("side") position within the current anchor.
   */
  private final class ToggleSideModeAction extends ToggleAction implements DumbAware {
    public ToggleSideModeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_SIDE_MODE_ACTION_ID));
    }

    @Override
    public final boolean isSelected(final AnActionEvent event) {
      return myInfo.isSplit();
    }

    @Override
    public final void setSelected(final AnActionEvent event, final boolean flag) {
      fireSideStatusChanged(flag);
    }
  }

  /**
   * Removes the window's stripe button; also hides the window if it is currently active.
   */
  private final class RemoveStripeButtonAction extends AnAction implements DumbAware {
    public RemoveStripeButtonAction() {
      Presentation presentation = getTemplatePresentation();
      presentation.setText(ActionsBundle.message("action.RemoveStripeButton.text"));
      presentation.setDescription(ActionsBundle.message("action.RemoveStripeButton.description"));
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      e.getPresentation().setEnabledAndVisible(myInfo.isShowStripeButton());
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      fireVisibleOnPanelChanged(false);
      if (getToolWindow().isActive()) {
        fireHidden();
      }
    }
  }

  /**
   * Hides the tool window; enabled only while the window is visible.
   */
  private final class HideAction extends AnAction implements DumbAware {
    @NonNls public static final String HIDE_ACTIVE_WINDOW_ACTION_ID = InternalDecorator.HIDE_ACTIVE_WINDOW_ACTION_ID;

    public HideAction() {
      copyFrom(ActionManager.getInstance().getAction(HIDE_ACTIVE_WINDOW_ACTION_ID));
      getTemplatePresentation().setText(UIBundle.message("tool.window.hide.action.name"));
    }

    @Override
    public final void actionPerformed(@NotNull final AnActionEvent e) {
      fireHidden();
    }

    @Override
    public final void update(@NotNull final AnActionEvent event) {
      final Presentation presentation = event.getPresentation();
      presentation.setEnabled(myInfo.isVisible());
    }
  }

  /**
   * Toggle between COMBO and TABBED content UI; only shown once the window has ever had
   * more than one content tab.
   */
  private final class ToggleContentUiTypeAction extends ToggleAction implements DumbAware {
    private boolean myHadSeveralContents;

    private ToggleContentUiTypeAction() {
      copyFrom(ActionManager.getInstance().getAction(TOGGLE_CONTENT_UI_TYPE_ACTION_ID));
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      // Latches to true forever once several contents have been observed.
      myHadSeveralContents = myHadSeveralContents || myToolWindow.getContentManager().getContentCount() > 1;
      super.update(e);
      e.getPresentation().setVisible(myHadSeveralContents);
    }

    @Override
    public boolean
isSelected(AnActionEvent e) {
      return myInfo.getContentUiType() == ToolWindowContentUiType.COMBO;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean state) {
      fireContentUiTypeChanges(state ? ToolWindowContentUiType.COMBO : ToolWindowContentUiType.TABBED);
    }
  }

  /**
   * Divider strip at the decorator's edge; dragging it resizes the decorator's bounds
   * within the parent window pane. Mouse events arrive via glass-pane preprocessors.
   */
  private final class MyDivider extends JPanel {
    private boolean myDragging;
    private Point myLastPoint;
    private Disposable myDisposable;
    private IdeGlassPane myGlassPane;
    private final MouseAdapter myListener = new MyMouseAdapter();

    @Override
    public void addNotify() {
      super.addNotify();
      // Register the listener on the IDE glass pane; unregistered in removeNotify via myDisposable.
      myGlassPane = IdeGlassPaneUtil.find(this);
      myDisposable = Disposer.newDisposable();
      myGlassPane.addMouseMotionPreprocessor(myListener, myDisposable);
      myGlassPane.addMousePreprocessor(myListener, myDisposable);
    }

    @Override
    public void removeNotify() {
      super.removeNotify();
      if (myDisposable != null && !Disposer.isDisposed(myDisposable)) {
        Disposer.dispose(myDisposable);
      }
    }

    boolean isInDragZone(MouseEvent e) {
      final Point p = SwingUtilities.convertMouseEvent(e.getComponent(), e, this).getPoint();
      // Within 6px of the divider's resize axis counts as the drag zone.
      return Math.abs(myInfo.getAnchor().isHorizontal() ? p.y : p.x) < 6;
    }

    private class MyMouseAdapter extends MouseAdapter {
      private void updateCursor(MouseEvent e) {
        if (isInDragZone(e)) {
          myGlassPane.setCursor(MyDivider.this.getCursor(), MyDivider.this);
          e.consume();
        }
      }

      @Override
      public void mousePressed(MouseEvent e) {
        myDragging = isInDragZone(e);
        updateCursor(e);
      }

      @Override
      public void mouseClicked(MouseEvent e) {
        updateCursor(e);
      }

      @Override
      public void mouseReleased(MouseEvent e) {
        updateCursor(e);
        myDragging = false;
      }

      @Override
      public void mouseMoved(MouseEvent e) {
        updateCursor(e);
      }

      @Override
      public void mouseDragged(MouseEvent e) {
        if (!myDragging) return;
        MouseEvent event = SwingUtilities.convertMouseEvent(e.getComponent(), e, MyDivider.this);
        final ToolWindowAnchor anchor = myInfo.getAnchor();
        final Point point = event.getPoint();
        final Container windowPane = InternalDecorator.this.getParent();
        myLastPoint = SwingUtilities.convertPoint(MyDivider.this, point, windowPane);
        // Clamp the drag point to the window pane bounds.
        myLastPoint.x = Math.min(Math.max(myLastPoint.x, 0), windowPane.getWidth());
        myLastPoint.y = Math.min(Math.max(myLastPoint.y, 0), windowPane.getHeight());
        final Rectangle bounds = InternalDecorator.this.getBounds();
        // Resize the decorator towards the anchor's opposite edge.
        if (anchor == ToolWindowAnchor.TOP) {
          InternalDecorator.this.setBounds(0, 0, bounds.width, myLastPoint.y);
        }
        else if (anchor == ToolWindowAnchor.LEFT) {
          InternalDecorator.this.setBounds(0, 0, myLastPoint.x, bounds.height);
        }
        else if (anchor == ToolWindowAnchor.BOTTOM) {
          InternalDecorator.this.setBounds(0, myLastPoint.y, bounds.width, windowPane.getHeight() - myLastPoint.y);
        }
        else if (anchor == ToolWindowAnchor.RIGHT) {
          InternalDecorator.this.setBounds(myLastPoint.x, 0, windowPane.getWidth() - myLastPoint.x, bounds.height);
        }
        InternalDecorator.this.validate();
        e.consume();
      }
    }

    @NotNull
    @Override
    public Cursor getCursor() {
      final boolean isVerticalCursor = myInfo.isDocked() ? myInfo.getAnchor().isSplitVertically() : myInfo.getAnchor().isHorizontal();
      return isVerticalCursor ? Cursor.getPredefinedCursor(Cursor.S_RESIZE_CURSOR) : Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR);
    }
  }

  @Override
  public void putInfo(@NotNull Map<String, String> info) {
    info.put("toolWindowTitle", myToolWindow.getTitle());
    final Content selection = myToolWindow.getContentManager().getSelectedContent();
    if (selection != null) {
      info.put("toolWindowTab", selection.getTabName());
    }
  }

  public void setAdditionalGearActions(@Nullable ActionGroup additionalGearActions) {
    myAdditionalGearActions = additionalGearActions;
  }

  @Override
  public AccessibleContext getAccessibleContext() {
    if (accessibleContext == null) {
      accessibleContext = new AccessibleInternalDecorator();
    }
    return accessibleContext;
  }

  /**
   * Accessibility support: derives an accessible name from the tool window's title,
   * stripe title, or id, suffixed with " Tool Window".
   */
  protected class AccessibleInternalDecorator extends AccessibleJPanel {
    @Override
    public String getAccessibleName() {
      String name = super.getAccessibleName();
      if (name == null) {
        String title = StringUtil.defaultIfEmpty(myToolWindow.getTitle(), myToolWindow.getStripeTitle());
        title = StringUtil.defaultIfEmpty(title, myToolWindow.getId());
        name = StringUtil.notNullize(title) + " Tool Window";
      }
      return name;
    }
  }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.resourcemanager.resources.fluentcore.dag; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.resources.fluentcore.model.Indexable; import com.azure.resourcemanager.resources.fluentcore.utils.ResourceManagerUtils; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.core.scheduler.Schedulers; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; /** * Type representing a group of task entries with dependencies between them. Initially a task * group will have only one task entry known as root task entry, then more entries can be * added by taking dependency on other task groups or adding "post-run" task group dependents. * <p> * The method {@link TaskGroup#invokeAsync(InvocationContext)} ()} kick-off invocation of tasks * in the group, task are invoked in topological sorted order. * <p> * {@link TaskGroup#addDependencyTaskGroup(TaskGroup)}: A task group "A" can take dependency on * another task group "B" through this method e.g. `A.addDependencyTaskGroup(B)` indicates that * completion of tasks in the dependency task group "B" is required before the invocation of root * task in group "A". A.invokeAsync(cxt) will ensure this order. * <p> * {@link TaskGroup#addPostRunDependentTaskGroup(TaskGroup)}: there are scenarios where a subset * of dependent task groups say "H", "I" may required to run after the invocation of a task group * "K" when K.invokeAsync(cxt) is called. Such special dependents can be added via * K.addPostRunDependentTaskGroup(H) and K.addPostRunDependentTaskGroup(I). * <p> * The result produced by the tasks in the group are of type {@link Indexable}. 
*/
public class TaskGroup extends DAGraph<TaskItem, TaskGroupEntry<TaskItem>> implements Indexable {
    /**
     * The root task in this task group.
     */
    private final TaskGroupEntry<TaskItem> rootTaskEntry;
    /**
     * Task group termination strategy to be used once any task in the group error-ed.
     */
    private TaskGroupTerminateOnErrorStrategy taskGroupTerminateOnErrorStrategy;
    /**
     * Flag indicating whether this group is marked as cancelled or not. This flag will be used only
     * when group's terminate on error strategy is set as
     * {@link TaskGroupTerminateOnErrorStrategy#TERMINATE_ON_IN_PROGRESS_TASKS_COMPLETION}.
     * The effect of setting this flag can be thought of as broadcasting a cancellation signal
     * to tasks that are yet to be invoked.
     */
    private final AtomicBoolean isGroupCancelled;
    /**
     * The shared exception object used to indicate that a task is not invoked since the group
     * is marked as cancelled i.e. {@link this#isGroupCancelled} is set.
     */
    private final TaskCancelledException taskCancelledException = new TaskCancelledException();
    /**
     * The helper to operate on proxy TaskGroup of this TaskGroup for supporting dependents marked
     * for post run.
     */
    protected ProxyTaskGroupWrapper proxyTaskGroupWrapper;

    private final ClientLogger logger = new ClientLogger(this.getClass());

    /**
     * Creates TaskGroup.
     *
     * @param rootTaskEntry the entry holding root task
     */
    private TaskGroup(TaskGroupEntry<TaskItem> rootTaskEntry) {
        super(rootTaskEntry);
        this.isGroupCancelled = new AtomicBoolean(false);
        this.rootTaskEntry = rootTaskEntry;
        this.proxyTaskGroupWrapper = new ProxyTaskGroupWrapper(this);
    }

    /**
     * Creates TaskGroup.
     *
     * @param rootTaskItemId the id of the root task in the group
     * @param rootTaskItem the root task
     */
    public TaskGroup(String rootTaskItemId, TaskItem rootTaskItem) {
        this(new TaskGroupEntry<TaskItem>(rootTaskItemId, rootTaskItem));
    }

    /**
     * Creates TaskGroup.
     *
     * @param rootTaskItem the root task
     */
    public TaskGroup(IndexableTaskItem rootTaskItem) {
        this(new TaskGroupEntry<TaskItem>(rootTaskItem.key(), rootTaskItem));
    }

    /**
     * @return the key of this task group, which is same as key of the root entry in the group
     */
    @Override
    public String key() {
        return this.rootTaskEntry.key();
    }

    /**
     * Retrieve the result produced by a task with the given id in the group.
     * <p>
     * This method can be used to retrieve the result of invocation of both dependency
     * and "post-run" dependent tasks. If a task with the given id does not exist then
     * IllegalArgumentException will be thrown.
     *
     * @param taskId the task item id
     * @return the task result, null will be returned if task has not yet been invoked
     */
    public Indexable taskResult(String taskId) {
        TaskGroupEntry<TaskItem> taskGroupEntry = super.getNode(taskId);
        if (taskGroupEntry != null) {
            return taskGroupEntry.taskResult();
        }
        if (!this.proxyTaskGroupWrapper.isActive()) {
            throw logger.logExceptionAsError(
                new IllegalArgumentException("A dependency task with id '" + taskId + "' is not found"));
        }
        // Fall back to the proxy group, which holds the "post-run" dependent entries.
        taskGroupEntry = this.proxyTaskGroupWrapper.proxyTaskGroup.getNode(taskId);
        if (taskGroupEntry != null) {
            return taskGroupEntry.taskResult();
        }
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "A dependency task or 'post-run' dependent task with with id '" + taskId + "' not found"));
    }

    /**
     * Checks this TaskGroup depends on the given TaskGroup.
     *
     * @param taskGroup the TaskGroup to check
     * @return true if this TaskGroup depends on the given TaskGroup
     */
    public boolean dependsOn(TaskGroup taskGroup) {
        return this.nodeTable.containsKey(taskGroup.root().key());
    }

    /**
     * @return the root task entry in the group.
     */
    protected TaskGroupEntry<TaskItem> root() {
        return this.rootTaskEntry;
    }

    /**
     * Mark root of this task group depends on the given TaskItem.
     * This ensures this task group's root get picked for execution only after the completion
     * of invocation of provided TaskItem.
*
     * @param dependencyTaskItem the task item that this task group depends on
     * @return the key of the dependency
     */
    public String addDependency(FunctionalTaskItem dependencyTaskItem) {
        IndexableTaskItem dependency = IndexableTaskItem.create(dependencyTaskItem);
        this.addDependency(dependency);
        return dependency.key();
    }

    /**
     * Mark root of this task group depends on the given item's taskGroup.
     * This ensures this task group's root get picked for execution only after the completion
     * of invocation of provided TaskItem.
     *
     * @param hasTaskGroup an item with taskGroup that this task group depends on
     */
    public void addDependency(TaskGroup.HasTaskGroup hasTaskGroup) {
        this.addDependencyTaskGroup(hasTaskGroup.taskGroup());
    }

    /**
     * Mark root of this task group depends on the given task group's root.
     * This ensures this task group's root get picked for execution only after the completion
     * of all tasks in the given group.
     *
     * @param dependencyTaskGroup the task group that this task group depends on
     */
    public void addDependencyTaskGroup(TaskGroup dependencyTaskGroup) {
        if (dependencyTaskGroup.proxyTaskGroupWrapper.isActive()) {
            // Dependency group has "post-run" dependents: depend on its proxy instead.
            dependencyTaskGroup.proxyTaskGroupWrapper.addDependentTaskGroup(this);
        } else {
            DAGraph<TaskItem, TaskGroupEntry<TaskItem>> dependencyGraph = dependencyTaskGroup;
            super.addDependencyGraph(dependencyGraph);
        }
    }

    /**
     * Mark the given TaskItem depends on this taskGroup.
     *
     * @param dependentTaskItem the task item that depends on this task group
     * @return key to be used as parameter to taskResult(string) method to retrieve result of
     * invocation of given task item.
     */
    public String addPostRunDependent(FunctionalTaskItem dependentTaskItem) {
        IndexableTaskItem taskItem = IndexableTaskItem.create(dependentTaskItem);
        this.addPostRunDependent(taskItem);
        return taskItem.key();
    }

    /**
     * Mark the given TaskItem depends on this taskGroup.
     *
     * @param dependentTaskItem the task item that depends on this task group
     * @param internalContext the internal runtime context
     * @return key to be used as parameter to taskResult(string) method to retrieve result of
     * invocation of given task item.
     */
    public String addPostRunDependent(
        FunctionalTaskItem dependentTaskItem,
        ResourceManagerUtils.InternalRuntimeContext internalContext) {
        IndexableTaskItem taskItem = IndexableTaskItem.create(dependentTaskItem, internalContext);
        this.addPostRunDependent(taskItem);
        return taskItem.key();
    }

    /**
     * Mark the given item with taskGroup depends on this taskGroup.
     *
     * @param hasTaskGroup an item with a task group that depends on this task group
     */
    public void addPostRunDependent(TaskGroup.HasTaskGroup hasTaskGroup) {
        this.addPostRunDependentTaskGroup(hasTaskGroup.taskGroup());
    }

    /**
     * Mark root of the given task group depends on this task group's root.
     * This ensures given task group's root get picked for invocation only after the completion
     * of all tasks in this group. Calling invokeAsync(cxt) will run the tasks in the given
     * dependent task group as well.
     *
     * @param dependentTaskGroup the task group depends on this task group
     */
    public void addPostRunDependentTaskGroup(TaskGroup dependentTaskGroup) {
        this.proxyTaskGroupWrapper.addPostRunTaskGroupForActualTaskGroup(dependentTaskGroup);
    }

    /**
     * Invokes tasks in the group.
     * It is not guaranteed to return indexable in topological order.
     *
     * @param context group level shared context that need be passed to invokeAsync(cxt)
     * method of each task item in the group when it is selected for invocation.
     * @return an observable that emits the result of tasks in the order they finish.
*/ public Flux<Indexable> invokeAsync(final InvocationContext context) { return Flux.defer(() -> { if (proxyTaskGroupWrapper.isActive()) { return proxyTaskGroupWrapper.taskGroup().invokeInternAsync(context, true, null); } else { Set<String> processedKeys = runBeforeGroupInvoke(null); if (proxyTaskGroupWrapper.isActive()) { // If proxy got activated after 'runBeforeGroupInvoke()' stage due to the addition of direct // 'postRunDependent's then delegate group invocation to proxy group. // return proxyTaskGroupWrapper.taskGroup().invokeInternAsync(context, true, processedKeys); } else { return invokeInternAsync(context, false, null); } } }); } /** * Invokes tasks in the group. * * @return the root result of task group. */ public Mono<Indexable> invokeAsync() { return invokeAsync(this.newInvocationContext()) .then(Mono.defer(() -> { if (proxyTaskGroupWrapper.isActive()) { return Mono.just(proxyTaskGroupWrapper.taskGroup().root().taskResult()); } return Mono.just(root().taskResult()); })); } /** * Invokes dependency tasks in the group, but not. * * @param context group level shared context that need be passed to invokeAsync(cxt) * method of each task item in the group when it is selected for invocation. * @return an observable that emits the result of tasks in the order they finishes. */ public Flux<Indexable> invokeDependencyAsync(final InvocationContext context) { context.put(TaskGroup.InvocationContext.KEY_SKIP_TASKS, Collections.singleton(this.key())); return Flux.defer(() -> { if (proxyTaskGroupWrapper.isActive()) { return Flux.error(new IllegalStateException("postRunDependent is not supported")); } else { Set<String> processedKeys = runBeforeGroupInvoke(null); if (proxyTaskGroupWrapper.isActive()) { return Flux.error(new IllegalStateException("postRunDependent is not supported")); } else { return invokeInternAsync(context, false, null); } } }); } /** * Invokes tasks in the group. 
*
     * @param context group level shared context that need be passed to invokeAsync(cxt)
     * method of each task item in the group when it is selected for invocation.
     * @param shouldRunBeforeGroupInvoke indicate whether to run the 'beforeGroupInvoke' method
     * of each task before invoking them
     * @param skipBeforeGroupInvoke the task keys for which 'beforeGroupInvoke' should not be called
     * before invoking them
     * @return an observable that emits the result of tasks in the order they finish.
     */
    private Flux<Indexable> invokeInternAsync(final InvocationContext context,
                                              final boolean shouldRunBeforeGroupInvoke,
                                              final Set<String> skipBeforeGroupInvoke) {
        if (!isPreparer()) {
            return Flux.error(new IllegalStateException(
                "invokeInternAsync(cxt) can be called only from root TaskGroup"));
        }
        this.taskGroupTerminateOnErrorStrategy = context.terminateOnErrorStrategy();
        if (shouldRunBeforeGroupInvoke) {
            // Prepare tasks and queue the ready tasks (terminal tasks with no dependencies)
            //
            this.runBeforeGroupInvoke(skipBeforeGroupInvoke);
        }
        // Runs the ready tasks concurrently
        //
        return this.invokeReadyTasksAsync(context);
    }

    /**
     * Run 'beforeGroupInvoke' method of the tasks in this group. The tasks can use beforeGroupInvoke()
     * method to add additional dependencies or dependents.
     *
     * @param skip the keys of the tasks that are previously processed hence they must be skipped
     * @return the keys of all the tasks those are processed (including previously processed items in skip param)
     */
    private Set<String> runBeforeGroupInvoke(final Set<String> skip) {
        HashSet<String> processedEntryKeys = new HashSet<>();
        if (skip != null) {
            processedEntryKeys.addAll(skip);
        }
        List<TaskGroupEntry<TaskItem>> entries = this.entriesSnapshot();
        boolean hasMoreToProcess;
        // Invokes 'beforeGroupInvoke' on a subset of non-processed tasks in the group.
        // Initially processing is pending on all task items.
        do {
            hasMoreToProcess = false;
            for (TaskGroupEntry<TaskItem> entry : entries) {
                if (!processedEntryKeys.contains(entry.key())) {
                    entry.data().beforeGroupInvoke();
                    processedEntryKeys.add(entry.key());
                }
            }
            int prevSize = entries.size();
            entries = this.entriesSnapshot();
            if (entries.size() > prevSize) {
                // If new task dependencies/dependents added in 'beforeGroupInvoke' then
                // set the flag which indicates another pass is required to 'prepare' new
                // task items
                hasMoreToProcess = true;
            }
        } while (hasMoreToProcess); // Run another pass if new dependencies/dependents were added in this pass
        super.prepareForEnumeration();
        return processedEntryKeys;
    }

    /**
     * @return list with current task entries in this task group
     */
    private List<TaskGroupEntry<TaskItem>> entriesSnapshot() {
        List<TaskGroupEntry<TaskItem>> entries = new ArrayList<>();
        super.prepareForEnumeration();
        for (TaskGroupEntry<TaskItem> current = super.getNext(); current != null; current = super.getNext()) {
            entries.add(current);
            super.reportCompletion(current);
        }
        return entries;
    }

    /**
     * Invokes the ready tasks.
     *
     * @param context group level shared context that need be passed to
     * {@link TaskGroupEntry#invokeTaskAsync(boolean, InvocationContext)}
     * method of each entry in the group when it is selected for execution
     * @return a {@link Flux} that emits the result of tasks in the order they finish.
     */
    // Due to it takes approximate 3ms in flux for returning, it cannot be guaranteed to return in topological order.
// One simple fix for guaranteeing the last element could be https://github.com/Azure/azure-sdk-for-java/pull/15074
    @SuppressWarnings({"unchecked", "rawtypes"})
    private Flux<Indexable> invokeReadyTasksAsync(final InvocationContext context) {
        TaskGroupEntry<TaskItem> readyTaskEntry = super.getNext();
        final List<Flux<Indexable>> observables = new ArrayList<>();
        // Enumerate the ready tasks (those with dependencies resolved) and kickoff them concurrently
        //
        while (readyTaskEntry != null) {
            final TaskGroupEntry<TaskItem> currentEntry = readyTaskEntry;
            final TaskItem currentTaskItem = currentEntry.data();
            if (currentTaskItem instanceof ProxyTaskItem) {
                observables.add(invokeAfterPostRunAsync(currentEntry, context));
            } else {
                observables.add(invokeTaskAsync(currentEntry, context));
            }
            readyTaskEntry = super.getNext();
        }
        // Merge with delayed errors so sibling tasks keep running after one faults.
        return Flux.mergeDelayError(32, observables.toArray(new Flux[0]));
    }

    /**
     * Invokes the task stored in the given entry.
     * <p>
     * If the task cannot be invoked because the group is marked as cancelled then an observable
     * that emits {@link TaskCancelledException} will be returned.
     *
     * @param entry the entry holding task
     * @param context a group level shared context that is passed to {@link TaskItem#invokeAsync(InvocationContext)}
     * method of the task item this entry wraps.
     * @return an observable that emits result of task in the given entry and result of subset of tasks which gets
     * scheduled after this task.
     */
    private Flux<Indexable> invokeTaskAsync(final TaskGroupEntry<TaskItem> entry, final InvocationContext context) {
        return Flux.defer(() -> {
            if (isGroupCancelled.get()) {
                // One or more tasks are in faulted state, though this task MAYBE invoked if it does not
                // have faulted tasks as transitive dependencies, we won't do it since group is cancelled
                // due to termination strategy TERMINATE_ON_IN_PROGRESS_TASKS_COMPLETION.
                //
                return processFaultedTaskAsync(entry, taskCancelledException, context);
            } else {
                // Any cached result will be ignored for root resource
                //
                boolean ignoreCachedResult = isRootEntry(entry)
                    || (entry.proxy() != null && isRootEntry(entry.proxy()));
                Mono<Indexable> taskObservable;
                Object skipTasks = context.get(InvocationContext.KEY_SKIP_TASKS);
                if (skipTasks instanceof Set && ((Set) skipTasks).contains(entry.key())) {
                    // Task was explicitly skipped (see invokeDependencyAsync): emit a no-op result.
                    taskObservable = Mono.just(new VoidIndexable(entry.key()));
                } else {
                    taskObservable = entry.invokeTaskAsync(ignoreCachedResult, context);
                }
                return taskObservable.flatMapMany(
                    (indexable) -> Flux.just(indexable),
                    (throwable) -> processFaultedTaskAsync(entry, throwable, context),
                    () -> processCompletedTaskAsync(entry, context));
            }
        });
    }

    /**
     * Invokes the {@link TaskItem#invokeAfterPostRunAsync(boolean)} method of an actual TaskItem
     * if the given entry holds a ProxyTaskItem.
     *
     * @param entry the entry holding a ProxyTaskItem
     * @param context a group level shared context
     * @return An Observable that represents asynchronous work started by
     * {@link TaskItem#invokeAfterPostRunAsync(boolean)} method of actual TaskItem and result of subset
     * of tasks which gets scheduled after proxy task. If group was not in faulted state and
     * {@link TaskItem#invokeAfterPostRunAsync(boolean)} emits no error then stream also includes
     * result produced by actual TaskItem.
     */
    private Flux<Indexable> invokeAfterPostRunAsync(final TaskGroupEntry<TaskItem> entry,
                                                    final InvocationContext context) {
        return Flux.defer(() -> {
            final ProxyTaskItem proxyTaskItem = (ProxyTaskItem) entry.data();
            if (proxyTaskItem == null) {
                return Flux.empty();
            }
            final boolean isFaulted = entry.hasFaultedDescentDependencyTasks() || isGroupCancelled.get();
            return proxyTaskItem.invokeAfterPostRunAsync(isFaulted)
                .flatMapMany(
                    indexable -> Flux.error(
                        new IllegalStateException("This onNext should never be called")),
                    (error) -> processFaultedTaskAsync(entry, error, context),
                    () -> {
                        if (isFaulted) {
                            // Propagate the appropriate failure depending on whether a dependency
                            // faulted or the whole group was cancelled.
                            if (entry.hasFaultedDescentDependencyTasks()) {
                                return processFaultedTaskAsync(entry, new ErroredDependencyTaskException(), context);
                            } else {
                                return processFaultedTaskAsync(entry, taskCancelledException, context);
                            }
                        } else {
                            return Flux.concat(Flux.just(proxyTaskItem.result()),
                                processCompletedTaskAsync(entry, context));
                        }
                    });
        });
    }

    /**
     * Handles successful completion of a task.
     * <p>
     * If the task is not root (terminal) task then this kicks off execution of the next set of ready tasks
     *
     * @param completedEntry the entry holding completed task
     * @param context the context object shared across all the task entries in this group during execution
     * @return an observable represents asynchronous operation in the next stage
     */
    private Flux<Indexable> processCompletedTaskAsync(final TaskGroupEntry<TaskItem> completedEntry,
                                                      final InvocationContext context) {
        reportCompletion(completedEntry);
        if (isRootEntry(completedEntry)) {
            return Flux.empty();
        } else {
            return invokeReadyTasksAsync(context);
        }
    }

    /**
     * Handles a faulted task.
* * @param faultedEntry the entry holding faulted task * @param throwable the reason for fault * @param context the context object shared across all the task entries in this group during execution * @return an observable represents asynchronous operation in the next stage */ private Flux<Indexable> processFaultedTaskAsync(final TaskGroupEntry<TaskItem> faultedEntry, final Throwable throwable, final InvocationContext context) { markGroupAsCancelledIfTerminationStrategyIsIPTC(); reportError(faultedEntry, throwable); if (isRootEntry(faultedEntry)) { if (shouldPropagateException(throwable)) { return toErrorObservable(throwable); } return Flux.empty(); } else if (shouldPropagateException(throwable)) { return Flux.concatDelayError(invokeReadyTasksAsync(context), toErrorObservable(throwable)); } else { return invokeReadyTasksAsync(context); } } /** * Mark this TaskGroup as cancelled if the termination strategy associated with the group * is {@link TaskGroupTerminateOnErrorStrategy#TERMINATE_ON_IN_PROGRESS_TASKS_COMPLETION}. */ private void markGroupAsCancelledIfTerminationStrategyIsIPTC() { this.isGroupCancelled.set(this.taskGroupTerminateOnErrorStrategy == TaskGroupTerminateOnErrorStrategy.TERMINATE_ON_IN_PROGRESS_TASKS_COMPLETION); } /** * Check that given entry is the root entry in this group. * * @param taskGroupEntry the entry * @return true if the entry is root entry in the group, false otherwise. */ private boolean isRootEntry(TaskGroupEntry<TaskItem> taskGroupEntry) { return isRootNode(taskGroupEntry); } /** * Checks the given throwable needs to be propagated to final stream returned by * {@link this#invokeAsync(InvocationContext)} ()} method. * * @param throwable the exception to check * @return true if the throwable needs to be included in the {@link RuntimeException} * emitted by the final stream. 
*/ private static boolean shouldPropagateException(Throwable throwable) { return (!(throwable instanceof ErroredDependencyTaskException) && !(throwable instanceof TaskCancelledException)); } /** * Gets the given throwable as observable. * * @param throwable the throwable to wrap * @return observable with throwable wrapped */ private Flux<Indexable> toErrorObservable(Throwable throwable) { return Flux.error(throwable); } /** * @return a new clean context instance. */ public InvocationContext newInvocationContext() { return new InvocationContext(this); } /** * An interface representing a type composes a TaskGroup. */ public interface HasTaskGroup { /** * @return Gets the task group. */ TaskGroup taskGroup(); } /** * A mutable type that can be used to pass data around task items during the invocation * of the TaskGroup. */ public static final class InvocationContext { public static final String KEY_SKIP_TASKS = "SKIP_TASKS"; private final Map<String, Object> properties; private final TaskGroup taskGroup; private TaskGroupTerminateOnErrorStrategy terminateOnErrorStrategy; private final ClientLogger logger = new ClientLogger(this.getClass()); /** * Creates InvocationContext instance. * * @param taskGroup the task group that uses this context instance. */ private InvocationContext(final TaskGroup taskGroup) { this.properties = new ConcurrentHashMap<>(); this.taskGroup = taskGroup; } /** * @return the TaskGroup this invocation context associated with. */ public TaskGroup taskGroup() { return this.taskGroup; } /** * Sets the group termination strategy to use on error. 
* * @param strategy the strategy * @return the context */ public InvocationContext withTerminateOnErrorStrategy(TaskGroupTerminateOnErrorStrategy strategy) { if (this.terminateOnErrorStrategy != null) { throw logger.logExceptionAsError(new IllegalStateException( "Termination strategy is already set, it is immutable for a specific context")); } this.terminateOnErrorStrategy = strategy; return this; } /** * @return the termination strategy to use upon error during the current invocation of the TaskGroup. */ public TaskGroupTerminateOnErrorStrategy terminateOnErrorStrategy() { if (this.terminateOnErrorStrategy == null) { return TaskGroupTerminateOnErrorStrategy.TERMINATE_ON_HITTING_LCA_TASK; } return this.terminateOnErrorStrategy; } /** * Put a key-value in the context. * * @param key the key * @param value the value */ public void put(String key, Object value) { this.properties.put(key, value); } /** * Get a value in the context with the given key. * * @param key the key * @return value with the given key if exists, null otherwise. */ public Object get(String key) { return this.properties.get(key); } /** * Check existence of a key in the context. * * @param key the key * @return true if the key exists, false otherwise. */ public boolean hasKey(String key) { return this.get(key) != null; } } /** * Wrapper type to simplify operations on proxy TaskGroup. * <p> * A proxy TaskGroup will be activated for a TaskGroup as soon as a "post-run" dependent * added to the actual TaskGroup via {@link TaskGroup#addPostRunDependentTaskGroup(TaskGroup)}. * "post run" dependents are those TaskGroup which need to be invoked as part of invocation * of actual TaskGroup. 
*/ protected static final class ProxyTaskGroupWrapper { // The "proxy TaskGroup" private TaskGroup proxyTaskGroup; // The "actual TaskGroup" for which above TaskGroup act as proxy private final TaskGroup actualTaskGroup; private final ClientLogger logger = new ClientLogger(this.getClass()); /** * Creates ProxyTaskGroupWrapper. * * @param actualTaskGroup the actual TaskGroup for which proxy TaskGroup will be enabled */ ProxyTaskGroupWrapper(TaskGroup actualTaskGroup) { this.actualTaskGroup = actualTaskGroup; } /** * @return true if the proxy TaskGroup is enabled for original TaskGroup. */ boolean isActive() { return this.proxyTaskGroup != null; } /** * @return the wrapped proxy task group. */ TaskGroup taskGroup() { return this.proxyTaskGroup; } /** * Add "post-run TaskGroup" for the "actual TaskGroup". * * @param postRunTaskGroup the dependency TaskGroup. */ void addPostRunTaskGroupForActualTaskGroup(TaskGroup postRunTaskGroup) { if (this.proxyTaskGroup == null) { this.initProxyTaskGroup(); } postRunTaskGroup.addDependencyGraph(this.actualTaskGroup); if (postRunTaskGroup.proxyTaskGroupWrapper.isActive()) { this.proxyTaskGroup.addDependencyGraph(postRunTaskGroup.proxyTaskGroupWrapper.proxyTaskGroup); } else { this.proxyTaskGroup.addDependencyGraph(postRunTaskGroup); } } /** * Add a dependent for the proxy TaskGroup. * * @param dependentTaskGroup the dependent TaskGroup */ void addDependentTaskGroup(TaskGroup dependentTaskGroup) { if (this.proxyTaskGroup == null) { throw logger.logExceptionAsError(new IllegalStateException( "addDependentTaskGroup() cannot be called in a non-active ProxyTaskGroup")); } dependentTaskGroup.addDependencyGraph(this.proxyTaskGroup); } /** * Initialize the proxy TaskGroup if not initialized yet. */ private void initProxyTaskGroup() { if (this.proxyTaskGroup == null) { // Creates proxy TaskGroup with an instance of ProxyTaskItem as root TaskItem which delegates actions on // it to "actual TaskGroup"'s root. 
// ProxyTaskItem proxyTaskItem = new ProxyTaskItem(this.actualTaskGroup.root().data()); this.proxyTaskGroup = new TaskGroup("proxy-" + this.actualTaskGroup.root().key(), proxyTaskItem); if (this.actualTaskGroup.hasParents()) { // Once "proxy TaskGroup" is enabled, all existing TaskGroups depends on "actual TaskGroup" should // take dependency on "proxy TaskGroup". // String atgRootKey = this.actualTaskGroup.root().key(); for (DAGraph<TaskItem, TaskGroupEntry<TaskItem>> parentDAG : this.actualTaskGroup.parentDAGs) { parentDAG.root().removeDependency(atgRootKey); parentDAG.addDependencyGraph(this.proxyTaskGroup); } // re-assigned actual's parents as proxy's parents, so clear actual's parent collection. // this.actualTaskGroup.parentDAGs.clear(); } // "Proxy TaskGroup" takes dependency on "actual TaskGroup" // this.proxyTaskGroup.addDependencyGraph(this.actualTaskGroup); // Add a back reference to "proxy" in actual // this.actualTaskGroup.rootTaskEntry.setProxy(this.proxyTaskGroup.rootTaskEntry); } } } /** * A {@link TaskItem} type that act as proxy for another {@link TaskItem}. */ private static final class ProxyTaskItem implements TaskItem { private final TaskItem actualTaskItem; private ProxyTaskItem(final TaskItem actualTaskItem) { this.actualTaskItem = actualTaskItem; } @Override public Indexable result() { return actualTaskItem.result(); } @Override public void beforeGroupInvoke() { // NOP } @Override public boolean isHot() { return actualTaskItem.isHot(); } @Override public Mono<Indexable> invokeAsync(InvocationContext context) { return Mono.just(actualTaskItem.result()); } @Override public Mono<Void> invokeAfterPostRunAsync(final boolean isGroupFaulted) { if (actualTaskItem.isHot()) { return Mono.defer(() -> actualTaskItem.invokeAfterPostRunAsync(isGroupFaulted).subscribeOn(Schedulers.immediate())); } else { return this.actualTaskItem.invokeAfterPostRunAsync(isGroupFaulted) .subscribeOn(Schedulers.immediate()); } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.security.util.crypto;

import at.favre.lib.crypto.bcrypt.BCrypt;
import at.favre.lib.crypto.bcrypt.Radix64Encoder;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.security.util.EncryptionMethod;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * A {@link RandomIVPBECipherProvider} that derives cipher keys from passwords using the
 * Bcrypt KDF: the password is hashed with Bcrypt, the resulting hash string is digested
 * with SHA-512 and truncated to the requested key length.
 */
public class BcryptCipherProvider extends RandomIVPBECipherProvider {
    private static final Logger logger = LoggerFactory.getLogger(BcryptCipherProvider.class);

    // The (log) number of Bcrypt key expansion rounds used by this provider instance.
    private final int workFactor;
    /**
     * This can be calculated automatically using the code {@see BcryptCipherProviderGroovyTest#calculateMinimumWorkFactor} or manually updated by a maintainer
     */
    private static final int DEFAULT_WORK_FACTOR = 12;
    private static final int DEFAULT_SALT_LENGTH = 16;
    // Matches a full Bcrypt salt header, e.g. "$2a$10$" followed by 22 Radix-64 chars.
    private static final Pattern BCRYPT_SALT_FORMAT = Pattern.compile("^\\$\\d\\w\\$\\d{2}\\$[\\w\\/\\.]{22}");

    /**
     * Instantiates a Bcrypt cipher provider with the default work factor 12 (2^12 key expansion rounds).
     */
    public BcryptCipherProvider() {
        this(DEFAULT_WORK_FACTOR);
    }

    /**
     * Instantiates a Bcrypt cipher provider with the specified work factor w (2^w key expansion rounds).
     *
     * @param workFactor the (log) number of key expansion rounds [4..30]
     */
    public BcryptCipherProvider(int workFactor) {
        this.workFactor = workFactor;
        if (workFactor < DEFAULT_WORK_FACTOR) {
            logger.warn("The provided work factor {} is below the recommended minimum {}", workFactor, DEFAULT_WORK_FACTOR);
        }
    }

    /**
     * Returns an initialized cipher for the specified algorithm. The key is derived by the KDF of the implementation. The IV is provided externally to allow for non-deterministic IVs, as IVs
     * deterministically derived from the password are a potential vulnerability and compromise semantic security. See
     * <a href="http://crypto.stackexchange.com/a/3970/12569">Ilmari Karonen's answer on Crypto Stack Exchange</a>
     *
     * @param encryptionMethod the {@link EncryptionMethod}
     * @param password         the secret input
     * @param salt             the complete salt (e.g. {@code "$2a$10$gUVbkVzp79H8YaCOsCVZNu".getBytes(StandardCharsets.UTF_8)})
     * @param iv               the IV
     * @param keyLength        the desired key length in bits
     * @param encryptMode      true for encrypt, false for decrypt
     * @return the initialized cipher
     * @throws Exception if there is a problem initializing the cipher
     */
    @Override
    public Cipher getCipher(EncryptionMethod encryptionMethod, String password, byte[] salt, byte[] iv, int keyLength, boolean encryptMode) throws Exception {
        try {
            return getInitializedCipher(encryptionMethod, password, salt, iv, keyLength, encryptMode);
        } catch (IllegalArgumentException e) {
            // Argument-validation failures are propagated unchanged so callers see the precise cause
            throw e;
        } catch (Exception e) {
            throw new ProcessException("Error initializing the cipher", e);
        }
    }

    @Override
    Logger getLogger() {
        return logger;
    }

    /**
     * Returns an initialized cipher for the specified algorithm. The key (and IV if necessary) are derived by the KDF of the implementation.
     * <p>
     * The IV can be retrieved by the calling method using {@link Cipher#getIV()}.
     *
     * @param encryptionMethod the {@link EncryptionMethod}
     * @param password         the secret input
     * @param salt             the complete salt (e.g. {@code "$2a$10$gUVbkVzp79H8YaCOsCVZNu".getBytes(StandardCharsets.UTF_8)})
     * @param keyLength        the desired key length in bits
     * @param encryptMode      true for encrypt, false for decrypt
     * @return the initialized cipher
     * @throws Exception if there is a problem initializing the cipher
     */
    @Override
    public Cipher getCipher(EncryptionMethod encryptionMethod, String password, byte[] salt, int keyLength, boolean encryptMode) throws Exception {
        // Delegate with an empty IV; the concrete cipher generates one internally for encryption
        return getCipher(encryptionMethod, password, salt, new byte[0], keyLength, encryptMode);
    }

    /**
     * Validates the inputs, runs the Bcrypt KDF, then digests the Bcrypt hash with SHA-512
     * (truncated to {@code keyLength} bits) to produce the cipher key.
     *
     * @param encryptionMethod the {@link EncryptionMethod}; must be compatible with strong KDFs
     * @param password         the secret input; must be non-empty
     * @param salt             the complete Bcrypt-formatted salt bytes
     * @param iv               the IV (may be empty for encryption)
     * @param keyLength        the desired key length in bits; must be valid for the cipher
     * @param encryptMode      true for encrypt, false for decrypt
     * @return the initialized cipher
     * @throws Exception if validation fails or the cipher cannot be initialized
     */
    protected Cipher getInitializedCipher(EncryptionMethod encryptionMethod, String password, byte[] salt, byte[] iv, int keyLength, boolean encryptMode) throws Exception {
        if (encryptionMethod == null) {
            throw new IllegalArgumentException("The encryption method must be specified");
        }
        if (!encryptionMethod.isCompatibleWithStrongKDFs()) {
            throw new IllegalArgumentException(encryptionMethod.name() + " is not compatible with Bcrypt");
        }
        if (StringUtils.isEmpty(password)) {
            throw new IllegalArgumentException("Encryption with an empty password is not supported");
        }

        String algorithm = encryptionMethod.getAlgorithm();
        String provider = encryptionMethod.getProvider();

        final String cipherName = CipherUtility.parseCipherFromAlgorithm(algorithm);
        if (!CipherUtility.isValidKeyLength(keyLength, cipherName)) {
            throw new IllegalArgumentException(keyLength + " is not a valid key length for " + cipherName);
        }

        byte[] rawSalt = extractRawSalt(salt);

        String hash = new String(BCrypt.withDefaults().hash(workFactor, rawSalt, password.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8);

        /* The SHA-512 hash is required in order to derive a key longer than 184 bits (the resulting size of the Bcrypt hash) and ensuring the avalanche effect causes higher key entropy (if all
        derived keys follow a consistent pattern, it weakens the strength of the encryption) */
        MessageDigest digest = MessageDigest.getInstance("SHA-512", provider);
        byte[] dk = digest.digest(hash.getBytes(StandardCharsets.UTF_8));
        // Truncate the 64-byte digest to the requested key length
        dk = Arrays.copyOf(dk, keyLength / 8);
        SecretKey tempKey = new SecretKeySpec(dk, algorithm);

        KeyedCipherProvider keyedCipherProvider = new AESKeyedCipherProvider();
        return keyedCipherProvider.getCipher(encryptionMethod, tempKey, iv, encryptMode);
    }

    /**
     * Validates that the salt bytes form a full Bcrypt salt (e.g. {@code $2a$10$...22 chars...}).
     *
     * @param salt the candidate salt bytes
     * @return the salt as a UTF-8 string if it matches the Bcrypt format
     * @throws IllegalArgumentException if the salt is null, empty, or malformed
     */
    private static String formatSaltForBcrypt(byte[] salt) {
        if (salt == null || salt.length == 0) {
            throw new IllegalArgumentException("The salt cannot be empty. To generate a salt, use BcryptCipherProvider#generateSalt()");
        }

        String rawSalt = new String(salt, StandardCharsets.UTF_8);
        Matcher matcher = BCRYPT_SALT_FORMAT.matcher(rawSalt);

        if (matcher.find()) {
            return rawSalt;
        } else {
            throw new IllegalArgumentException("The salt must be of the format $2a$10$gUVbkVzp79H8YaCOsCVZNu. To generate a salt, use BcryptCipherProvider#generateSalt()");
        }
    }

    /**
     * Returns the full salt in a {@code byte[]} for this cipher provider (i.e. {@code $2a$10$abcdef...} format).
     *
     * @return the full salt as a byte[]
     */
    @Override
    public byte[] generateSalt() {
        byte[] salt = new byte[DEFAULT_SALT_LENGTH];
        SecureRandom sr = new SecureRandom();
        sr.nextBytes(salt);
        // TODO: This library allows for 2a, 2b, and 2y versions so this should be changed to be configurable
        String saltString = "$2a$" +
                StringUtils.leftPad(String.valueOf(workFactor), 2, "0") +
                "$" + new String(new Radix64Encoder.Default().encode(salt), StandardCharsets.UTF_8);
        return saltString.getBytes(StandardCharsets.UTF_8);
    }

    /**
     * Returns the raw salt as a {@code byte[]} extracted from the Bcrypt formatted salt byte[].
     *
     * @param fullSalt the Bcrypt salt sequence as bytes
     * @return the raw salt (16 bytes) without Radix 64 encoding
     * @throws IllegalArgumentException if the salt is null, empty, or malformed
     */
    public static byte[] extractRawSalt(byte[] fullSalt) {
        try {
            String formattedSalt = formatSaltForBcrypt(fullSalt);
            // The Radix-64 encoded raw salt is the 22 characters after the final '$'
            String rawSalt = formattedSalt.substring(formattedSalt.lastIndexOf("$") + 1);
            if (rawSalt.length() != 22) {
                throw new IllegalArgumentException("The formatted salt did not contain a raw salt");
            }
            return new Radix64Encoder.Default().decode(rawSalt.getBytes(StandardCharsets.UTF_8));
        } catch (IllegalArgumentException e) {
            // Fix: fullSalt may be null here (formatSaltForBcrypt rejects null with an IAE);
            // building the log message directly from a null array would throw an NPE from this
            // catch block and mask the informative IllegalArgumentException
            final String printableSalt = fullSalt == null ? "null" : new String(fullSalt, StandardCharsets.UTF_8);
            logger.warn("Unable to extract a raw salt from bcrypt salt {}", printableSalt);
            throw e;
        }
    }

    /**
     * Returns the raw salt as a {@code byte[]} extracted from the Bcrypt formatted salt String.
     *
     * @param fullSalt the Bcrypt salt sequence
     * @return the raw salt (16 bytes), or an empty array if {@code fullSalt} is null
     */
    public static byte[] extractRawSalt(String fullSalt) {
        if (fullSalt == null) {
            return new byte[0];
        }
        return extractRawSalt(fullSalt.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public int getDefaultSaltLength() {
        return DEFAULT_SALT_LENGTH;
    }

    protected int getWorkFactor() {
        return workFactor;
    }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.introduceparameterobject; import com.intellij.codeInsight.generation.GenerateMembersUtil; import com.intellij.ide.highlighter.JavaFileType; import com.intellij.ide.util.PackageUtil; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtil; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.codeStyle.VariableKind; import com.intellij.psi.impl.source.javadoc.PsiDocParamRef; import com.intellij.psi.javadoc.PsiDocComment; import com.intellij.psi.javadoc.PsiDocTag; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.searches.OverridingMethodsSearch; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.PropertyUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.psi.util.TypeConversionUtil; import com.intellij.refactoring.MoveDestination; import com.intellij.refactoring.RefactorJBundle; import com.intellij.refactoring.introduceparameterobject.usageInfo.*; import com.intellij.refactoring.util.FixableUsageInfo; import com.intellij.refactoring.util.FixableUsagesRefactoringProcessor; import 
com.intellij.refactoring.util.RefactoringUtil; import com.intellij.refactoring.util.VariableData; import com.intellij.usageView.UsageInfo; import com.intellij.usageView.UsageViewDescriptor; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.VisibilityUtil; import com.intellij.util.containers.MultiMap; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; public class IntroduceParameterObjectProcessor extends FixableUsagesRefactoringProcessor { private static final Logger logger = Logger.getInstance("com.siyeh.rpp.introduceparameterobject.IntroduceParameterObjectProcessor"); private final MoveDestination myMoveDestination; private final PsiMethod method; private final String className; private final String packageName; private final boolean keepMethodAsDelegate; private final boolean myUseExistingClass; private final boolean myCreateInnerClass; private final String myNewVisibility; private final boolean myGenerateAccessors; private final List<ParameterChunk> parameters; private final int[] paramsToMerge; private final List<PsiTypeParameter> typeParams; private final Set<PsiParameter> paramsNeedingSetters = new HashSet<PsiParameter>(); private final Set<PsiParameter> paramsNeedingGetters = new HashSet<PsiParameter>(); private final PsiClass existingClass; private PsiMethod myExistingClassCompatibleConstructor; public IntroduceParameterObjectProcessor(String className, String packageName, MoveDestination moveDestination, PsiMethod method, VariableData[] parameters, boolean keepMethodAsDelegate, final boolean useExistingClass, final boolean createInnerClass, String newVisibility, boolean generateAccessors) { super(method.getProject()); myMoveDestination = moveDestination; this.method = method; this.className = className; this.packageName 
= packageName; this.keepMethodAsDelegate = keepMethodAsDelegate; myUseExistingClass = useExistingClass; myCreateInnerClass = createInnerClass; myNewVisibility = newVisibility; myGenerateAccessors = generateAccessors; this.parameters = new ArrayList<ParameterChunk>(); for (VariableData parameter : parameters) { this.parameters.add(new ParameterChunk(parameter)); } final PsiParameterList parameterList = method.getParameterList(); final PsiParameter[] methodParams = parameterList.getParameters(); paramsToMerge = new int[parameters.length]; for (int p = 0; p < parameters.length; p++) { VariableData parameter = parameters[p]; for (int i = 0; i < methodParams.length; i++) { final PsiParameter methodParam = methodParams[i]; if (parameter.variable.equals(methodParam)) { paramsToMerge[p] = i; break; } } } final Set<PsiTypeParameter> typeParamSet = new HashSet<PsiTypeParameter>(); final PsiTypeVisitor<Object> typeParametersVisitor = new PsiTypeVisitor<Object>() { @Override public Object visitClassType(PsiClassType classType) { final PsiClass referent = classType.resolve(); if (referent instanceof PsiTypeParameter) { typeParamSet.add((PsiTypeParameter)referent); } return super.visitClassType(classType); } }; for (VariableData parameter : parameters) { parameter.type.accept(typeParametersVisitor); } typeParams = new ArrayList<PsiTypeParameter>(typeParamSet); final String qualifiedName = StringUtil.getQualifiedName(packageName, className); final GlobalSearchScope scope = GlobalSearchScope.allScope(myProject); existingClass = JavaPsiFacade.getInstance(myProject).findClass(qualifiedName, scope); } @NotNull protected UsageViewDescriptor createUsageViewDescriptor(UsageInfo[] usageInfos) { return new IntroduceParameterObjectUsageViewDescriptor(method); } @Override protected boolean preprocessUsages(final Ref<UsageInfo[]> refUsages) { MultiMap<PsiElement, String> conflicts = new MultiMap<PsiElement, String>(); if (myUseExistingClass) { if (existingClass == null) { 
conflicts.putValue(null, RefactorJBundle.message("cannot.perform.the.refactoring") + "Could not find the selected class"); } if (myExistingClassCompatibleConstructor == null) { conflicts.putValue(existingClass, RefactorJBundle.message("cannot.perform.the.refactoring") + "Selected class has no compatible constructors"); } } else { if (existingClass != null) { conflicts.putValue(existingClass, RefactorJBundle.message("cannot.perform.the.refactoring") + RefactorJBundle.message("there.already.exists.a.class.with.the.chosen.name")); } if (myMoveDestination != null) { if (!myMoveDestination.isTargetAccessible(myProject, method.getContainingFile().getVirtualFile())) { conflicts.putValue(method, "Created class won't be accessible"); } } } for (UsageInfo usageInfo : refUsages.get()) { if (usageInfo instanceof FixableUsageInfo) { final String conflictMessage = ((FixableUsageInfo)usageInfo).getConflictMessage(); if (conflictMessage != null) { conflicts.putValue(usageInfo.getElement(), conflictMessage); } } } return showConflicts(conflicts, refUsages.get()); } public void findUsages(@NotNull List<FixableUsageInfo> usages) { if (myUseExistingClass && existingClass != null) { myExistingClassCompatibleConstructor = existingClassIsCompatible(existingClass, parameters); } findUsagesForMethod(method, usages, true); if (myUseExistingClass && existingClass != null && !(paramsNeedingGetters.isEmpty() && paramsNeedingSetters.isEmpty())) { usages.add(new AppendAccessorsUsageInfo(existingClass, myGenerateAccessors, paramsNeedingGetters, paramsNeedingSetters, parameters)); } final PsiMethod[] overridingMethods = OverridingMethodsSearch.search(method, true).toArray(PsiMethod.EMPTY_ARRAY); for (PsiMethod siblingMethod : overridingMethods) { findUsagesForMethod(siblingMethod, usages, false); } if (myNewVisibility != null) { usages.add(new BeanClassVisibilityUsageInfo(existingClass, usages.toArray(new UsageInfo[usages.size()]), myNewVisibility, myExistingClassCompatibleConstructor)); } } 
private void findUsagesForMethod(PsiMethod overridingMethod, List<FixableUsageInfo> usages, boolean changeSignature) {
    // Registers the signature change for one method plus a fix-up for every read/write
    // of a merged parameter inside its body.
    final PsiCodeBlock body = overridingMethod.getBody();
    final String baseParameterName = StringUtil.decapitalize(className);
    // Pick a name for the new parameter-object parameter that doesn't collide inside the body.
    final String fixedParamName = body != null
                                  ? JavaCodeStyleManager.getInstance(myProject).suggestUniqueVariableName(baseParameterName, body.getLBrace(), true)
                                  : JavaCodeStyleManager.getInstance(myProject).propertyNameToVariableName(baseParameterName, VariableKind.PARAMETER);
    usages.add(new MergeMethodArguments(overridingMethod, className, packageName, fixedParamName, paramsToMerge, typeParams, keepMethodAsDelegate,
                                        myCreateInnerClass ? method.getContainingClass() : null, changeSignature));
    final ParamUsageVisitor visitor = new ParamUsageVisitor(overridingMethod, paramsToMerge);
    overridingMethod.accept(visitor);
    final Set<PsiReferenceExpression> values = visitor.getParameterUsages();
    for (PsiReferenceExpression paramUsage : values) {
      final PsiParameter parameter = (PsiParameter)paramUsage.resolve();
      assert parameter != null;
      final PsiMethod containingMethod = (PsiMethod)parameter.getDeclarationScope();
      final int index = containingMethod.getParameterList().getParameterIndex(parameter);
      // Map the overrider's parameter back to the base method's parameter by position.
      final PsiParameter replacedParameter = method.getParameterList().getParameters()[index];
      final ParameterChunk parameterChunk = ParameterChunk.getChunkByParameter(parameter, parameters);
      @NonNls String getter = parameterChunk != null ? parameterChunk.getter : null;
      final String paramName = parameterChunk != null ? parameterChunk.parameter.name : replacedParameter.getName();
      final PsiType paramType = parameterChunk != null ? parameterChunk.parameter.type : replacedParameter.getType();
      if (getter == null) {
        // No existing getter on the parameter-object class: invent a name and remember
        // that one must be generated.
        getter = GenerateMembersUtil.suggestGetterName(paramName, paramType, myProject);
        paramsNeedingGetters.add(replacedParameter);
      }
      @NonNls String setter = parameterChunk != null ? parameterChunk.setter : null;
      if (setter == null) {
        setter = GenerateMembersUtil.suggestSetterName(paramName, paramType, myProject);
      }
      // Writes (x++ / x = ...) need getter+setter; plain reads only a getter.
      if (RefactoringUtil.isPlusPlusOrMinusMinus(paramUsage.getParent())) {
        usages.add(new ReplaceParameterIncrementDecrement(paramUsage, fixedParamName, setter, getter));
        if (parameterChunk == null || parameterChunk.setter == null) {
          paramsNeedingSetters.add(replacedParameter);
        }
      }
      else if (RefactoringUtil.isAssignmentLHS(paramUsage)) {
        usages.add(new ReplaceParameterAssignmentWithCall(paramUsage, fixedParamName, setter, getter));
        if (parameterChunk == null || parameterChunk.setter == null) {
          paramsNeedingSetters.add(replacedParameter);
        }
      }
      else {
        usages.add(new ReplaceParameterReferenceWithCall(paramUsage, fixedParamName, getter));
      }
    }
  }

  /**
   * Builds (or reuses) the parameter-object class, applies all recorded usage fixes,
   * and, for a newly created class, escalates its visibility so every call site can see it.
   */
  protected void performRefactoring(UsageInfo[] usageInfos) {
    final PsiClass psiClass = buildClass();
    if (psiClass != null) {
      fixJavadocForConstructor(psiClass);
      super.performRefactoring(usageInfos);
      if (!myUseExistingClass) {
        for (PsiReference reference : ReferencesSearch.search(method)) {
          final PsiElement place = reference.getElement();
          VisibilityUtil.escalateVisibility(psiClass, place);
          for (PsiMethod constructor : psiClass.getConstructors()) {
            VisibilityUtil.escalateVisibility(constructor, place);
          }
        }
      }
    }
  }

  /**
   * Returns the parameter-object class: the pre-existing one if selected, otherwise a
   * freshly generated bean class added either as a static inner class or as a new
   * top-level file in the target package. Returns null if creation fails.
   */
  private PsiClass buildClass() {
    if (existingClass != null) {
      return existingClass;
    }
    final ParameterObjectBuilder beanClassBuilder = new ParameterObjectBuilder();
    // Inner classes are kept private; top-level classes must be public for callers.
    beanClassBuilder.setVisibility(myCreateInnerClass ? PsiModifier.PRIVATE : PsiModifier.PUBLIC);
    beanClassBuilder.setProject(myProject);
    beanClassBuilder.setTypeArguments(typeParams);
    beanClassBuilder.setClassName(className);
    beanClassBuilder.setPackageName(packageName);
    for (ParameterChunk parameterChunk : parameters) {
      final VariableData parameter = parameterChunk.parameter;
      final boolean setterRequired = paramsNeedingSetters.contains(parameter.variable);
      beanClassBuilder.addField((PsiParameter)parameter.variable, parameter.name, parameter.type, setterRequired);
    }
    final String classString = beanClassBuilder.buildBeanClass();
    try {
      final PsiFileFactory factory = PsiFileFactory.getInstance(method.getProject());
      final PsiJavaFile newFile = (PsiJavaFile)factory.createFileFromText(className + ".java", JavaFileType.INSTANCE, classString);
      if (myCreateInnerClass) {
        final PsiClass containingClass = method.getContainingClass();
        final PsiClass[] classes = newFile.getClasses();
        assert classes.length > 0 : classString;
        final PsiClass innerClass = (PsiClass)containingClass.add(classes[0]);
        PsiUtil.setModifierProperty(innerClass, PsiModifier.STATIC, true);
        return (PsiClass)JavaCodeStyleManager.getInstance(newFile.getProject()).shortenClassReferences(innerClass);
      }
      else {
        final PsiFile containingFile = method.getContainingFile();
        final PsiDirectory containingDirectory = containingFile.getContainingDirectory();
        final PsiDirectory directory;
        if (myMoveDestination != null) {
          directory = myMoveDestination.getTargetDirectory(containingDirectory);
        }
        else {
          final Module module = ModuleUtil.findModuleForPsiElement(containingFile);
          directory = PackageUtil.findOrCreateDirectoryForPackage(module, packageName, containingDirectory, true, true);
        }
        if (directory != null) {
          // Shorten references, then reformat, then add — order matters for PSI consistency.
          final CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(method.getManager().getProject());
          final PsiElement shortenedFile = JavaCodeStyleManager.getInstance(newFile.getProject()).shortenClassReferences(newFile);
          final PsiElement reformattedFile = codeStyleManager.reformat(shortenedFile);
          return ((PsiJavaFile)directory.add(reformattedFile)).getClasses()[0];
        }
      }
    }
    catch (IncorrectOperationException e) {
      logger.info(e);
    }
    return null;
  }

  /**
   * Moves the @param javadoc tags of the merged parameters from the refactored method
   * onto the parameter-object constructor (only if that constructor has no javadoc yet).
   */
  private void fixJavadocForConstructor(PsiClass psiClass) {
    final PsiDocComment docComment = method.getDocComment();
    if (docComment != null) {
      final List<PsiDocTag> mergedTags = new ArrayList<PsiDocTag>();
      final PsiDocTag[] paramTags = docComment.findTagsByName("param");
      for (PsiDocTag paramTag : paramTags) {
        final PsiElement[] dataElements = paramTag.getDataElements();
        if (dataElements.length > 0) {
          if (dataElements[0] instanceof PsiDocParamRef) {
            final PsiReference reference = dataElements[0].getReference();
            if (reference != null) {
              final PsiElement resolve = reference.resolve();
              if (resolve instanceof PsiParameter) {
                final int parameterIndex = method.getParameterList().getParameterIndex((PsiParameter)resolve);
                // Skip tags for parameters that are NOT being merged.
                if (ArrayUtil.find(paramsToMerge, parameterIndex) < 0) continue;
              }
            }
          }
          mergedTags.add((PsiDocTag)paramTag.copy());
        }
      }
      PsiMethod compatibleParamObjectConstructor = null;
      if (myExistingClassCompatibleConstructor != null && myExistingClassCompatibleConstructor.getDocComment() == null) {
        compatibleParamObjectConstructor = myExistingClassCompatibleConstructor;
      }
      else if (!myUseExistingClass){
        compatibleParamObjectConstructor = psiClass.getConstructors()[0];
      }
      if (compatibleParamObjectConstructor != null) {
        PsiDocComment psiDocComment = JavaPsiFacade.getElementFactory(myProject).createDocCommentFromText("/**\n*/");
        psiDocComment = (PsiDocComment)compatibleParamObjectConstructor.addBefore(psiDocComment, compatibleParamObjectConstructor.getFirstChild());
        for (PsiDocTag tag : mergedTags) {
          psiDocComment.add(tag);
        }
      }
    }
  }

  /** Undo/progress title for the refactoring command. */
  protected String getCommandName() {
    final PsiClass containingClass = method.getContainingClass();
    return RefactorJBundle.message("introduced.parameter.class.command.name", className, containingClass.getName(), method.getName());
  }

  /**
   * Recursively collects every reference expression that resolves to one of the
   * parameters selected for merging.
   */
  private static class ParamUsageVisitor extends JavaRecursiveElementVisitor {
    private final Set<PsiParameter> paramsToMerge = new HashSet<PsiParameter>();
    private final Set<PsiReferenceExpression> parameterUsages = new HashSet<PsiReferenceExpression>(4);

    ParamUsageVisitor(PsiMethod method, int[] paramIndicesToMerge) {
      super();
      final PsiParameterList paramList = method.getParameterList();
      final PsiParameter[] parameters = paramList.getParameters();
      for (int i : paramIndicesToMerge) {
        paramsToMerge.add(parameters[i]);
      }
    }

    public void visitReferenceExpression(PsiReferenceExpression expression) {
      super.visitReferenceExpression(expression);
      final PsiElement referent = expression.resolve();
      if (!(referent instanceof PsiParameter)) {
        return;
      }
      final PsiParameter parameter = (PsiParameter)referent;
      if (paramsToMerge.contains(parameter)) {
        parameterUsages.add(expression);
      }
    }

    public Set<PsiReferenceExpression> getParameterUsages() {
      return parameterUsages;
    }
  }

  /**
   * Checks whether an existing class can serve as the parameter object: it needs a
   * constructor assignable from the merged parameters and a backing field per parameter.
   * Side effect: records field/getter/setter names on each ParameterChunk.
   * Returns the compatible constructor, or null if none qualifies.
   */
  @Nullable
  private static PsiMethod existingClassIsCompatible(PsiClass aClass, List<ParameterChunk> params) {
    // Special case: a single parameter wrapped by a primitive wrapper (Integer, etc.) —
    // the wrapper's internal "value" field and xxxValue() accessor are used directly.
    if (params.size() == 1) {
      final ParameterChunk parameterChunk = params.get(0);
      final PsiType paramType = parameterChunk.parameter.type;
      if (TypeConversionUtil.isPrimitiveWrapper(aClass.getQualifiedName())) {
        parameterChunk.setField(aClass.findFieldByName("value", false));
        parameterChunk.setGetter(paramType.getCanonicalText() + "Value");
        for (PsiMethod constructor : aClass.getConstructors()) {
          if (constructorIsCompatible(constructor, params)) return constructor;
        }
      }
    }
    final PsiMethod[] constructors = aClass.getConstructors();
    PsiMethod compatibleConstructor = null;
    for (PsiMethod constructor : constructors) {
      if (constructorIsCompatible(constructor, params)) {
        compatibleConstructor = constructor;
        break;
      }
    }
    if (compatibleConstructor == null) {
      return null;
    }
    final PsiParameterList parameterList = compatibleConstructor.getParameterList();
    final PsiParameter[] constructorParams = parameterList.getParameters();
    for (int i = 0; i < constructorParams.length; i++) {
      final PsiParameter param = constructorParams[i];
      final ParameterChunk parameterChunk = params.get(i);
      // Each constructor parameter must be assigned verbatim to some field.
      final PsiField field = findFieldAssigned(param, compatibleConstructor);
      if (field == null) {
        return null;
      }
      parameterChunk.setField(field);
      final PsiMethod getterForField = PropertyUtil.findGetterForField(field);
      if (getterForField != null) {
        parameterChunk.setGetter(getterForField.getName());
      }
      final PsiMethod setterForField = PropertyUtil.findSetterForField(field);
      if (setterForField != null) {
        parameterChunk.setSetter(setterForField.getName());
      }
    }
    return compatibleConstructor;
  }

  /** True when the constructor's arity matches and each parameter is assignable from the merged one. */
  private static boolean constructorIsCompatible(PsiMethod constructor, List<ParameterChunk> params) {
    final PsiParameterList parameterList = constructor.getParameterList();
    final PsiParameter[] constructorParams = parameterList.getParameters();
    if (constructorParams.length != params.size()) {
      return false;
    }
    for (int i = 0; i < constructorParams.length; i++) {
      if (!TypeConversionUtil.isAssignable(constructorParams[i].getType(), params.get(i).parameter.type)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Per-parameter bookkeeping: the original parameter plus the field/getter/setter on
   * the parameter-object class that will represent it.
   */
  public static class ParameterChunk {
    private final VariableData parameter;
    private PsiField field;
    private String getter;
    private String setter;

    public ParameterChunk(VariableData parameter) {
      this.parameter = parameter;
    }

    public void setField(PsiField field) {
      this.field = field;
    }

    public void setGetter(String getter) {
      this.getter = getter;
    }

    public void setSetter(String setter) {
      this.setter = setter;
    }

    @Nullable
    public PsiField getField() {
      return field;
    }

    /** Finds the chunk whose wrapped variable is the given parameter, or null. */
    @Nullable
    public static ParameterChunk getChunkByParameter(PsiParameter param, List<ParameterChunk> params) {
      for (ParameterChunk chunk : params) {
        if (chunk.parameter.variable.equals(param)) {
          return chunk;
        }
      }
      return null;
    }
  }

  /** Returns the field that the constructor assigns the given parameter to, or null. */
  private static PsiField findFieldAssigned(PsiParameter param, PsiMethod constructor) {
    final ParamAssignmentFinder visitor = new ParamAssignmentFinder(param);
    constructor.accept(visitor);
    return visitor.getFieldAssigned();
  }

  /**
   * Walks a constructor body looking for a plain "field = param" assignment.
   * Only direct reference-to-reference assignments are recognized (no casts or
   * expressions), so the last matching assignment wins.
   */
  private static class ParamAssignmentFinder extends JavaRecursiveElementWalkingVisitor {
    private final PsiParameter param;
    private PsiField fieldAssigned = null;

    ParamAssignmentFinder(PsiParameter param) {
      this.param = param;
    }

    public void visitAssignmentExpression(PsiAssignmentExpression assignment) {
      super.visitAssignmentExpression(assignment);
      final PsiExpression lhs = assignment.getLExpression();
      final PsiExpression rhs = assignment.getRExpression();
      if (!(lhs instanceof PsiReferenceExpression)) {
        return;
      }
      if (!(rhs instanceof PsiReferenceExpression)) {
        return;
      }
      final PsiElement referent = ((PsiReference)rhs).resolve();
      if (referent == null || !referent.equals(param)) {
        return;
      }
      final PsiElement assigned = ((PsiReference)lhs).resolve();
      if (assigned == null || !(assigned instanceof PsiField)) {
        return;
      }
      fieldAssigned = (PsiField)assigned;
    }

    public PsiField getFieldAssigned() {
      return fieldAssigned;
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.om.typecomputer.impl; import org.apache.asterix.om.exceptions.IncompatibleTypeException; import org.apache.asterix.om.typecomputer.base.AbstractResultTypeComputer; import org.apache.asterix.om.types.ATypeTag; import org.apache.asterix.om.types.AUnionType; import org.apache.asterix.om.types.BuiltinType; import org.apache.asterix.om.types.IAType; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression; import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression; public class NumericAddSubMulDivTypeComputer extends AbstractResultTypeComputer { /** * For those functions that do not return NULL if both arguments are not NULL */ public static final NumericAddSubMulDivTypeComputer INSTANCE = new NumericAddSubMulDivTypeComputer(false); /** * For those functions that may return NULL even if both arguments are not NULL (e.g. 
division by zero) */ public static final NumericAddSubMulDivTypeComputer INSTANCE_NULLABLE = new NumericAddSubMulDivTypeComputer(true); private final boolean nullable; private NumericAddSubMulDivTypeComputer(boolean nullable) { this.nullable = nullable; } @Override protected IAType getResultType(ILogicalExpression expr, IAType... strippedInputTypes) throws AlgebricksException { AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr; String funcName = funcExpr.getFunctionIdentifier().getName(); IAType t1 = strippedInputTypes[0]; IAType t2 = strippedInputTypes[1]; ATypeTag tag1 = t1.getTypeTag(); ATypeTag tag2 = t2.getTypeTag(); IAType type; switch (tag1) { case DOUBLE: switch (tag2) { case TINYINT: case SMALLINT: case INTEGER: case BIGINT: case FLOAT: case DOUBLE: type = BuiltinType.ADOUBLE; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case FLOAT: switch (tag2) { case TINYINT: case SMALLINT: case INTEGER: case BIGINT: case FLOAT: type = BuiltinType.AFLOAT; break; case DOUBLE: type = BuiltinType.ADOUBLE; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case BIGINT: switch (tag2) { case TINYINT: case SMALLINT: case INTEGER: case BIGINT: type = BuiltinType.AINT64; break; case FLOAT: type = BuiltinType.AFLOAT; break; case DOUBLE: type = BuiltinType.ADOUBLE; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case INTEGER: switch (tag2) { case TINYINT: case SMALLINT: case INTEGER: type = BuiltinType.AINT32; break; case BIGINT: type = BuiltinType.AINT64; break; case FLOAT: type = BuiltinType.AFLOAT; break; case DOUBLE: type = BuiltinType.ADOUBLE; break; case ANY: type = BuiltinType.ANY; break; default: throw new 
IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case SMALLINT: switch (tag2) { case TINYINT: case SMALLINT: type = BuiltinType.AINT16; break; case INTEGER: type = BuiltinType.AINT32; break; case BIGINT: type = BuiltinType.AINT64; break; case FLOAT: type = BuiltinType.AFLOAT; break; case DOUBLE: type = BuiltinType.ADOUBLE; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case TINYINT: switch (tag2) { case TINYINT: type = BuiltinType.AINT8; break; case SMALLINT: type = BuiltinType.AINT16; break; case INTEGER: type = BuiltinType.AINT32; break; case BIGINT: type = BuiltinType.AINT64; break; case FLOAT: type = BuiltinType.AFLOAT; break; case DOUBLE: type = BuiltinType.ADOUBLE; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case ANY: switch (tag2) { case TINYINT: case SMALLINT: case INTEGER: case BIGINT: case FLOAT: case ANY: case DOUBLE: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case DATE: switch (tag2) { case DATE: type = BuiltinType.ADURATION; break; case YEARMONTHDURATION: case DAYTIMEDURATION: case DURATION: type = BuiltinType.ADATE; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case TIME: switch (tag2) { case TIME: type = BuiltinType.ADURATION; break; case YEARMONTHDURATION: case DAYTIMEDURATION: case DURATION: type = BuiltinType.ATIME; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case DATETIME: switch (tag2) { case DATETIME: type = BuiltinType.ADURATION; break; case YEARMONTHDURATION: case DAYTIMEDURATION: 
case DURATION: type = BuiltinType.ADATETIME; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case DURATION: switch (tag2) { case DATE: type = BuiltinType.ADATE; break; case TIME: type = BuiltinType.ATIME; break; case DATETIME: type = BuiltinType.ADATETIME; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case YEARMONTHDURATION: switch (tag2) { case DATE: type = BuiltinType.ADATE; break; case TIME: type = BuiltinType.ATIME; break; case DATETIME: type = BuiltinType.ADATETIME; break; case YEARMONTHDURATION: type = BuiltinType.AYEARMONTHDURATION; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; case DAYTIMEDURATION: switch (tag2) { case DATE: type = BuiltinType.ADATE; break; case TIME: type = BuiltinType.ATIME; break; case DATETIME: type = BuiltinType.ADATETIME; break; case DAYTIMEDURATION: type = BuiltinType.ADAYTIMEDURATION; break; case ANY: type = BuiltinType.ANY; break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } break; default: throw new IncompatibleTypeException(funcExpr.getSourceLocation(), funcName, tag1, tag2); } if (nullable && type.getTypeTag() != ATypeTag.ANY) { type = AUnionType.createNullableType(type); } return type; } }
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package com.gemstone.gemfire.cache;

import com.gemstone.gemfire.cache.query.*;
import com.gemstone.gemfire.cache.util.*;
import com.gemstone.gemfire.distributed.*;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.CachePerfStats;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.tier.sockets.ClientProxyMembershipID;
import io.snappydata.test.dunit.DistributedTestBase;
import io.snappydata.test.dunit.DistributedTestBase.WaitCriterion;
import junit.framework.TestCase;

import java.util.*;

/**
 * Unit test for basic DataPolicy.EMPTY feature.
 * NOTE: these tests using a loner DistributedSystem and local scope regions
 * @author Darrel Schneider
 * @since 5.0
 */
public class ProxyJUnitTest extends TestCase {

  // Fresh loner system + cache per test; torn down in tearDown().
  DistributedSystem ds;
  Cache c;

  public void setUp() throws Exception {
    super.setUp();
    Properties p = new Properties();
    // "mcast-port" 0 + empty "locators" => standalone (loner) distributed system.
    p.setProperty("mcast-port", "0");
    p.setProperty("locators", "");
    this.ds = DistributedSystem.connect(p);
    this.c = CacheFactory.create(this.ds);
  }

  public void tearDown() throws Exception {
    if (this.c != null) {
      this.c.close();
      this.c = null;
    }
    if (this.ds != null) {
      this.ds.disconnect();
      this.ds = null;
    }
    super.tearDown();
  }

  private CachePerfStats getStats() {
    return ((GemFireCacheImpl)this.c).getCachePerfStats();
  }

  /**
   * last event a cache listener saw
   */
  public CacheEvent clLastEvent;
  /**
   * number of cache listener invocations
   */
  public int clInvokeCount;
  /**
   * true if cache listener close called
   */
  public boolean clClosed;
  /**
   * last event a cache writer saw
   */
  public CacheEvent cwLastEvent;
  /**
   * number of cache writer invocations
   */
  public int cwInvokeCount;
  /**
   * true if cache writer close called
   */
  public boolean cwClosed;
  /**
   * last getEvents() a transaction listener saw
   */
  public List tlLastEvents;
  /**
   * number of transaction listener invocations
   */
  public int tlInvokeCount;
  /**
   * true if transaction listener close called
   */
  public boolean tlClosed;

  /**
   * Clears the all the callback state this test has received.
   */
  private void clearCallbackState() {
    this.clLastEvent = null;
    this.clInvokeCount = 0;
    this.clClosed = false;
    this.cwLastEvent = null;
    this.cwInvokeCount = 0;
    this.cwClosed = false;
    this.tlLastEvents = null;
    this.tlInvokeCount = 0;
    this.tlClosed = false;
  }

  /**
   * Used to check to see if CacheEvent was what was expected
   */
  protected abstract class ExpectedCacheEvent implements CacheEvent {
    public Region r;
    public Operation op;
    public Object cbArg;
    public boolean queued;

    // Compares every CacheEvent accessor of this expectation against an actual event.
    public void check(CacheEvent other) {
      if (getRegion() != other.getRegion()) {
        fail("wrong region. Expected " + getRegion() + " but found " + other.getRegion());
      }
      assertEquals(getOperation(), other.getOperation());
      assertEquals(getCallbackArgument(), other.getCallbackArgument());
      assertEquals(isOriginRemote(), other.isOriginRemote());
      assertEquals(getDistributedMember(), other.getDistributedMember());
      assertEquals(isExpiration(), other.isExpiration());
      assertEquals(isDistributed(), other.isDistributed());
    }
    public Region getRegion() { return this.r; }
    public Operation getOperation() { return this.op; }
    public Object getCallbackArgument() { return this.cbArg; }
    public boolean isCallbackArgumentAvailable() { return true; }
    public boolean isOriginRemote() { return false; }
    public DistributedMember getDistributedMember() { return c.getDistributedSystem().getDistributedMember(); }
    public boolean isExpiration() { return this.op.isExpiration(); }
    public boolean isDistributed() { return this.op.isDistributed(); }
  }

  /**
   * Used to check to see if EntryEvent was what was expected
   */
  protected class ExpectedEntryEvent extends ExpectedCacheEvent implements EntryEvent {
    public void check(EntryEvent other) {
      super.check(other);
      assertEquals(getKey(), other.getKey());
      assertEquals(getOldValue(), other.getOldValue());
      assertEquals(getNewValue(), other.getNewValue());
      assertEquals(isLocalLoad(), other.isLocalLoad());
      assertEquals(isNetLoad(), other.isNetLoad());
      assertEquals(isLoad(), other.isLoad());
      assertEquals(isNetSearch(), other.isNetSearch());
      assertEquals(getTransactionId(), other.getTransactionId());
    }
    public Object key;
    public Object getKey() { return this.key; }
    // Old value is always null on an EMPTY (proxy) region — no local storage.
    public Object getOldValue() { return null; }
    public boolean isOldValueAvailable() { return true; }
    public Object newValue;
    public Object getNewValue() { return this.newValue; }
    public boolean isLocalLoad() { return getOperation().isLocalLoad(); }
    public boolean isNetLoad() { return getOperation().isNetLoad(); }
    public boolean isLoad() { return getOperation().isLoad(); }
    public boolean isNetSearch() { return getOperation().isNetSearch(); }
    public TransactionId txId;
    public TransactionId getTransactionId() { return this.txId; }
    public boolean isBridgeEvent() { return hasClientOrigin(); }
    public boolean hasClientOrigin() { return false; }
    public ClientProxyMembershipID getContext() {
      // TODO Auto-generated method stub
      return null;
    }
    public SerializedCacheValue getSerializedOldValue() { return null; }
    public SerializedCacheValue getSerializedNewValue() { return null; }
  }

  /**
   * Used to check to see if EntryEvent was what was expected
   */
  protected class ExpectedRegionEvent extends ExpectedCacheEvent implements RegionEvent {
    public void check(RegionEvent other) {
      super.check(other);
      assertEquals(isReinitializing(), other.isReinitializing());
    }
    public boolean isReinitializing() { return false; }
    public DiskAccessException getDiskException() { return null; }
  }

  // Assertion helpers over the recorded listener/writer/transaction-listener state.
  private void checkCWClosed() {
    assertEquals(true, this.cwClosed);
  }
  private void checkCLClosed() {
    assertEquals(true, this.clClosed);
  }
  private void checkTLClosed() {
    assertEquals(true, this.tlClosed);
  }
  private void checkNoCW() {
    assertEquals(0, this.cwInvokeCount);
  }
  private void checkNoCL() {
    assertEquals(0, this.clInvokeCount);
  }
  private void checkNoTL() {
    assertEquals(0, this.tlInvokeCount);
  }

  // Verifies exactly one transaction-listener commit with one event, then resets state.
  private void checkTL(ExpectedCacheEvent expected) {
    assertEquals(1, this.tlInvokeCount);
    assertEquals(1, this.tlLastEvents.size());
    {
      Object old_CA = expected.cbArg;
      //expected.cbArg = null;
      try {
        expected.check((CacheEvent)this.tlLastEvents.get(0));
      } finally {
        expected.cbArg = old_CA;
      }
    }
    checkNoCW();
    //checkNoCL();
    clearCallbackState();
  }

  // Verifies exactly one cache-writer invocation matching the expectation (state NOT cleared).
  private void checkCW(ExpectedCacheEvent expected) {
    assertEquals(1, this.cwInvokeCount);
    expected.check(this.cwLastEvent);
  }

  private void checkCL(ExpectedCacheEvent expected) {
    checkCL(expected, true);
  }

  // Verifies exactly one cache-listener invocation; optionally resets all callback state.
  private void checkCL(ExpectedCacheEvent expected, boolean clearCallbackState) {
    assertEquals(1, this.clInvokeCount);
    expected.check(this.clLastEvent);
    if (clearCallbackState) {
      clearCallbackState();
    }
  }

  // Installs recording cache listener + writer on the factory and a transaction
  // listener on the cache; each callback just captures the event and bumps a counter.
  private void setCallbacks(AttributesFactory af) {
    CacheListener cl1 = new CacheListener() {
      public void afterUpdate(EntryEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void afterCreate(EntryEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void afterInvalidate(EntryEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void afterDestroy(EntryEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void afterRegionInvalidate(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void afterRegionDestroy(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void afterRegionClear(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void afterRegionCreate(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void afterRegionLive(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }
      public void close() {
        clClosed = true;
      }
    };
    CacheWriter cw = new CacheWriter() {
      public void beforeUpdate(EntryEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }
      public void beforeCreate(EntryEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }
      public void beforeDestroy(EntryEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }
      public void beforeRegionDestroy(RegionEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }
      public void beforeRegionClear(RegionEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }
      public void close() {
        cwClosed = true;
      }
    };
    af.addCacheListener(cl1);
    af.setCacheWriter(cw);
    {
      TransactionListener tl = new TransactionListenerAdapter() {
        public void afterCommit(TransactionEvent e) {
          tlLastEvents = e.getEvents();
          tlInvokeCount++;
        }
        public void close() {
          tlClosed = true;
        };
      };
      CacheTransactionManager ctm = this.c.getCacheTransactionManager();
      ctm.addListener(tl);
    }
  }

  /**
   * Confirms region (non-map) methods
   */
  public void testRegionMethods() throws Exception {
    Object cbArg = new Object();
    AttributesFactory af =
new AttributesFactory(); af.setDataPolicy(DataPolicy.EMPTY); setCallbacks(af); clearCallbackState(); ExpectedRegionEvent expre = new ExpectedRegionEvent(); assertEquals(0, getStats().getRegions()); Region r = this.c.createRegion("r", af.create()); assertEquals(1, getStats().getRegions()); expre.r = r; expre.op = Operation.REGION_CREATE; expre.cbArg = null; checkNoCW(); checkCL(expre); assertEquals("r", r.getName()); assertEquals("/r", r.getFullPath()); assertEquals(null, r.getParentRegion()); assertEquals(DataPolicy.EMPTY, r.getAttributes().getDataPolicy()); r.getAttributesMutator(); try { r.getStatistics(); fail ("expected StatisticsDisabledException"); } catch (StatisticsDisabledException expected) { // because they were not enabled in the region attributes } r.invalidateRegion(); expre.op = Operation.REGION_INVALIDATE; expre.cbArg = null; checkNoCW(); checkCL(expre); r.invalidateRegion(cbArg); expre.cbArg = cbArg; checkNoCW(); checkCL(expre); r.localInvalidateRegion(); expre.op = Operation.REGION_LOCAL_INVALIDATE; expre.cbArg = null; checkNoCW(); checkCL(expre); r.localInvalidateRegion(cbArg); expre.cbArg = cbArg; checkNoCW(); checkCL(expre); r.destroyRegion(); assertEquals(true, r.isDestroyed()); assertEquals(0, getStats().getRegions()); expre.op = Operation.REGION_DESTROY; expre.cbArg = null; checkCW(expre); checkCL(expre); r = this.c.createRegion("r", af.create()); expre.r = r; expre.op = Operation.REGION_CREATE; expre.cbArg = null; checkNoCW(); checkCL(expre); r.destroyRegion(cbArg); assertEquals(0, getStats().getRegions()); assertEquals(true, r.isDestroyed()); expre.op = Operation.REGION_DESTROY; expre.cbArg = cbArg; checkCW(expre); checkCL(expre); r = this.c.createRegion("r", af.create()); expre.r = r; expre.op = Operation.REGION_CREATE; expre.cbArg = null; checkNoCW(); checkCL(expre); r.localDestroyRegion(); assertEquals(0, getStats().getRegions()); assertEquals(true, r.isDestroyed()); expre.op = Operation.REGION_LOCAL_DESTROY; expre.cbArg = null; 
checkNoCW(); checkCWClosed(); checkCLClosed(); checkCL(expre); r = this.c.createRegion("r", af.create()); expre.r = r; expre.op = Operation.REGION_CREATE; expre.cbArg = null; checkNoCW(); checkCL(expre); r.localDestroyRegion(cbArg); assertEquals(0, getStats().getRegions()); assertEquals(true, r.isDestroyed()); expre.op = Operation.REGION_LOCAL_DESTROY; expre.cbArg = cbArg; checkNoCW(); checkCWClosed(); checkCLClosed(); checkCL(expre); r = this.c.createRegion("r", af.create()); expre.r = r; expre.op = Operation.REGION_CREATE; expre.cbArg = null; checkNoCW(); checkCL(expre); r.close(); assertEquals(0, getStats().getRegions()); assertEquals(true, r.isDestroyed()); expre.op = Operation.REGION_CLOSE; expre.cbArg = null; checkNoCW(); checkCWClosed(); checkCLClosed(); checkCL(expre); r = this.c.createRegion("r", af.create()); assertEquals(1, getStats().getRegions()); expre.r = r; expre.op = Operation.REGION_CREATE; expre.cbArg = null; checkNoCW(); checkCL(expre); try { r.saveSnapshot(System.out); fail("expected UnsupportedOperationException"); } catch (UnsupportedOperationException expected) { } try { r.loadSnapshot(System.in); fail("expected UnsupportedOperationException"); } catch (UnsupportedOperationException expected) { } { Region sr = r.createSubregion("sr", af.create()); assertEquals(2, getStats().getRegions()); expre.r = sr; expre.op = Operation.REGION_CREATE; expre.cbArg = null; checkNoCW(); checkCL(expre); assertEquals("sr", sr.getName()); assertEquals("/r/sr", sr.getFullPath()); assertEquals(r, sr.getParentRegion()); assertEquals(sr, r.getSubregion("sr")); assertEquals(Collections.singleton(sr), r.subregions(false)); sr.close(); assertEquals(1, getStats().getRegions()); expre.op = Operation.REGION_CLOSE; expre.cbArg = null; checkNoCW(); checkCWClosed(); checkCLClosed(); checkCL(expre); assertEquals(true, sr.isDestroyed()); assertEquals(null, r.getSubregion("sr")); assertEquals(Collections.EMPTY_SET, r.subregions(false)); } ExpectedEntryEvent expee = new 
ExpectedEntryEvent(); expee.r = r; expee.key = "key"; int creates = getStats().getCreates(); // int puts = getStats().getPuts(); // int updates = getStats().getUpdates(); int destroys = getStats().getDestroys(); int invalidates = getStats().getInvalidates(); int gets = getStats().getGets(); int misses = getStats().getMisses(); r.put("key", "value", cbArg); expee.op = Operation.CREATE; creates++; assertEquals(creates, getStats().getCreates()); expee.cbArg = cbArg; expee.newValue = "value"; checkCW(expee); checkCL(expee); // note on a non-proxy region create after put fails with EntryExistsException r.create("key", "value", cbArg); creates++; assertEquals(creates, getStats().getCreates()); expee.op = Operation.CREATE; expee.cbArg = cbArg; expee.newValue = "value"; checkCW(expee); checkCL(expee); assertEquals(null, r.getEntry("key")); assertEquals(null, r.get("key", cbArg)); gets++; assertEquals(gets, getStats().getGets()); misses++; assertEquals(misses, getStats().getMisses()); checkNoCW(); checkNoCL(); r.invalidate("key"); invalidates++; assertEquals(invalidates, getStats().getInvalidates()); expee.op = Operation.INVALIDATE; expee.cbArg = null; expee.newValue = null; checkNoCW(); checkCL(expee); r.invalidate("key", cbArg); invalidates++; assertEquals(invalidates, getStats().getInvalidates()); expee.op = Operation.INVALIDATE; expee.cbArg = cbArg; expee.newValue = null; checkNoCW(); checkCL(expee); try { r.localInvalidate("key"); fail("expected EntryNotFoundException"); } catch (EntryNotFoundException expected) { } try { r.localInvalidate("key", cbArg); fail("expected EntryNotFoundException"); } catch (EntryNotFoundException expected) { } assertEquals(invalidates, getStats().getInvalidates()); checkNoCW(); checkNoCL(); r.destroy("key"); destroys++; assertEquals(destroys, getStats().getDestroys()); expee.op = Operation.DESTROY; expee.cbArg = null; expee.newValue = null; checkCW(expee); checkCL(expee); r.destroy("key", cbArg); destroys++; assertEquals(destroys, 
getStats().getDestroys()); expee.op = Operation.DESTROY; expee.cbArg = cbArg; expee.newValue = null; checkCW(expee); checkCL(expee); try { r.localDestroy("key"); fail("expected EntryNotFoundException"); } catch (EntryNotFoundException expected) { } try { r.localDestroy("key", cbArg); fail("expected EntryNotFoundException"); } catch (EntryNotFoundException expected) { } assertEquals(destroys, getStats().getDestroys()); checkNoCW(); checkNoCL(); assertEquals(Collections.EMPTY_SET, r.keys()); assertEquals(Collections.EMPTY_SET, r.entries(true)); assertEquals(Collections.EMPTY_SET, r.entrySet(true)); assertEquals(this.c, r.getCache()); r.setUserAttribute(cbArg); assertEquals(cbArg, r.getUserAttribute()); checkNoCW(); checkNoCL(); r.put("key", "value", cbArg); creates++; assertEquals(creates, getStats().getCreates()); expee.op = Operation.CREATE; expee.cbArg = cbArg; expee.newValue = "value"; checkCW(expee); checkCL(expee); assertEquals(false, r.containsValueForKey("key")); assertEquals(false, r.existsValue("this = 'value'")); { SelectResults sr = r.query("this = 'value'"); assertEquals(Collections.EMPTY_SET, sr.asSet()); } assertEquals(null, r.selectValue("this = 'value'")); try { r.getRegionDistributedLock(); fail("expected IllegalStateException"); } catch (IllegalStateException expected) { // because we are not global } try { r.getDistributedLock("key"); fail("expected IllegalStateException"); } catch (IllegalStateException expected) { // because we are not global } try { r.becomeLockGrantor(); fail("expected IllegalStateException"); } catch (IllegalStateException expected) { // because we are not global } try { r.writeToDisk(); fail("expected IllegalStateException"); } catch (IllegalStateException expected) { // because we are not configured for disk } checkNoCW(); checkNoCL(); // check to see if a local loader works { CacheLoader cl = new CacheLoader() { public Object load(LoaderHelper helper) throws CacheLoaderException { return "loadedValue"; } public void 
close() { }
      };
      r.getAttributesMutator().setCacheLoader(cl);
      // On a proxy (EMPTY) region a get never finds a stored entry, so it is
      // counted as a miss and the local loader runs: LOCAL_LOAD_CREATE fires.
      r.get("key", cbArg);
      gets++;
      assertEquals(gets, getStats().getGets());
      misses++;
      assertEquals(misses, getStats().getMisses());
      expee.op = Operation.LOCAL_LOAD_CREATE;
      expee.newValue = "loadedValue";
      checkCW(expee);
      checkCL(expee);
      // Detach the loader again so later operations are not affected by it.
      r.getAttributesMutator().setCacheLoader(null);
    }
  }

  /**
   * Confirms map methods.
   * On a proxy (DataPolicy.EMPTY) region the Map-style mutators still fire
   * callbacks and update stats, but nothing is ever stored locally, so all
   * read-style Map methods report an empty region.
   */
  public void testMapMethods() throws Exception {
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    setCallbacks(af);
    clearCallbackState();
    ExpectedRegionEvent expre = new ExpectedRegionEvent();
    Region r = this.c.createRegion("r", af.create());
    expre.r = r;
    expre.cbArg = null;
    expre.op = Operation.REGION_CREATE;
    checkNoCW();
    checkCL(expre);
    // Snapshot the stats these operations are expected to change.
    int creates = getStats().getCreates();
    // int puts = getStats().getPuts();
    // int updates = getStats().getUpdates();
    int destroys = getStats().getDestroys();
    // int invalidates = getStats().getInvalidates();
    int gets = getStats().getGets();
    int misses = getStats().getMisses();
    ExpectedEntryEvent expee = new ExpectedEntryEvent();
    expee.r = r;
    expee.key = "key";
    expee.cbArg = null;
    // put returns null on a proxy: there is no stored old value to report.
    assertEquals(null, r.put("key", "value"));
    creates++;
    assertEquals(creates, getStats().getCreates());
    expee.op = Operation.CREATE;
    expee.newValue = "value";
    checkCW(expee);
    checkCL(expee);
    {
      HashMap m = new HashMap();
      m.put("k1", "v1");
      m.put("k2", "v2");
      r.putAll(m);
      // Still empty locally, but one event per entry was delivered.
      assertEquals(0, r.size());
      // @todo darrel: check events
      assertEquals(2, this.cwInvokeCount);
      assertEquals(2, this.clInvokeCount);
      clearCallbackState();
      creates += 2;
      assertEquals(creates, getStats().getCreates());
    }
    // Read-style Map methods all see an empty region on a proxy.
    assertEquals(false, r.containsKey("key"));
    assertEquals(false, r.containsValue("value"));
    assertEquals(Collections.EMPTY_SET, r.entrySet());
    assertEquals(true, r.isEmpty());
    assertEquals(Collections.EMPTY_SET, r.keySet());
    assertEquals(0, r.size());
    assertEquals(Collections.EMPTY_LIST, new ArrayList(r.values()));
    checkNoCW();
    checkNoCL();
    assertEquals(null, r.get("key"));
    gets++;
assertEquals(gets, getStats().getGets());
    misses++;
    assertEquals(misses, getStats().getMisses());
    checkNoCW();
    checkNoCL();
    // remove also returns null: a proxy holds no local old value.
    assertEquals(null, r.remove("key"));
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    expee.op = Operation.DESTROY;
    expee.key = "key";
    expee.newValue = null;
    checkCW(expee);
    checkCL(expee);
    // localClear is a local-only op: listener fires but the writer does not.
    r.localClear();
    expre.op = Operation.REGION_LOCAL_CLEAR;
    checkNoCW();
    checkCL(expre);
    // A distributed clear goes through the cache writer as well.
    r.clear();
    expre.op = Operation.REGION_CLEAR;
    checkCW(expre);
    checkCL(expre);
  }

  /**
   * Check region ops on a proxy region done from a tx.
   */
  public void testAllMethodsWithTX() throws Exception {
    Object cbArg = new Object();
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    setCallbacks(af);
    clearCallbackState();
    CacheTransactionManager ctm = this.c.getCacheTransactionManager();
    ExpectedRegionEvent expre = new ExpectedRegionEvent();
    Region r = this.c.createRegion("r", af.create());
    expre.r = r;
    expre.cbArg = null;
    expre.op = Operation.REGION_CREATE;
    checkNoCW();
    checkNoTL();
    checkCL(expre);
    // Snapshot the stats the transactional ops are expected to change.
    int creates = getStats().getCreates();
    // int puts = getStats().getPuts();
    // int updates = getStats().getUpdates();
    int destroys = getStats().getDestroys();
    int invalidates = getStats().getInvalidates();
    // int gets = getStats().getGets();
    // int misses = getStats().getMisses();
    ExpectedEntryEvent expee = new ExpectedEntryEvent();
    expee.r = r;
    expee.key = "key";
    // Local entry ops on a missing key fail even inside a tx.
    ctm.begin();
    try {
      r.localInvalidate("key");
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    try {
      r.localDestroy("key");
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    ctm.rollback();
    // In a tx the writer fires at operation time; the cache listener and the
    // tx listener only fire at commit.
    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.put("key", "value", cbArg);
    expee.op = Operation.CREATE;
    expee.cbArg = cbArg;
    expee.newValue = "value";
    checkCW(expee);
    checkNoTL();
    checkNoCL();
    clearCallbackState();
    ctm.commit();
    checkCL(expee, false/*ClearCallbacks*/);
    checkTL(expee);
    creates++;
    assertEquals(creates,
getStats().getCreates());
    // create inside a tx: writer at op time, listener/tx-listener at commit.
    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.create("key", "value", cbArg);
    expee.op = Operation.CREATE;
    expee.cbArg = cbArg;
    expee.newValue = "value";
    checkCW(expee);
    checkNoTL();
    checkNoCL();
    clearCallbackState();
    ctm.commit();
    checkCL(expee, false/*ClearCallbacks*/);
    checkTL(expee);
    creates++;
    assertEquals(creates, getStats().getCreates());
    // invalidate inside a tx: no cache writer is invoked for invalidates.
    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.invalidate("key", cbArg);
    expee.op = Operation.INVALIDATE;
    expee.cbArg = cbArg;
    expee.newValue = null;
    checkNoCW();
    checkNoTL();
    checkNoCL();
    clearCallbackState();
    ctm.commit();
    checkCL(expee, false/*ClearCallbacks*/);
    invalidates++;
    assertEquals(invalidates, getStats().getInvalidates());
    checkTL(expee);
    // destroy inside a tx.
    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.destroy("key", cbArg);
    expee.op = Operation.DESTROY;
    expee.cbArg = cbArg;
    expee.newValue = null;
    checkCW(expee);
    checkNoTL();
    checkNoCL();
    clearCallbackState();
    ctm.commit();
    checkCL(expee, false/*ClearCallbacks*/);
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    checkTL(expee);
    // create followed by destroy in the same tx commits as a destroy.
    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.create("key", "value", cbArg);
    r.destroy("key", cbArg);
    clearCallbackState();
    ctm.commit();
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    expee.op = Operation.DESTROY;
    checkTL(expee);
    // the following confirms that bug 37903 is fixed
    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.invalidate("key");
    r.localInvalidate("key");
    r.localDestroy("key", cbArg);
    // note that the following would fail on a non-proxy with EntryNotFound
    // so it should also fail on a proxy
    try {
      // note if bug 37903 exists then the next line will throw an AssertionError
      r.destroy("key", cbArg);
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    clearCallbackState();
    ctm.commit();
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    expee.op = Operation.LOCAL_DESTROY;
    checkTL(expee);
  }

  /**
   * Make sure a proxy region can be lru and that it makes no difference
   * since proxies are always empty
   */
  public void testLRU() throws Exception {
    AttributesFactory af = new AttributesFactory();
    af.setEvictionAttributes(EvictionAttributes.createLRUEntryAttributes(1));
    CacheListener cl1 = new CacheListenerAdapter() {
      public void afterDestroy(EntryEvent e) {
        clInvokeCount++;
      }
    };
    af.addCacheListener(cl1);
    // // first make sure this test is valid by confirming we see evictions
    // // on a non-proxy lru
    // {
    // af.setDataPolicy(DataPolicy.NORMAL);
    // Region r = this.c.createRegion("rLRU", af.create());
    // clearCallbackState();
    // assertTrue(clInvokeCount == 0);
    // for (int i=0; i < 10; i++) {
    // r.put("key" + i, "value" + i);
    // }
    // assertTrue(clInvokeCount > 0);
    // }
    // now try it with a proxy region which should never to do an eviction.
    {
      af.setDataPolicy(DataPolicy.EMPTY);
      // LRU eviction combined with EMPTY is invalid, so create must fail.
      try {
        af.create();
        fail("expected IllegalStateException");
      } catch (IllegalStateException expected) {
      }
      // Region r = this.c.createRegion("rEMPTY", af.create());
      // clearCallbackState();
      // assertTrue(clInvokeCount == 0);
      // for (int i=0; i < 10; i++) {
      // r.put("key" + i, "value" + i);
      // }
      // assertTrue(clInvokeCount == 0);
    }
  }

  /**
   * Make sure a proxy region expiration behaves as expected
   */
  public void testExpiration() throws Exception {
    // Interpret expiration times as milliseconds to keep the test fast.
    System.setProperty(LocalRegion.EXPIRY_MS_PROPERTY, "true");
    try {
      // now make sure they don't on proxy
      {
        AttributesFactory af = new AttributesFactory();
        af.setStatisticsEnabled(true);
        af.setEntryIdleTimeout(new ExpirationAttributes(1, ExpirationAction.LOCAL_INVALIDATE));
        af.setEntryTimeToLive(new ExpirationAttributes(2, ExpirationAction.LOCAL_DESTROY));
        af.setDataPolicy(DataPolicy.EMPTY);
        // Entry expiration combined with EMPTY is invalid, so create must fail.
        try {
          af.create();
          fail("expected IllegalStateException");
        } catch (IllegalStateException expected) {
        }
      }
      // make sure regionIdleTimeout works on proxy
      {
        CacheListener cl1 = new CacheListenerAdapter() {
          public void afterRegionDestroy(RegionEvent e) {
            clInvokeCount++;
          }

          public void
afterRegionInvalidate(RegionEvent e) {
            clInvokeCount++;
          }
        };
        AttributesFactory af = new AttributesFactory();
        af.setStatisticsEnabled(true);
        final int EXPIRE_MS = 500;
        af.setRegionIdleTimeout(new ExpirationAttributes(EXPIRE_MS, ExpirationAction.LOCAL_DESTROY));
        af.addCacheListener(cl1);
        af.setDataPolicy(DataPolicy.EMPTY);
        clearCallbackState();
        Region r = this.c.createRegion("rEMPTY", af.create());
        assertTrue(clInvokeCount == 0);
        r.put("key", "value");
        // Keep accessing the region past the timeout window; while it is being
        // touched it never goes idle, so nothing may expire.
        long endTime = System.currentTimeMillis() + (EXPIRE_MS * 2);
        do {
          r.get("key");
        } while (System.currentTimeMillis() < endTime);
        assertEquals(0, this.clInvokeCount);
        // Now go quiet and wait for the idle timeout to destroy the region.
        Thread.sleep(EXPIRE_MS * 2);
        WaitCriterion ev = new WaitCriterion() {
          public boolean done() {
            return ProxyJUnitTest.this.clInvokeCount == 1;
          }

          public String description() {
            return "waiting for invocation";
          }
        };
        DistributedTestBase.waitForCriterion(ev, 1000, 200, true);
      }
      // make sure regionTimeToLive works on proxy
      {
        CacheListener cl1 = new CacheListenerAdapter() {
          public void afterRegionDestroy(RegionEvent e) {
            clInvokeCount++;
          }

          public void afterRegionInvalidate(RegionEvent e) {
            clInvokeCount++;
          }
        };
        AttributesFactory af = new AttributesFactory();
        af.setStatisticsEnabled(true);
        final int EXPIRE_MS = 500;
        af.setRegionTimeToLive(new ExpirationAttributes(EXPIRE_MS, ExpirationAction.LOCAL_DESTROY));
        af.addCacheListener(cl1);
        af.setDataPolicy(DataPolicy.EMPTY);
        clearCallbackState();
        Region r = this.c.createRegion("rEMPTY", af.create());
        assertTrue(clInvokeCount == 0);
        r.put("key", "value");
        // Keep modifying the region past the timeout window; writes keep it
        // alive under time-to-live, so nothing may expire yet.
        long endTime = System.currentTimeMillis() + (EXPIRE_MS * 2);
        do {
          r.put("key", "value");
        } while (System.currentTimeMillis() < endTime);
        assertEquals(0, this.clInvokeCount);
        // Stop writing and wait for the TTL to destroy the region.
        Thread.sleep(EXPIRE_MS * 2);
        WaitCriterion ev = new WaitCriterion() {
          public boolean done() {
            return ProxyJUnitTest.this.clInvokeCount > 0;
          }

          public String description() {
            return "waiting for invocation";
          }
        };
        DistributedTestBase.waitForCriterion(ev, 1000, 200, true);
      }
    } finally {
System.getProperties().remove(LocalRegion.EXPIRY_MS_PROPERTY);
      assertEquals(null, System.getProperty(LocalRegion.EXPIRY_MS_PROPERTY));
    }
  }

  /**
   * Make sure a disk region and proxy play nice.
   */
  public void testDiskProxy() throws Exception {
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    af.setEvictionAttributes(EvictionAttributes.createLRUEntryAttributes(1,
        EvictionAction.OVERFLOW_TO_DISK));
    // Overflow-to-disk eviction cannot be combined with EMPTY: a proxy never
    // stores anything locally, so region creation must be rejected.
    try {
      af.create();
      fail("expected IllegalStateException");
    } catch (IllegalStateException expected) {
    }
  }

  /**
   * Make sure CacheStatistics work on proxy.
   */
  public void testCacheStatisticsOnProxy() throws Exception {
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    af.setStatisticsEnabled(true);
    Region r = this.c.createRegion("rEMPTY", af.create());
    CacheStatistics stats = r.getStatistics();
    long lastModifiedTime = stats.getLastModifiedTime();
    long lastAccessedTime = stats.getLastAccessedTime();
    // put counts as both a modification and an access
    waitForSystemTimeChange();
    r.put("k", "v");
    assertTrue(lastModifiedTime != stats.getLastModifiedTime());
    assertTrue(lastAccessedTime != stats.getLastAccessedTime());
    lastModifiedTime = stats.getLastModifiedTime();
    lastAccessedTime = stats.getLastAccessedTime();
    // create also counts as both (no EntryExistsException on a proxy)
    waitForSystemTimeChange();
    r.create("k", "v");
    assertTrue(lastModifiedTime != stats.getLastModifiedTime());
    assertTrue(lastAccessedTime != stats.getLastAccessedTime());
    lastModifiedTime = stats.getLastModifiedTime();
    lastAccessedTime = stats.getLastAccessedTime();
    long missCount = stats.getMissCount();
    long hitCount = stats.getHitCount();
    // get is an access only, and on a proxy it is always a miss
    waitForSystemTimeChange();
    r.get("k");
    assertEquals(lastModifiedTime, stats.getLastModifiedTime());
    assertTrue(lastAccessedTime != stats.getLastAccessedTime());
    assertEquals(hitCount, stats.getHitCount());
    assertEquals(missCount+1, stats.getMissCount());
  }

  /**
   * Waits (hot) until the system time changes.
*/ private void waitForSystemTimeChange() { long start = System.currentTimeMillis(); while (System.currentTimeMillis() == start); } }
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package opensource.component.nineoldandroids.animation; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.content.res.XmlResourceParser; import android.content.res.Resources.NotFoundException; import android.util.AttributeSet; import android.util.TypedValue; import android.util.Xml; import android.view.animation.AnimationUtils; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; import java.io.IOException; import java.util.ArrayList; /** * This class is used to instantiate animator XML files into Animator objects. * <p> * For performance reasons, inflation relies heavily on pre-processing of * XML files that is done at build time. Therefore, it is not currently possible * to use this inflater with an XmlPullParser over a plain XML file at runtime; * it only works with an XmlPullParser returned from a compiled resource (R. * <em>something</em> file.) 
*/
public class AnimatorInflater {
    // Styleable index tables. These mirror the com.android.internal.R.styleable
    // tables the platform inflater uses, rebuilt here from public android.R.attr
    // ids so this class can run outside the framework.
    private static final int[] AnimatorSet = new int[] {
        /* 0 */ android.R.attr.ordering,
    };
    private static final int AnimatorSet_ordering = 0;

    private static final int[] PropertyAnimator = new int[] {
        /* 0 */ android.R.attr.propertyName,
    };
    private static final int PropertyAnimator_propertyName = 0;

    private static final int[] Animator = new int[] {
        /* 0 */ android.R.attr.interpolator,
        /* 1 */ android.R.attr.duration,
        /* 2 */ android.R.attr.startOffset,
        /* 3 */ android.R.attr.repeatCount,
        /* 4 */ android.R.attr.repeatMode,
        /* 5 */ android.R.attr.valueFrom,
        /* 6 */ android.R.attr.valueTo,
        /* 7 */ android.R.attr.valueType,
    };
    private static final int Animator_interpolator = 0;
    private static final int Animator_duration = 1;
    private static final int Animator_startOffset = 2;
    private static final int Animator_repeatCount = 3;
    private static final int Animator_repeatMode = 4;
    private static final int Animator_valueFrom = 5;
    private static final int Animator_valueTo = 6;
    private static final int Animator_valueType = 7;

    /**
     * These flags are used when parsing AnimatorSet objects
     */
    private static final int TOGETHER = 0;
    //private static final int SEQUENTIALLY = 1;

    /**
     * Enum values used in XML attributes to indicate the value for mValueType
     */
    private static final int VALUE_TYPE_FLOAT = 0;
    //private static final int VALUE_TYPE_INT = 1;
    //private static final int VALUE_TYPE_COLOR = 4;
    //private static final int VALUE_TYPE_CUSTOM = 5;

    /**
     * Loads an {@link Animator} object from a resource
     *
     * @param context Application context used to access resources
     * @param id The resource id of the animation to load
     * @return The animator object reference by the specified id
     * @throws android.content.res.Resources.NotFoundException when the animation cannot be loaded
     */
    public static Animator loadAnimator(Context context, int id) throws NotFoundException {
        XmlResourceParser parser = null;
        try {
            parser = context.getResources().getAnimation(id);
            return createAnimatorFromXml(context, parser);
        } catch (XmlPullParserException ex) {
            // Re-wrap as NotFoundException, keeping the parse failure as the cause.
            Resources.NotFoundException rnf =
                    new Resources.NotFoundException("Can't load animation resource ID #0x" +
                    Integer.toHexString(id));
            rnf.initCause(ex);
            throw rnf;
        } catch (IOException ex) {
            Resources.NotFoundException rnf =
                    new Resources.NotFoundException("Can't load animation resource ID #0x" +
                    Integer.toHexString(id));
            rnf.initCause(ex);
            throw rnf;
        } finally {
            if (parser != null) parser.close();
        }
    }

    // Entry point for recursive parsing: wraps the parser in an AttributeSet
    // and starts with no parent set.
    private static Animator createAnimatorFromXml(Context c, XmlPullParser parser)
            throws XmlPullParserException, IOException {
        return createAnimatorFromXml(c, parser, Xml.asAttributeSet(parser), null, 0);
    }

    // Parses one nesting level of animator XML. Recognised tags are
    // <objectAnimator>, <animator> and <set>; a <set> recurses with itself as
    // the parent so its children are collected and played together or
    // sequentially according to the set's ordering attribute.
    private static Animator createAnimatorFromXml(Context c, XmlPullParser parser,
            AttributeSet attrs, AnimatorSet parent, int sequenceOrdering)
            throws XmlPullParserException, IOException {
        Animator anim = null;
        ArrayList<Animator> childAnims = null;

        // Make sure we are on a start tag.
        int type;
        int depth = parser.getDepth();

        // Loop over siblings at this depth; stop at the matching END_TAG or
        // at end of document.
        while (((type=parser.next()) != XmlPullParser.END_TAG || parser.getDepth() > depth)
                && type != XmlPullParser.END_DOCUMENT) {

            if (type != XmlPullParser.START_TAG) {
                continue;
            }

            String name = parser.getName();

            if (name.equals("objectAnimator")) {
                anim = loadObjectAnimator(c, attrs);
            } else if (name.equals("animator")) {
                anim = loadAnimator(c, attrs, null);
            } else if (name.equals("set")) {
                anim = new AnimatorSet();
                TypedArray a = c.obtainStyledAttributes(attrs,
                        /*com.android.internal.R.styleable.*/AnimatorSet);
                TypedValue orderingValue = new TypedValue();
                a.getValue(/*com.android.internal.R.styleable.*/AnimatorSet_ordering, orderingValue);
                // Only an explicit decimal int is honoured; anything else
                // falls back to TOGETHER.
                int ordering = orderingValue.type == TypedValue.TYPE_INT_DEC ?
                        orderingValue.data : TOGETHER;
                createAnimatorFromXml(c, parser, attrs, (AnimatorSet) anim, ordering);
                a.recycle();
            } else {
                throw new RuntimeException("Unknown animator name: " + parser.getName());
            }

            if (parent != null) {
                if (childAnims == null) {
                    childAnims = new ArrayList<Animator>();
                }
                childAnims.add(anim);
            }
        }
        if (parent != null && childAnims != null) {
            Animator[] animsArray = new Animator[childAnims.size()];
            int index = 0;
            for (Animator a : childAnims) {
                animsArray[index++] = a;
            }
            if (sequenceOrdering == TOGETHER) {
                parent.playTogether(animsArray);
            } else {
                parent.playSequentially(animsArray);
            }
        }

        return anim;
    }

    // Builds an ObjectAnimator: the shared Animator attributes are read first,
    // then the target property name.
    private static ObjectAnimator loadObjectAnimator(Context context, AttributeSet attrs)
            throws NotFoundException {
        ObjectAnimator anim = new ObjectAnimator();

        loadAnimator(context, attrs, anim);

        TypedArray a = context.obtainStyledAttributes(attrs,
                /*com.android.internal.R.styleable.*/PropertyAnimator);

        String propertyName =
                a.getString(/*com.android.internal.R.styleable.*/PropertyAnimator_propertyName);

        anim.setPropertyName(propertyName);

        a.recycle();

        return anim;
    }

    /**
     * Creates a new animation whose parameters come from the specified context and
     * attributes set.
     *
     * @param context the application environment
     * @param attrs the set of attributes holding the animation parameters
     */
    private static ValueAnimator loadAnimator(Context context, AttributeSet attrs,
            ValueAnimator anim) throws NotFoundException {
        TypedArray a =
                context.obtainStyledAttributes(attrs, /*com.android.internal.R.styleable.*/Animator);

        long duration = a.getInt(/*com.android.internal.R.styleable.*/Animator_duration, 0);

        long startDelay = a.getInt(/*com.android.internal.R.styleable.*/Animator_startOffset, 0);

        int valueType = a.getInt(/*com.android.internal.R.styleable.*/Animator_valueType,
                VALUE_TYPE_FLOAT);

        if (anim == null) {
            anim = new ValueAnimator();
        }
        //TypeEvaluator evaluator = null;

        int valueFromIndex = /*com.android.internal.R.styleable.*/Animator_valueFrom;
        int valueToIndex = /*com.android.internal.R.styleable.*/Animator_valueTo;

        boolean getFloats = (valueType == VALUE_TYPE_FLOAT);

        // peekValue lets us see the raw TypedValue type without resolving it yet.
        TypedValue tvFrom = a.peekValue(valueFromIndex);
        boolean hasFrom = (tvFrom != null);
        int fromType = hasFrom ? tvFrom.type : 0;
        TypedValue tvTo = a.peekValue(valueToIndex);
        boolean hasTo = (tvTo != null);
        int toType = hasTo ? tvTo.type : 0;

        if ((hasFrom && (fromType >= TypedValue.TYPE_FIRST_COLOR_INT) &&
                (fromType <= TypedValue.TYPE_LAST_COLOR_INT)) ||
                (hasTo && (toType >= TypedValue.TYPE_FIRST_COLOR_INT) &&
                (toType <= TypedValue.TYPE_LAST_COLOR_INT))) {
            // special case for colors: ignore valueType and get ints
            getFloats = false;
            anim.setEvaluator(new ArgbEvaluator());
        }

        if (getFloats) {
            float valueFrom;
            float valueTo;
            if (hasFrom) {
                // Dimension values are resolved against display metrics;
                // plain floats are read directly.
                if (fromType == TypedValue.TYPE_DIMENSION) {
                    valueFrom = a.getDimension(valueFromIndex, 0f);
                } else {
                    valueFrom = a.getFloat(valueFromIndex, 0f);
                }
                if (hasTo) {
                    if (toType == TypedValue.TYPE_DIMENSION) {
                        valueTo = a.getDimension(valueToIndex, 0f);
                    } else {
                        valueTo = a.getFloat(valueToIndex, 0f);
                    }
                    anim.setFloatValues(valueFrom, valueTo);
                } else {
                    anim.setFloatValues(valueFrom);
                }
            } else {
                // No "from": animate to the target value only.
                if (toType == TypedValue.TYPE_DIMENSION) {
                    valueTo = a.getDimension(valueToIndex, 0f);
                } else {
                    valueTo = a.getFloat(valueToIndex, 0f);
                }
                anim.setFloatValues(valueTo);
            }
        } else {
            int valueFrom;
            int valueTo;
            if (hasFrom) {
                if (fromType == TypedValue.TYPE_DIMENSION) {
                    valueFrom = (int) a.getDimension(valueFromIndex, 0f);
                } else if ((fromType >= TypedValue.TYPE_FIRST_COLOR_INT) &&
                        (fromType <= TypedValue.TYPE_LAST_COLOR_INT)) {
                    valueFrom = a.getColor(valueFromIndex, 0);
                } else {
                    valueFrom = a.getInt(valueFromIndex, 0);
                }
                if (hasTo) {
                    if (toType == TypedValue.TYPE_DIMENSION) {
                        valueTo = (int) a.getDimension(valueToIndex, 0f);
                    } else if ((toType >= TypedValue.TYPE_FIRST_COLOR_INT) &&
                            (toType <= TypedValue.TYPE_LAST_COLOR_INT)) {
                        valueTo = a.getColor(valueToIndex, 0);
                    } else {
                        valueTo = a.getInt(valueToIndex, 0);
                    }
                    anim.setIntValues(valueFrom, valueTo);
                } else {
                    anim.setIntValues(valueFrom);
                }
            } else {
                if (hasTo) {
                    if (toType == TypedValue.TYPE_DIMENSION) {
                        valueTo = (int) a.getDimension(valueToIndex, 0f);
                    } else if ((toType >= TypedValue.TYPE_FIRST_COLOR_INT) &&
                            (toType <= TypedValue.TYPE_LAST_COLOR_INT)) {
                        valueTo = a.getColor(valueToIndex, 0);
                    } else {
                        valueTo = a.getInt(valueToIndex, 0);
                    }
                    anim.setIntValues(valueTo);
                }
            }
        }

        anim.setDuration(duration);
        anim.setStartDelay(startDelay);

        if (a.hasValue(/*com.android.internal.R.styleable.*/Animator_repeatCount)) {
            anim.setRepeatCount(
                    a.getInt(/*com.android.internal.R.styleable.*/Animator_repeatCount, 0));
        }
        if (a.hasValue(/*com.android.internal.R.styleable.*/Animator_repeatMode)) {
            anim.setRepeatMode(
                    a.getInt(/*com.android.internal.R.styleable.*/Animator_repeatMode,
                    ValueAnimator.RESTART));
        }
        //if (evaluator != null) {
        //    anim.setEvaluator(evaluator);
        //}

        // An explicit interpolator resource overrides the animator's default.
        final int resID =
                a.getResourceId(/*com.android.internal.R.styleable.*/Animator_interpolator, 0);
        if (resID > 0) {
            anim.setInterpolator(AnimationUtils.loadInterpolator(context, resID));
        }
        a.recycle();

        return anim;
    }
}
package browserview;

import java.io.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.openqa.selenium.*;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.os.Kernel32;

import com.sun.jna.platform.win32.User32;
import com.sun.jna.platform.win32.WinDef.HWND;
import com.sun.jna.platform.win32.WinUser;

import ui.UI;
import util.GitHubURL;
import util.PlatformSpecific;
import util.events.testevents.JumpToCommentEvent;
import util.events.testevents.SendKeysToBrowserEvent;

/**
 * An abstraction for the functions of the Selenium web driver.
 * It depends minimally on UI for width adjustments.
 *
 * All navigation commands are funnelled through a single-threaded executor
 * (see {@link #runBrowserOperation(Runnable)}) so they execute in order and
 * never race each other.
 */
public class BrowserComponent {

    private static final Logger logger = LogManager.getLogger(BrowserComponent.class.getName());

    // Version suffix embedded in the bundled chromedriver binary file names.
    private static final String CHROMEDRIVER_VERSION = "2-18";
    private static final boolean USE_MOBILE_USER_AGENT = false;

    // When true, browser actions are simulated by firing test events instead of
    // (or in addition to) driving a real Chrome instance.
    private boolean isTestChromeDriver;

    // Chrome, Android 4.2.2, Samsung Galaxy S4
    private static final String MOBILE_USER_AGENT =
            "Mozilla/5.0 (Linux; Android 4.2.2; GT-I9505 Build/JDQ39)" +
            "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Mobile Safari/537.36";

    // Classpath location of the packaged chromedriver executables.
    private static final String CHROME_DRIVER_LOCATION = "browserview/";
    private static final String CHROME_DRIVER_BINARY_NAME = determineChromeDriverBinaryName();
    // Dedicated Chrome profile directory so HubTurbo's cookies/session are isolated.
    private static final String CHROME_USER_DATA_DIR =
            System.getProperty("user.home") + File.separator + ".HubTurbo";

    // Page source captured after the last successful navigation; used by
    // hasBviewChanged() to detect page changes.
    private String pageContentOnLoad = "";

    // Win32 SetWindowPos flags (Windows-only window management via JNA).
    private static final int SWP_NOSIZE = 0x0001;
    private static final int SWP_NOMOVE = 0x0002;
    private static final int SWP_NOACTIVATE = 0x0010;

    private static HWND browserWindowHandle;
    private static User32 user32;

    private final UI ui;
    private ChromeDriverEx driver = null;

    // We want browser commands to be run on a separate thread, but not to
    // interfere with each other. This executor is limited to a single instance,
    // so it ensures that browser commands are queued and executed in sequence.

    // The alternatives would be to:
    // - allow race conditions
    // - interrupt the blocking WebDriver::get method

    // The first is not desirable and the second does not seem to be possible
    // at the moment.
    private Executor executor;

    public BrowserComponent(UI ui, boolean isTestChromeDriver) {
        this.ui = ui;
        executor = Executors.newSingleThreadExecutor();
        this.isTestChromeDriver = isTestChromeDriver;
        setupJNA();
        setupChromeDriverExecutable();
    }

    /**
     * Called on application startup. Blocks until the driver is created.
     * Guaranteed to only happen once.
     */
    public void initialise() {
        assert driver == null;
        // Driver creation is queued first on the single-thread executor, so any
        // operation queued below (login / page load) runs only after it completes.
        executor.execute(() -> {
            driver = createChromeDriver();
            logger.info("Successfully initialised browser component and ChromeDriver");
        });
        if (!isChromeCustomProfilePresent()) {
            login();
        } else {
            // Profile (and presumably the session cookies) already exist; skip login.
            runBrowserOperation(() -> driver.get(GitHubURL.MAIN_PAGE, false));
        }
    }

    /**
     * Called when application quits. Guaranteed to only happen once.
     */
    public void onAppQuit() {
        quit();
        removeChromeDriverIfNecessary();
    }

    /**
     * Returns true if the custom Chrome profile directory exists and is non-empty.
     */
    private boolean isChromeCustomProfilePresent() {
        File chromeCustomProfDir = new File(CHROME_USER_DATA_DIR);
        if (chromeCustomProfDir.isDirectory()) {
            String[] chromeCustomProfDirList = chromeCustomProfDir.list();
            if (chromeCustomProfDirList != null && chromeCustomProfDirList.length > 0) {
                return true;
            } else {
                // directory is empty
                return false;
            }
        } else {
            // directory does not exist yet
            return false;
        }
    }

    /**
     * Reset Chrome Custom Profile directory so that all cookies are cleared
     */
    public void cleanChromeCustomProfile() {
        try {
            FileUtils.cleanDirectory(new File(CHROME_USER_DATA_DIR));
        } catch (IOException e) {
            logger.error(e.getLocalizedMessage(), e);
        }
    }

    /**
     * Quits the browser component.
     */
    private void quit() {
        logger.info("Quitting browser component");

        // The application may quit before the browser is initialised.
        // In that case, do nothing.
        if (driver != null) {
            try {
                driver.quit();
            } catch (WebDriverException e) {
                // Chrome was closed; do nothing
            }
        }
    }

    /**
     * Creates, initialises, and returns a ChromeDriver.
     * @return
     */
    private ChromeDriverEx createChromeDriver() {
        ChromeOptions options = new ChromeOptions();
        options.addArguments("user-data-dir=" + CHROME_USER_DATA_DIR);
        if (USE_MOBILE_USER_AGENT) {
            options.addArguments(String.format("user-agent=\"%s\"", MOBILE_USER_AGENT));
        }
        ChromeDriverEx driver = new ChromeDriverEx(options, isTestChromeDriver);

        WebDriver.Options manage = driver.manage();
        if (!isTestChromeDriver) {
            // Position the browser window next to the (collapsed) HubTurbo panel.
            manage.window().setPosition(new Point((int) ui.getCollapsedX(), 0));
            manage.window().setSize(new Dimension((int) ui.getAvailableDimensions().getWidth(),
                    (int) ui.getAvailableDimensions().getHeight()));
            initialiseJNA();
        }
        return driver;
    }

    /**
     * Deletes the extracted chromedriver binary after an update, so the
     * new version gets re-extracted on next launch.
     */
    private void removeChromeDriverIfNecessary() {
        if (ui.getCommandLineArgs().containsKey(UI.ARG_UPDATED_TO)) {
            boolean success = new File(CHROME_DRIVER_BINARY_NAME).delete();
            if (!success) {
                logger.warn("Failed to delete chromedriver");
            }
        }
    }

    /**
     * Executes Javascript in the currently-active driver window.
     * Run on the UI thread (will block until execution is complete,
     * i.e. change implementation if long-running scripts must be run).
     * @param script
     */
    private void executeJavaScript(String script) {
        driver.executeScript(script);
        logger.info("Executed JavaScript " + script.substring(0, Math.min(script.length(), 10)));
    }

    /**
     * Navigates to the New Label page on GitHub.
     * Run on a separate thread.
     */
    public void newLabel() {
        logger.info("Navigating to New Label page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForNewLabel(ui.logic.getDefaultRepo()), false));
        bringToTop();
    }

    /**
     * Navigates to the New Milestone page on GitHub.
     * Run on a separate thread.
     */
    public void newMilestone() {
        logger.info("Navigating to New Milestone page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForNewMilestone(ui.logic.getDefaultRepo()), false));
        bringToTop();
    }

    /**
     * Navigates to the New Issue page on GitHub.
     * Run on a separate thread.
     */
    public void newIssue() {
        logger.info("Navigating to New Issue page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForNewIssue(ui.logic.getDefaultRepo()), false));
        bringToTop();
    }

    /**
     * Navigates to the HubTurbo documentation page.
     * Run on a separate thread.
     */
    public void showDocs() {
        logger.info("Showing documentation page");
        runBrowserOperation(() -> driver.get(GitHubURL.DOCS_PAGE, false));
    }

    /**
     * Navigates to the GitHub changelog page.
     * Run on a separate thread.
     */
//    public void showChangelog(String version) {
//        logger.info("Showing changelog for version " + version);
//        runBrowserOperation(() -> driver.get(GitHubURL.getChangelogForVersion(version)));
//    }

    /**
     * Navigates to the GitHub page for the given issue in the currently-active
     * driver window.
     * Run on a separate thread.
     */
    public void showIssue(String repoId, int id, boolean isPullRequest, boolean isForceRefresh) {
        if (isPullRequest) {
            logger.info("Showing pull request #" + id);
            runBrowserOperation(() -> driver.get(GitHubURL.getPathForPullRequest(repoId, id), isForceRefresh));
        } else {
            logger.info("Showing issue #" + id);
            runBrowserOperation(() -> driver.get(GitHubURL.getPathForIssue(repoId, id), isForceRefresh));
        }
    }

    /**
     * Clicks into the new-comment field of the currently loaded issue page.
     * NOTE(review): under test this fires a JumpToCommentEvent but still falls
     * through to the real driver call — presumably the test driver tolerates it.
     */
    public void jumpToComment(){
        if (isTestChromeDriver) {
            UI.events.triggerEvent(new JumpToCommentEvent());
        }
        try {
            WebElement comment = driver.findElementById("new_comment_field");
            comment.click();
            bringToTop();
        } catch (Exception e) {
            logger.warn("Unable to reach jump to comments. ");
        }
    }

    /**
     * Returns true if the driver exists and the original HubTurbo tab can still
     * be switched to; false signals callers to reset the browser.
     */
    private boolean isBrowserActive(){
        if (driver == null) return false;
        try {
            // Throws an exception if unable to switch to original HT tab
            // which then triggers a browser reset when called from runBrowserOperation
            WebDriver.TargetLocator switchTo = driver.switchTo();
            String windowHandle = driver.getWindowHandle();
            if (!isTestChromeDriver) switchTo.window(windowHandle);
            // When the HT tab is closed (but the window is still alive),
            // a lot of the operations on the driver (such as getCurrentURL)
            // will hang (without throwing an exception, the thread will just freeze the UI forever),
            // so we cannot use getCurrentURL/getTitle to check if the original HT tab
            // is still open. The above line does not hang the driver but still throws
            // an exception, thus letting us detect that the HT tab is not active any more.
            return true;
        } catch (WebDriverException e) {
            logger.warn("Unable to reach bview. ");
            return false;
        }
    }

    // A helper function for reseting browser.
    private void resetBrowser(){
        logger.info("Relaunching chrome.");
        quit(); // if the driver hangs
        driver = createChromeDriver();
        login();
    }

    /**
     * A helper function for running browser operations.
     * Takes care of running it on a separate thread, and normalises error-handling across
     * all types of code.
     */
    private void runBrowserOperation (Runnable operation) {
        executor.execute(() -> {
            if (isBrowserActive()) {
                try {
                    operation.run();
                    pageContentOnLoad = getCurrentPageSource();
                } catch (WebDriverException e) {
                    switch (BrowserComponentError.fromErrorMessage(e.getMessage())) {
                    case NoSuchWindow:
                        resetBrowser();
                        runBrowserOperation(operation); // Recurse and repeat
                        break;
                    case NoSuchElement:
                        logger.info("Warning: no such element! " + e.getMessage());
                        break;
                    default:
                        break;
                    }
                }
            } else {
                // Driver is dead or the HT tab is gone: relaunch and retry the operation.
                logger.info("Chrome window not responding.");
                resetBrowser();
                runBrowserOperation(operation);
            }
        });
    }

    /**
     * Logs in the currently-active driver window using the credentials
     * supplied by the user on login to the app.
     * Run on a separate thread.
     */
    public void login() {
        logger.info("Logging in on GitHub...");
        focus(ui.getMainWindowHandle());
        runBrowserOperation(() -> {
            driver.get(GitHubURL.LOGIN_PAGE, false);
            try {
                WebElement searchBox = driver.findElement(By.name("login"));
                searchBox.sendKeys(ui.logic.loginController.credentials.username);
                searchBox = driver.findElement(By.name("password"));
                searchBox.sendKeys(ui.logic.loginController.credentials.password);
                searchBox.submit();
            } catch (Exception e) {
                // Already logged in; do nothing
                logger.info("Unable to login, may already be logged in. ");
            }
        });
    }

    /**
     * One-time JNA setup.
     */
    private static void setupJNA() {
        if (PlatformSpecific.isOnWindows()) user32 = User32.INSTANCE;
    }

    /**
     * JNA initialisation. Should happen whenever the Chrome window is recreated.
     */
    private void initialiseJNA() {
        if (PlatformSpecific.isOnWindows()) {
            // A freshly-created Chrome window shows "data:," before any page loads.
            browserWindowHandle = user32.FindWindow(null, "data:, - Google Chrome");
        }
    }

    /**
     * Picks the bundled chromedriver binary name for the current platform.
     */
    public static String determineChromeDriverBinaryName() {
        if (PlatformSpecific.isOnMac()) {
            logger.info("Using chrome driver binary: chromedriver_" + CHROMEDRIVER_VERSION);
            return "chromedriver_" + CHROMEDRIVER_VERSION;
        } else if (PlatformSpecific.isOnWindows()) {
            logger.info("Using chrome driver binary: chromedriver_" + CHROMEDRIVER_VERSION + ".exe");
            return "chromedriver_" + CHROMEDRIVER_VERSION + ".exe";
        } else if (PlatformSpecific.isOn32BitsLinux()) {
            logger.info("Using chrome driver binary: chromedriver_linux_" + CHROMEDRIVER_VERSION);
            return "chromedriver_linux_" + CHROMEDRIVER_VERSION;
        } else if (PlatformSpecific.isOn64BitsLinux()) {
            logger.info("Using chrome driver binary: chromedriver_linux_x86_64_" + CHROMEDRIVER_VERSION);
            return "chromedriver_linux_x86_64_" + CHROMEDRIVER_VERSION;
        } else {
            // Unknown platform: fall back to the 32-bit Linux binary.
            logger.error("Unable to determine platform for chrome driver");
            logger.info("Using chrome driver binary: chromedriver_linux_" + CHROMEDRIVER_VERSION);
            return "chromedriver_linux_" + CHROMEDRIVER_VERSION;
        }
    }

    /**
     * Ensures that the chromedriver executable is in the project root before
     * initialisation. Since executables are packaged for all platforms, this also
     * picks the right version to use.
     */
    private static void setupChromeDriverExecutable() {
        File f = new File(CHROME_DRIVER_BINARY_NAME);
        if (!f.exists()) {
            // Extract the platform-appropriate binary from the jar.
            InputStream in = BrowserComponent.class.getClassLoader()
                    .getResourceAsStream(CHROME_DRIVER_LOCATION + CHROME_DRIVER_BINARY_NAME);
            assert in != null : "Could not find " + CHROME_DRIVER_BINARY_NAME + " at "
                    + CHROME_DRIVER_LOCATION
                    + "; this path must be updated if the executables are moved";
            OutputStream out;
            try {
                out = new FileOutputStream(CHROME_DRIVER_BINARY_NAME);
                IOUtils.copy(in, out);
                out.close();
                f.setExecutable(true);
            } catch (IOException e) {
                logger.error("Could not load Chrome driver binary! " + e.getLocalizedMessage(), e);
            }
            logger.info("Could not find " + CHROME_DRIVER_BINARY_NAME + "; extracted it from jar");
        } else {
            logger.info("Located " + CHROME_DRIVER_BINARY_NAME);
        }
        System.setProperty("webdriver.chrome.driver", CHROME_DRIVER_BINARY_NAME);
    }

    /**
     * Restores and foregrounds the browser window (Windows only; no-op elsewhere).
     */
    private void bringToTop(){
        if (PlatformSpecific.isOnWindows()) {
            user32.ShowWindow(browserWindowHandle, WinUser.SW_RESTORE);
            user32.SetForegroundWindow(browserWindowHandle);
        }
    }

    /**
     * Places the browser window just behind the given main window and focuses
     * the main window (Windows only; no-op elsewhere).
     */
    public void focus(HWND mainWindowHandle){
        if (PlatformSpecific.isOnWindows()) {
            // Restores browser window if it is minimized / maximized
            user32.ShowWindow(browserWindowHandle, WinUser.SW_SHOWNOACTIVATE);
            // SWP_NOMOVE and SWP_NOSIZE prevents the 0,0,0,0 parameters from taking effect.
            logger.info("Bringing bView to front");
            boolean success = user32.SetWindowPos(browserWindowHandle, mainWindowHandle, 0, 0, 0, 0,
                    SWP_NOMOVE | SWP_NOSIZE | SWP_NOACTIVATE);
            if (!success) {
                logger.info("Failed to bring bView to front.");
                logger.info(Kernel32.INSTANCE.GetLastError());
            }
            user32.SetForegroundWindow(mainWindowHandle);
        }
    }

    /**
     * Returns the current page's HTML, escaped for comparison/storage.
     */
    private String getCurrentPageSource() {
        return StringEscapeUtils.escapeHtml4(
                (String) driver.executeScript("return document.documentElement.outerHTML"));
    }

    /**
     * Returns true if the page content changed since the last navigation,
     * updating the stored snapshot as a side effect.
     */
    public boolean hasBviewChanged() {
        if (isTestChromeDriver) return true;
        if (isBrowserActive()) {
            if (getCurrentPageSource().equals(pageContentOnLoad)) return false;
            pageContentOnLoad = getCurrentPageSource();
            return true;
        }
        return false;
    }

    public void scrollToTop() {
        String script = "window.scrollTo(0, 0)";
        executeJavaScript(script);
    }

    public void scrollToBottom() {
        String script = "window.scrollTo(0, document.body.scrollHeight)";
        executeJavaScript(script);
    }

    public void scrollPage(boolean isDownScroll) {
        String script;
        if (isDownScroll) {
            script = "window.scrollBy(0,100)";
        } else {
            script = "window.scrollBy(0, -100)";
        }
        executeJavaScript(script);
    }

    /**
     * Sends a key press to the page body, triggering GitHub keyboard shortcuts.
     */
    private void sendKeysToBrowser(String keyCode) {
        if (isTestChromeDriver) {
            UI.events.triggerEvent(new SendKeysToBrowserEvent(keyCode));
        }
        WebElement body;
        try {
            body = driver.findElementByTagName("body");
            body.sendKeys(keyCode);
        } catch (Exception e) {
            logger.error("No such element");
        }
    }

    public void manageAssignees(String keyCode) {
        sendKeysToBrowser(keyCode.toLowerCase());
        bringToTop();
    }

    public void manageMilestones(String keyCode) {
        sendKeysToBrowser(keyCode.toLowerCase());
        bringToTop();
    }

    public void showIssues() {
        logger.info("Navigating to Issues page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForAllIssues(ui.logic.getDefaultRepo()), false));
    }

    public void showPullRequests() {
        logger.info("Navigating to Pull requests page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForPullRequests(ui.logic.getDefaultRepo()), false));
    }

    public void showKeyboardShortcuts() {
        logger.info("Navigating to Keyboard Shortcuts");
        runBrowserOperation(() -> driver.get(GitHubURL.KEYBOARD_SHORTCUTS_PAGE, false));
    }

    public void showMilestones() {
        logger.info("Navigating to Milestones page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForMilestones(ui.logic.getDefaultRepo()), false));
    }

    public void showContributors() {
        logger.info("Navigating to Contributors page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForContributors(ui.logic.getDefaultRepo()), false));
    }

    public boolean isCurrentUrlIssue() {
        return driver != null && GitHubURL.isUrlIssue(driver.getCurrentUrl());
    }

    public String getCurrentUrl() {
        return driver.getCurrentUrl();
    }

    /**
     * On a loaded pull-request page, clicks the Conversation tab.
     */
    public void switchToConversationTab() {
        if (GitHubURL.isPullRequestLoaded(getCurrentUrl())) {
            driver.findElement(By.xpath("//a[@data-container-id='discussion_bucket']")).click();
        }
    }
}
package org.apache.solr.schema;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.commons.io.IOUtils;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkCmdExecutor;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.util.SystemIdResolver;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;

/** Factory for ManagedIndexSchema */
public class ManagedIndexSchemaFactory extends IndexSchemaFactory implements SolrCoreAware {
  private static final Logger log = LoggerFactory.getLogger(ManagedIndexSchemaFactory.class);
  // Extension appended to a non-managed schema file/znode after upgrade.
  private static final String UPGRADED_SCHEMA_EXTENSION = ".bak";
  private static final String SCHEMA_DOT_XML = "schema.xml";

  public static final String DEFAULT_MANAGED_SCHEMA_RESOURCE_NAME = "managed-schema";
  public static final String MANAGED_SCHEMA_RESOURCE_NAME = "managedSchemaResourceName";

  // Whether the managed schema may be modified at runtime ("mutable" init arg).
  private boolean isMutable;
  private String managedSchemaResourceName;
  public String getManagedSchemaResourceName() { return managedSchemaResourceName; }

  private SolrConfig config;
  private SolrResourceLoader loader;
  public SolrResourceLoader getResourceLoader() { return loader; }
  // Name of the (possibly non-managed) schema resource passed to create().
  private String resourceName;
  private ManagedIndexSchema schema;
  private SolrCore core;
  private ZkIndexSchemaReader zkIndexSchemaReader;

  // The resource actually loaded: managedSchemaResourceName or resourceName.
  private String loadedResource;
  // Set when the non-managed schema was loaded and must be persisted as managed.
  private boolean shouldUpgrade = false;

  /**
   * Reads the "mutable" and managed-schema-name init args, rejecting a managed
   * schema named "schema.xml" and any unrecognised args.
   */
  @Override
  public void init(NamedList args) {
    SolrParams params = SolrParams.toSolrParams(args);
    isMutable = params.getBool("mutable", false);
    args.remove("mutable");
    managedSchemaResourceName = params.get(MANAGED_SCHEMA_RESOURCE_NAME, DEFAULT_MANAGED_SCHEMA_RESOURCE_NAME);
    args.remove(MANAGED_SCHEMA_RESOURCE_NAME);
    if (SCHEMA_DOT_XML.equals(managedSchemaResourceName)) {
      String msg = MANAGED_SCHEMA_RESOURCE_NAME + " can't be '" + SCHEMA_DOT_XML + "'";
      log.error(msg);
      throw new SolrException(ErrorCode.SERVER_ERROR, msg);
    }

    if (args.size() > 0) {
      String msg = "Unexpected arg(s): " + args;
      log.error(msg);
      throw new SolrException(ErrorCode.SERVER_ERROR, msg);
    }
  }

  /**
   * First, try to locate the managed schema file named in the managedSchemaResourceName
   * param. If the managed schema file exists and is accessible, it is used to instantiate
   * an IndexSchema.
   *
   * If the managed schema file can't be found, the resource named by the resourceName
   * parameter is used to instantiate an IndexSchema.
   *
   * Once the IndexSchema is instantiated, if the managed schema file does not exist,
   * the instantiated IndexSchema is persisted to the managed schema file named in the
   * managedSchemaResourceName param, in the directory given by
   * {@link org.apache.solr.core.SolrResourceLoader#getConfigDir()}, or if configs are
   * in ZooKeeper, under {@link org.apache.solr.cloud.ZkSolrResourceLoader#configSetZkPath}.
   *
   * After the managed schema file is persisted, the original schema file is
   * renamed by appending the extension named in {@link #UPGRADED_SCHEMA_EXTENSION}.
   */
  @Override
  public ManagedIndexSchema create(String resourceName, SolrConfig config) {
    this.resourceName = resourceName;
    this.config = config;
    this.loader = config.getResourceLoader();
    InputStream schemaInputStream = null;

    if (null == resourceName) {
      resourceName = IndexSchema.DEFAULT_SCHEMA_FILE;
    }

    // ZooKeeper znode version of the loaded managed schema; -1 when loaded locally
    // or when the non-managed schema was used.
    int schemaZkVersion = -1;
    if ( ! (loader instanceof ZkSolrResourceLoader)) {
      schemaInputStream = readSchemaLocally();
    } else { // ZooKeeper
      final ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader)loader;
      final SolrZkClient zkClient = zkLoader.getZkController().getZkClient();
      final String managedSchemaPath = zkLoader.getConfigSetZkPath() + "/" + managedSchemaResourceName;
      Stat stat = new Stat();
      try {
        // Attempt to load the managed schema
        byte[] data = zkClient.getData(managedSchemaPath, null, stat, true);
        schemaZkVersion = stat.getVersion();
        schemaInputStream = new ByteArrayInputStream(data);
        loadedResource = managedSchemaResourceName;
        warnIfNonManagedSchemaExists();
      } catch (InterruptedException e) {
        // Restore the interrupted status
        Thread.currentThread().interrupt();
        log.warn("", e);
      } catch (KeeperException.NoNodeException e) {
        log.info("The schema is configured as managed, but managed schema resource "
            + managedSchemaResourceName + " not found - loading non-managed schema " + resourceName + " instead");
      } catch (KeeperException e) {
        String msg = "Error attempting to access " + managedSchemaPath;
        log.error(msg, e);
        throw new SolrException(ErrorCode.SERVER_ERROR, msg, e);
      }
      if (null == schemaInputStream) {
        // The managed schema file could not be found - load the non-managed schema
        try {
          schemaInputStream = loader.openSchema(resourceName);
          loadedResource = resourceName;
          shouldUpgrade = true;
        } catch (Exception e) {
          try {
            // Retry to load the managed schema, in case it was created since the first attempt
            byte[] data = zkClient.getData(managedSchemaPath, null, stat, true);
            schemaZkVersion = stat.getVersion();
            schemaInputStream = new ByteArrayInputStream(data);
            warnIfNonManagedSchemaExists();
          } catch (Exception e1) {
            if (e1 instanceof InterruptedException) {
              Thread.currentThread().interrupt(); // Restore the interrupted status
            }
            final String msg = "Error loading both non-managed schema '" + resourceName + "' and managed schema '"
                             + managedSchemaResourceName + "'";
            log.error(msg, e);
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg, e);
          }
        }
      }
    }
    InputSource inputSource = new InputSource(schemaInputStream);
    inputSource.setSystemId(SystemIdResolver.createSystemIdFromResourceName(loadedResource));
    try {
      schema = new ManagedIndexSchema(config, loadedResource, inputSource, isMutable,
                                      managedSchemaResourceName, schemaZkVersion, getSchemaUpdateLock());
    } catch (KeeperException e) {
      final String msg = "Error instantiating ManagedIndexSchema";
      log.error(msg, e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg, e);
    } catch (InterruptedException e) {
      // Restore the interrupted status
      Thread.currentThread().interrupt();
      log.warn("", e);
    }

    if (shouldUpgrade) {
      // Persist the managed schema if it doesn't already exist
      upgradeToManagedSchema();
    }

    return schema;
  }

  /**
   * Loads the managed schema from local storage, falling back to the
   * non-managed schema (and marking it for upgrade) when not found.
   */
  private InputStream readSchemaLocally() {
    InputStream schemaInputStream = null;
    try {
      // Attempt to load the managed schema
      schemaInputStream = loader.openSchema(managedSchemaResourceName);
      loadedResource = managedSchemaResourceName;
      warnIfNonManagedSchemaExists();
    } catch (IOException e) {
      log.info("The schema is configured as managed, but managed schema resource "
          + managedSchemaResourceName + " not found - loading non-managed schema " + resourceName + " instead");
    }
    if (null == schemaInputStream) {
      // The managed schema file could not be found - load the non-managed schema
      try {
        schemaInputStream = loader.openSchema(resourceName);
        loadedResource = resourceName;
        shouldUpgrade = true;
      } catch (Exception e) {
        final String msg = "Error loading both non-managed schema '" + resourceName + "' and managed schema '"
                         + managedSchemaResourceName + "'";
        log.error(msg, e);
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg, e);
      }
    }
    return schemaInputStream;
  }

  /**
   * Logs a warning if a non-managed schema also exists, either in local storage
   * or on ZooKeeper. (Despite the historical wording, nothing is returned.)
   */
  private void warnIfNonManagedSchemaExists() {
    if ( ! resourceName.equals(managedSchemaResourceName)) {
      boolean exists = false;
      SolrResourceLoader loader = config.getResourceLoader();
      if (loader instanceof ZkSolrResourceLoader) {
        ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader)loader;
        String nonManagedSchemaPath = zkLoader.getConfigSetZkPath() + "/" + resourceName;
        try {
          exists = zkLoader.getZkController().pathExists(nonManagedSchemaPath);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt(); // Restore the interrupted status
          log.warn("", e); // Log as warning and suppress the exception
        } catch (KeeperException e) {
          // log as warning and suppress the exception
          log.warn("Error checking for the existence of the non-managed schema " + resourceName, e);
        }
      } else { // Config is not in ZooKeeper
        InputStream nonManagedSchemaInputStream = null;
        try {
          nonManagedSchemaInputStream = loader.openSchema(resourceName);
          if (null != nonManagedSchemaInputStream) {
            exists = true;
          }
        } catch (IOException e) {
          // This is expected when the non-managed schema does not exist
        } finally {
          IOUtils.closeQuietly(nonManagedSchemaInputStream);
        }
      }
      if (exists) {
        log.warn("The schema has been upgraded to managed, but the non-managed schema " + resourceName
                + " is still loadable.  PLEASE REMOVE THIS FILE.");
      }
    }
  }

  /**
   * Persist the managed schema and rename the non-managed schema
   * by appending {@link #UPGRADED_SCHEMA_EXTENSION}.
   *
   * Failure to rename the non-managed schema will be logged as a warning,
   * and no exception will be thrown.
   */
  private void upgradeToManagedSchema() {
    SolrResourceLoader loader = config.getResourceLoader();
    if (loader instanceof ZkSolrResourceLoader) {
      zkUgradeToManagedSchema();
    } else { // Configs are not on ZooKeeper
      schema.persistManagedSchema(true); // Only create it - don't update it if it already exists

      // After successfully persisting the managed schema, rename the non-managed
      // schema file by appending UPGRADED_SCHEMA_EXTENSION to its name.

      if (resourceName.equals(managedSchemaResourceName)) {
        log.info("On upgrading to managed schema, did not rename non-managed schema '"
            + resourceName + "' because it's the same as the managed schema's name.");
      } else {
        final File nonManagedSchemaFile = locateConfigFile(resourceName);
        if (null == nonManagedSchemaFile) {
          // Don't throw an exception for failure to rename the non-managed schema
          log.warn("On upgrading to managed schema, did not rename non-managed schema "
              + resourceName + " because it's neither an absolute file "
              + "nor under SolrConfig.getConfigDir() or the current directory."
              + "  PLEASE REMOVE THIS FILE.");
        } else {
          File upgradedSchemaFile = new File(nonManagedSchemaFile.getPath() + UPGRADED_SCHEMA_EXTENSION);
          if (nonManagedSchemaFile.renameTo(upgradedSchemaFile)) {
            // Set the resource name to the managed schema so that the CoreAdminHandler returns a findable filename
            schema.setResourceName(managedSchemaResourceName);

            log.info("After upgrading to managed schema, renamed the non-managed schema "
                    + nonManagedSchemaFile + " to " + upgradedSchemaFile);
          } else {
            // Don't throw an exception for failure to rename the non-managed schema
            log.warn("Can't rename " + nonManagedSchemaFile.toString() + " to "
                    + upgradedSchemaFile.toString() + " - PLEASE REMOVE THIS FILE.");
          }
        }
      }
    }
  }

  /**
   * Finds any resource by its name on the filesystem.  The classpath is not consulted.
   *
   * If the resource is not absolute, the resource is sought in $configDir and then in the current directory.
   *
   *@return the File for the named resource, or null if it can't be found
   */
  private File locateConfigFile(String resource) {
    File located = null;
    File file = new File(resource);
    if (file.isAbsolute()) {
      if (file.isFile() && file.canRead()) {
        located = file;
      }
    } else {
      // try $configDir/$resource
      File fileUnderConfigDir = new File(config.getResourceLoader().getConfigDir() + resource);
      if (fileUnderConfigDir.isFile() && fileUnderConfigDir.canRead()) {
        located = fileUnderConfigDir;
      } else {
        // no success with $configDir/$resource - try $CWD/$resource
        if (file.isFile() && file.canRead()) {
          located = file;
        }
      }
    }
    return located;
  }

  /**
   * Persist the managed schema to ZooKeeper and rename the non-managed schema
   * by appending {@link #UPGRADED_SCHEMA_EXTENSION}.
   *
   * Failure to rename the non-managed schema will be logged as a warning,
   * and no exception will be thrown.
   *
   * NOTE(review): method name has a typo ("Ugrade"); kept as-is since it is private.
   */
  private void zkUgradeToManagedSchema() {
    schema.persistManagedSchemaToZooKeeper(true); // Only create, don't update it if it already exists

    // After successfully persisting the managed schema, rename the non-managed
    // schema znode by appending UPGRADED_SCHEMA_EXTENSION to its name.

    if (resourceName.equals(managedSchemaResourceName)) {
      log.info("On upgrading to managed schema, did not rename non-managed schema "
          + resourceName + " because it's the same as the managed schema's name.");
    } else {
      // Rename the non-managed schema znode in ZooKeeper
      ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader)loader;
      final String nonManagedSchemaPath = zkLoader.getConfigSetZkPath() + "/" + resourceName;
      try {
        ZkController zkController = zkLoader.getZkController();
        ZkCmdExecutor zkCmdExecutor = new ZkCmdExecutor(zkController.getClientTimeout());
        if (zkController.pathExists(nonManagedSchemaPath)) {
          // ZooKeeper has no atomic rename: copy content to the new znode, then delete the old one.
          // First, copy the non-managed schema znode content to the upgraded schema znode
          byte[] bytes = zkController.getZkClient().getData(nonManagedSchemaPath, null, null, true);
          final String upgradedSchemaPath = nonManagedSchemaPath + UPGRADED_SCHEMA_EXTENSION;
          zkCmdExecutor.ensureExists(upgradedSchemaPath, zkController.getZkClient());
          zkController.getZkClient().setData(upgradedSchemaPath, bytes, true);
          // Then delete the non-managed schema znode
          zkController.getZkClient().delete(nonManagedSchemaPath, -1, true);

          // Set the resource name to the managed schema so that the CoreAdminHandler returns a findable filename
          schema.setResourceName(managedSchemaResourceName);

          log.info("After upgrading to managed schema in ZooKeeper, renamed the non-managed schema "
                  + nonManagedSchemaPath + " to " + upgradedSchemaPath);
        } else {
          log.info("After upgrading to managed schema in ZooKeeper, the non-managed schema "
                  + nonManagedSchemaPath + " no longer exists.");
        }
      } catch (Exception e) {
        if (e instanceof InterruptedException) {
          Thread.currentThread().interrupt(); // Restore the interrupted status
        }
        final String msg = "Error persisting managed schema resource " + managedSchemaResourceName;
        log.warn(msg, e); // Log as warning and suppress the exception
      }
    }
  }

  // Guards concurrent modifications of the managed schema.
  private Object schemaUpdateLock = new Object();
  public Object getSchemaUpdateLock() { return schemaUpdateLock; }

  /**
   * When configs are in ZooKeeper, installs a ZkIndexSchemaReader so the core
   * picks up schema changes made elsewhere in the cluster.
   */
  @Override
  public void inform(SolrCore core) {
    this.core = core;
    if (loader instanceof ZkSolrResourceLoader) {
      this.zkIndexSchemaReader = new ZkIndexSchemaReader(this);
      ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader)loader;
      zkLoader.setZkIndexSchemaReader(this.zkIndexSchemaReader);
    } else {
      this.zkIndexSchemaReader = null;
    }
  }

  public ManagedIndexSchema getSchema() {
    return schema;
  }

  // Replaces the factory's schema and publishes it as the core's latest schema.
  public void setSchema(ManagedIndexSchema schema) {
    this.schema = schema;
    core.setLatestSchema(schema);
  }
}
/*
 * Copyright 2011 Goldman Sachs.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.gs.collections.impl.set.sorted.immutable;

import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

import com.gs.collections.api.LazyIterable;
import com.gs.collections.api.block.function.Function;
import com.gs.collections.api.block.function.Function0;
import com.gs.collections.api.block.procedure.ObjectIntProcedure;
import com.gs.collections.api.block.procedure.Procedure2;
import com.gs.collections.api.list.ImmutableList;
import com.gs.collections.api.list.MutableList;
import com.gs.collections.api.map.sorted.MutableSortedMap;
import com.gs.collections.api.multimap.sortedset.ImmutableSortedSetMultimap;
import com.gs.collections.api.partition.set.sorted.PartitionImmutableSortedSet;
import com.gs.collections.api.set.MutableSet;
import com.gs.collections.api.set.sorted.ImmutableSortedSet;
import com.gs.collections.api.set.sorted.MutableSortedSet;
import com.gs.collections.api.set.sorted.SortedSetIterable;
import com.gs.collections.api.tuple.Pair;
import com.gs.collections.impl.block.factory.Comparators;
import com.gs.collections.impl.block.factory.Functions;
import com.gs.collections.impl.block.factory.Predicates;
import com.gs.collections.impl.block.function.AddFunction;
import com.gs.collections.impl.block.function.NegativeIntervalFunction;
import com.gs.collections.impl.block.function.PassThruFunction0;
import com.gs.collections.impl.block.procedure.CollectionAddProcedure;
import com.gs.collections.impl.factory.Lists;
import com.gs.collections.impl.factory.Sets;
import com.gs.collections.impl.factory.SortedSets;
import com.gs.collections.impl.list.Interval;
import com.gs.collections.impl.list.mutable.FastList;
import com.gs.collections.impl.multimap.set.sorted.TreeSortedSetMultimap;
import com.gs.collections.impl.set.mutable.UnifiedSet;
import com.gs.collections.impl.set.sorted.mutable.TreeSortedSet;
import com.gs.collections.impl.test.Verify;
import com.gs.collections.impl.tuple.Tuples;
import org.junit.Assert;
import org.junit.Test;

/**
 * Abstract contract test for {@link ImmutableSortedSet} implementations.
 *
 * <p>Concrete subclasses supply the set under test via the two {@code classUnderTest}
 * factory methods. All tests below assume that {@code classUnderTest()} returns a
 * non-empty set containing the consecutive integers {@code 1..size()} (several
 * assertions, e.g. in {@link #testContains()} and {@link #testGetFirst()}, rely on
 * exactly that shape). NOTE(review): this precondition is implied by the assertions,
 * not stated anywhere — confirm against the concrete subclasses.</p>
 */
public abstract class AbstractImmutableSortedSetTestCase
{
    /** The non-empty sorted set under test, ordered naturally. */
    protected abstract ImmutableSortedSet<Integer> classUnderTest();

    /** The same elements as {@link #classUnderTest()}, ordered by {@code comparator}. */
    protected abstract ImmutableSortedSet<Integer> classUnderTest(Comparator<? super Integer> comparator);

    // equals/hashCode must agree with an equivalent mutable sorted set, survive
    // serialization round-trips, and differ from a List of the same elements.
    @Test
    public void testEqualsAndHashCode()
    {
        ImmutableSortedSet<Integer> immutable = this.classUnderTest();
        MutableSortedSet<Integer> mutable = TreeSortedSet.newSet(immutable);
        Verify.assertEqualsAndHashCode(mutable, immutable);
        Verify.assertPostSerializedEqualsAndHashCode(immutable);
        Verify.assertNotEquals(FastList.newList(mutable), immutable);
    }

    // newWith: adding a new element grows the set; adding an existing element
    // returns the same instance (no copy); the comparator is preserved.
    @Test
    public void testNewWith()
    {
        ImmutableSortedSet<Integer> immutable = this.classUnderTest();
        Verify.assertSortedSetsEqual(TreeSortedSet.newSet(Interval.fromTo(0, immutable.size())), immutable.newWith(0).castToSortedSet());
        Assert.assertSame(immutable, immutable.newWith(immutable.size()));
        ImmutableSortedSet<Integer> set = this.classUnderTest(Comparators.<Integer>reverseNaturalOrder());
        Verify.assertSortedSetsEqual(TreeSortedSet.newSet(Comparators.<Integer>reverseNaturalOrder(), Interval.oneTo(set.size() + 1)), set.newWith(set.size() + 1).castToSortedSet());
    }

    // newWithout: removing a present element shrinks the set; removing an absent
    // element returns the same instance.
    @Test
    public void testNewWithout()
    {
        ImmutableSortedSet<Integer> immutable = this.classUnderTest();
        Verify.assertSortedSetsEqual(TreeSortedSet.newSet(Interval.oneTo(immutable.size() - 1)), immutable.newWithout(immutable.size()).castToSortedSet());
        Assert.assertSame(immutable, immutable.newWithout(immutable.size() + 1));
        ImmutableSortedSet<Integer> set = this.classUnderTest(Comparators.<Integer>reverseNaturalOrder());
        Verify.assertSortedSetsEqual(TreeSortedSet.newSet(Comparators.<Integer>reverseNaturalOrder(), Interval.oneTo(set.size() - 1)), set.newWithout(set.size()).castToSortedSet());
    }

    // newWithAll keeps the receiver's comparator even when the argument is unsorted.
    @Test
    public void testNewWithAll()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest(Collections.<Integer>reverseOrder());
        ImmutableSortedSet<Integer> withAll = set.newWithAll(UnifiedSet.newSet(Interval.fromTo(1, set.size() + 1)));
        Verify.assertNotEquals(set, withAll);
        Verify.assertSortedSetsEqual(TreeSortedSet.newSet(Comparators.<Integer>reverseNaturalOrder(), Interval.fromTo(1, set.size() + 1)), withAll.castToSortedSet());
    }

    // newWithoutAll(self) yields the empty set; removing disjoint ranges is a no-op.
    @Test
    public void testNewWithoutAll()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        ImmutableSortedSet<Integer> withoutAll = set.newWithoutAll(set);
        Assert.assertEquals(SortedSets.immutable.<Integer>of(), withoutAll);
        Assert.assertEquals(Sets.immutable.<Integer>of(), withoutAll);
        ImmutableSortedSet<Integer> largeWithoutAll = set.newWithoutAll(Interval.fromTo(101, 150));
        Assert.assertEquals(set, largeWithoutAll);
        ImmutableSortedSet<Integer> largeWithoutAll2 = set.newWithoutAll(UnifiedSet.newSet(Interval.fromTo(151, 199)));
        Assert.assertEquals(set, largeWithoutAll2);
    }

    // Relies on the set containing exactly 1..size().
    @Test
    public void testContains()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        for (int i = 1; i <= set.size(); i++)
        {
            Verify.assertContains(i, set.castToSortedSet());
        }
        Verify.assertNotContains(Integer.valueOf(set.size() + 1), set.castToSortedSet());
    }

    @Test
    public void testContainsAllArray()
    {
        Assert.assertTrue(this.classUnderTest().containsAllArguments(this.classUnderTest().toArray()));
    }

    @Test
    public void testContainsAllIterable()
    {
        Assert.assertTrue(this.classUnderTest().containsAllIterable(Interval.oneTo(this.classUnderTest().size())));
    }

    // forEach visits every element exactly once.
    @Test
    public void testForEach()
    {
        MutableSet<Integer> result = UnifiedSet.newSet();
        ImmutableSortedSet<Integer> collection = this.classUnderTest();
        collection.forEach(CollectionAddProcedure.<Integer>on(result));
        Assert.assertEquals(collection, result);
    }

    // forEachWith passes the extra argument (0 here, so the sum is the element itself)
    // and visits in sorted order.
    @Test
    public void testForEachWith()
    {
        final MutableList<Integer> result = Lists.mutable.of();
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        set.forEachWith(new Procedure2<Integer, Integer>()
        {
            public void value(Integer argument1, Integer argument2)
            {
                result.add(argument1 + argument2);
            }
        }, 0);
        Verify.assertListsEqual(result, set.toList());
    }

    // forEachWithIndex visits in sorted order (indices themselves are not asserted).
    @Test
    public void testForEachWithIndex()
    {
        final MutableList<Integer> result = Lists.mutable.of();
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        set.forEachWithIndex(new ObjectIntProcedure<Integer>()
        {
            public void value(Integer object, int index)
            {
                result.add(object);
            }
        });
        Verify.assertListsEqual(result, set.toList());
    }

    // select preserves the (reverse) comparator of the source set.
    @Test
    public void testSelect()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest(Collections.<Integer>reverseOrder());
        Verify.assertIterableEmpty(integers.select(Predicates.greaterThan(integers.size())));
        Verify.assertSortedSetsEqual(TreeSortedSet.newSet(Comparators.<Integer>reverseNaturalOrder(), Interval.oneTo(integers.size() - 1)), integers.select(Predicates.lessThan(integers.size())).castToSortedSet());
    }

    @Test
    public void testSelectWithTarget()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Verify.assertListsEqual(integers.toList(), integers.select(Predicates.lessThan(integers.size() + 1), FastList.<Integer>newList()));
        Verify.assertEmpty(
                integers.select(Predicates.greaterThan(integers.size()), FastList.<Integer>newList()));
    }

    // reject is the complement of select.
    @Test
    public void testReject()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest(Collections.<Integer>reverseOrder());
        Verify.assertEmpty(
                FastList.newList(integers.reject(Predicates.lessThan(integers.size() + 1))));
        Verify.assertSortedSetsEqual(integers.castToSortedSet(), integers.reject(Predicates.greaterThan(integers.size())).castToSortedSet());
    }

    @Test
    public void testRejectWithTarget()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Verify.assertEmpty(
                integers.reject(Predicates.lessThan(integers.size() + 1), FastList.<Integer>newList()));
        Verify.assertListsEqual(integers.toList(), integers.reject(Predicates.greaterThan(integers.size()), FastList.<Integer>newList()));
    }

    // Both partitions must inherit the source comparator.
    @Test
    public void partition()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest(Collections.<Integer>reverseOrder());
        PartitionImmutableSortedSet<Integer> partition = integers.partition(Predicates.greaterThan(integers.size()));
        Verify.assertIterableEmpty(partition.getSelected());
        Assert.assertEquals(integers, partition.getRejected());
        Assert.assertEquals(Collections.<Integer>reverseOrder(), partition.getSelected().comparator());
        Assert.assertEquals(Collections.<Integer>reverseOrder(), partition.getRejected().comparator());
    }

    // collect on a sorted set yields a list in iteration order.
    @Test
    public void testCollect()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest(Collections.<Integer>reverseOrder());
        Verify.assertListsEqual(integers.toList(), integers.collect(Functions.getIntegerPassThru()).castToList());
    }

    @Test
    public void testCollectWithTarget()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Assert.assertEquals(integers, integers.collect(Functions.getIntegerPassThru(), UnifiedSet.<Integer>newSet()));
        Verify.assertListsEqual(integers.toList(), integers.collect(Functions.getIntegerPassThru(), FastList.<Integer>newList()));
    }

    // flatCollect of singleton lists is equivalent to collect(toString).
    @Test
    public void flatCollect()
    {
        ImmutableList<String> actual = this.classUnderTest(Collections.<Integer>reverseOrder()).flatCollect(new Function<Integer, MutableList<String>>()
        {
            public MutableList<String> valueOf(Integer integer)
            {
                return Lists.fixedSize.of(String.valueOf(integer));
            }
        });
        ImmutableList<String> expected = this.classUnderTest(Collections.<Integer>reverseOrder()).collect(Functions.getToString());
        Assert.assertEquals(expected, actual);
        Verify.assertListsEqual(expected.toList(), actual.toList());
    }

    @Test
    public void flatCollectWithTarget()
    {
        MutableSet<String> actual = this.classUnderTest().flatCollect(new Function<Integer, MutableList<String>>()
        {
            public MutableList<String> valueOf(Integer integer)
            {
                return Lists.fixedSize.of(String.valueOf(integer));
            }
        }, UnifiedSet.<String>newSet());
        ImmutableList<String> expected = this.classUnderTest().collect(Functions.getToString());
        Verify.assertSetsEqual(expected.toSet(), actual);
    }

    /**
     * Value-type helper for the zip tests: equality and hash are based solely on
     * {@code number}, so two distinct {@code Holder} instances with the same value
     * compare equal (exercised in {@link #zip()}).
     */
    private static final class Holder
    {
        // The single value this holder wraps; drives equals/hashCode/toString.
        private final int number;

        private Holder(int i)
        {
            this.number = i;
        }

        @Override
        public boolean equals(Object o)
        {
            if (this == o)
            {
                return true;
            }
            if (o == null || this.getClass() != o.getClass())
            {
                return false;
            }
            Holder holder = (Holder) o;
            return this.number == holder.number;
        }

        @Override
        public int hashCode()
        {
            return this.number;
        }

        @Override
        public String toString()
        {
            return String.valueOf(this.number);
        }
    }

    // zip: pairs are truncated to the shorter operand; iteration order follows the
    // set's (reverse) comparator; zipping with a target set gives the same pairs.
    @Test
    public void zip()
    {
        ImmutableSortedSet<Integer> immutableSet = this.classUnderTest(Collections.<Integer>reverseOrder());
        List<Object> nulls = Collections.nCopies(immutableSet.size(), null);
        List<Object> nullsPlusOne = Collections.nCopies(immutableSet.size() + 1, null);
        List<Object> nullsMinusOne = Collections.nCopies(immutableSet.size() - 1, null);
        ImmutableSortedSet<Pair<Integer, Object>> pairs = immutableSet.zip(nulls);
        Assert.assertEquals(immutableSet.toList(), pairs.collect(Functions.<Integer>firstOfPair()));
        Verify.assertListsEqual(FastList.newList(Interval.fromTo(immutableSet.size(), 1)), pairs.collect(Functions.<Integer>firstOfPair()).toList());
        Assert.assertEquals(FastList.<Object>newList(nulls), pairs.collect(Functions.<Object>secondOfPair()));
        ImmutableSortedSet<Pair<Integer, Object>> pairsPlusOne = immutableSet.zip(nullsPlusOne);
        Assert.assertEquals(immutableSet.toList(), pairsPlusOne.collect(Functions.<Integer>firstOfPair()));
        Verify.assertListsEqual(FastList.newList(Interval.fromTo(immutableSet.size(), 1)), pairsPlusOne.collect(Functions.<Integer>firstOfPair()).castToList());
        Assert.assertEquals(FastList.<Object>newList(nulls), pairsPlusOne.collect(Functions.<Object>secondOfPair()));
        ImmutableSortedSet<Pair<Integer, Object>> pairsMinusOne = immutableSet.zip(nullsMinusOne);
        Verify.assertListsEqual(FastList.<Integer>newList(Interval.fromTo(immutableSet.size(), 2)), pairsMinusOne.collect(Functions.<Integer>firstOfPair()).castToList());
        Assert.assertEquals(immutableSet.zip(nulls), immutableSet.zip(nulls, UnifiedSet.<Pair<Integer, Object>>newSet()));
        FastList<Holder> holders = FastList.newListWith(new Holder(1), new Holder(2), new Holder(3));
        ImmutableSortedSet<Pair<Integer, Holder>> zipped = immutableSet.zip(holders);
        Verify.assertSize(3, zipped.castToSortedSet());
        // Two distinct Holder(-1) instances: pair equality must use Holder.equals.
        AbstractImmutableSortedSetTestCase.Holder two = new Holder(-1);
        AbstractImmutableSortedSetTestCase.Holder two1 = new Holder(-1);
        Assert.assertEquals(Tuples.pair(10, two1), zipped.newWith(Tuples.pair(10, two)).getFirst());
        Assert.assertEquals(Tuples.pair(1, new Holder(3)), this.classUnderTest().zip(holders.reverseThis()).getFirst());
    }

    // zipWithIndex: indices run 0..size-1 in iteration order.
    @Test
    public void zipWithIndex()
    {
        ImmutableSortedSet<Integer> immutableSet = this.classUnderTest(Collections.<Integer>reverseOrder());
        ImmutableSortedSet<Pair<Integer, Integer>> pairs = immutableSet.zipWithIndex();
        Assert.assertEquals(immutableSet.toList(), pairs.collect(Functions.<Integer>firstOfPair()));
        Assert.assertEquals(
                Interval.zeroTo(immutableSet.size() - 1).toList(),
                pairs.collect(Functions.<Integer>secondOfPair()));
        Assert.assertEquals(
                immutableSet.zipWithIndex(),
                immutableSet.zipWithIndex(UnifiedSet.<Pair<Integer, Integer>>newSet()));
        Verify.assertListsEqual(TreeSortedSet.newSet(Collections.<Integer>reverseOrder(), Interval.oneTo(immutableSet.size())).toList(), pairs.collect(Functions.<Integer>firstOfPair()).toList());
    }

    @Test(expected = IllegalArgumentException.class)
    public void chunk_zero_throws()
    {
        this.classUnderTest().chunk(0);
    }

    // A chunk size larger than the set yields the whole set as the single chunk.
    @Test
    public void chunk_large_size()
    {
        Assert.assertEquals(this.classUnderTest(), this.classUnderTest().chunk(10).getFirst());
        Verify.assertInstanceOf(ImmutableSortedSet.class, this.classUnderTest().chunk(10).getFirst());
    }

    @Test
    public void testDetect()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Assert.assertEquals(Integer.valueOf(1), integers.detect(Predicates.equal(1)));
        Assert.assertNull(integers.detect(Predicates.equal(integers.size() + 1)));
    }

    // detectIfNone falls back to the Function0 only when nothing matches.
    @Test
    public void testDetectIfNoneWithBlock()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Function0<Integer> function = new PassThruFunction0<Integer>(integers.size() + 1);
        Assert.assertEquals(Integer.valueOf(1), integers.detectIfNone(Predicates.equal(1), function));
        Assert.assertEquals(Integer.valueOf(integers.size() + 1), integers.detectIfNone(Predicates.equal(integers.size() + 1), function));
    }

    @Test
    public void testAllSatisfy()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Assert.assertTrue(integers.allSatisfy(Predicates.instanceOf(Integer.class)));
        Assert.assertFalse(integers.allSatisfy(Predicates.equal(0)));
    }

    @Test
    public void testAnySatisfy()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Assert.assertFalse(integers.anySatisfy(Predicates.instanceOf(String.class)));
        Assert.assertTrue(integers.anySatisfy(Predicates.instanceOf(Integer.class)));
    }

    @Test
    public void testCount()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Assert.assertEquals(integers.size(), integers.count(Predicates.instanceOf(Integer.class)));
        Assert.assertEquals(0, integers.count(Predicates.instanceOf(String.class)));
    }

    @Test
    public void testCollectIf()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest(Collections.<Integer>reverseOrder());
        Verify.assertListsEqual(integers.toList(), integers.collectIf(Predicates.instanceOf(Integer.class), Functions.getIntegerPassThru()).toList());
    }

    @Test
    public void testCollectIfWithTarget()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Verify.assertSetsEqual(integers.toSet(), integers.collectIf(Predicates.instanceOf(Integer.class), Functions.getIntegerPassThru(), UnifiedSet.<Integer>newSet()));
    }

    // getFirst/getLast follow the comparator: smallest first naturally, largest
    // first under reverse order.
    @Test
    public void testGetFirst()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Assert.assertEquals(Integer.valueOf(1), integers.getFirst());
        ImmutableSortedSet<Integer> revInt = this.classUnderTest(Collections.<Integer>reverseOrder());
        Assert.assertEquals(Integer.valueOf(revInt.size()), revInt.getFirst());
    }

    @Test
    public void testGetLast()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Assert.assertEquals(Integer.valueOf(integers.size()), integers.getLast());
        ImmutableSortedSet<Integer> revInt = this.classUnderTest(Collections.<Integer>reverseOrder());
        Assert.assertEquals(Integer.valueOf(1), revInt.getLast());
    }

    @Test
    public void testIsEmpty()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        Assert.assertFalse(set.isEmpty());
        Assert.assertTrue(set.notEmpty());
    }

    // The iterator yields 1..size in order, throws NoSuchElementException when
    // exhausted, and must not support remove().
    @Test
    public void testIterator()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        final Iterator<Integer> iterator = integers.iterator();
        for (int i = 0; iterator.hasNext(); i++)
        {
            Integer integer = iterator.next();
            Assert.assertEquals(i + 1, integer.intValue());
        }
        Verify.assertThrows(NoSuchElementException.class, new Runnable()
        {
            public void run()
            {
                iterator.next();
            }
        });
        final Iterator<Integer> intItr = integers.iterator();
        intItr.next();
        Verify.assertThrows(UnsupportedOperationException.class, new Runnable()
        {
            public void run()
            {
                intItr.remove();
            }
        });
    }

    @Test
    public void testInjectInto()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        Integer result = integers.injectInto(0, AddFunction.INTEGER);
        Assert.assertEquals(FastList.newList(integers).injectInto(0, AddFunction.INTEGER_TO_INT), result.intValue());
    }

    @Test
    public void testToArray()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest(Collections.<Integer>reverseOrder());
        MutableList<Integer> copy = FastList.newList(integers);
        Assert.assertArrayEquals(integers.toArray(), copy.toArray());
        Assert.assertArrayEquals(integers.toArray(new Integer[integers.size()]), copy.toArray(new Integer[integers.size()]));
    }

    @Test
    public void testToString()
    {
        Assert.assertEquals(FastList.newList(this.classUnderTest()).toString(), this.classUnderTest().toString());
    }

    @Test
    public void testMakeString()
    {
        Assert.assertEquals(FastList.newList(this.classUnderTest()).makeString(), this.classUnderTest().makeString());
    }

    @Test
    public void testAppendString()
    {
        Appendable builder = new StringBuilder();
        this.classUnderTest().appendString(builder);
        Assert.assertEquals(FastList.newList(this.classUnderTest()).makeString(), builder.toString());
    }

    @Test
    public void toList()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest(Collections.<Integer>reverseOrder());
        MutableList<Integer> list = integers.toList();
        Verify.assertEqualsAndHashCode(FastList.newList(integers), list);
    }

    @Test
    public void toSortedList()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        MutableList<Integer> copy = FastList.newList(integers);
        MutableList<Integer> list = integers.toSortedList(Collections.<Integer>reverseOrder());
        Assert.assertEquals(copy.sortThis(Collections.<Integer>reverseOrder()), list);
        MutableList<Integer> list2 = integers.toSortedList();
        Verify.assertListsEqual(copy.sortThis(), list2);
    }

    // Sorting 1..size by toString is the same as natural order only for sizes < 10;
    // NOTE(review): this assertion implicitly bounds the fixture size — confirm.
    @Test
    public void toSortedListBy()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        MutableList<Integer> list = integers.toSortedListBy(Functions.getToString());
        Assert.assertEquals(integers.toList(), list);
    }

    // NOTE(review): hard-codes {1, 2, 3, 4}, unlike the size()-relative assertions
    // elsewhere — assumes the fixture has exactly four elements; confirm.
    @Test
    public void toSortedSet()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest(Collections.<Integer>reverseOrder());
        MutableSortedSet<Integer> set = integers.toSortedSet();
        Verify.assertSortedSetsEqual(TreeSortedSet.newSetWith(1, 2, 3, 4), set);
    }

    @Test
    public void toSortedSetWithComparator()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        MutableSortedSet<Integer> set = integers.toSortedSet(Collections.<Integer>reverseOrder());
        Assert.assertEquals(integers.toSet(), set);
        Assert.assertEquals(integers.toSortedList(Comparators.<Integer>reverseNaturalOrder()), set.toList());
    }

    @Test
    public void toSortedSetBy()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        MutableSortedSet<Integer> set = integers.toSortedSetBy(Functions.getToString());
        Verify.assertSortedSetsEqual(TreeSortedSet.newSet(integers), set);
    }

    @Test
    public void toSortedMap()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        MutableSortedMap<Integer, String> map = integers.toSortedMap(Functions.getIntegerPassThru(), Functions.getToString());
        Verify.assertMapsEqual(integers.toMap(Functions.getIntegerPassThru(), Functions.getToString()), map);
        Verify.assertListsEqual(Interval.oneTo(integers.size()), map.keySet().toList());
    }

    @Test
    public void toSortedMap_with_comparator()
    {
        ImmutableSortedSet<Integer> integers = this.classUnderTest();
        MutableSortedMap<Integer, String> map = integers.toSortedMap(Comparators.<Integer>reverseNaturalOrder(), Functions.getIntegerPassThru(), Functions.getToString());
        Verify.assertMapsEqual(integers.toMap(Functions.getIntegerPassThru(), Functions.getToString()), map);
        Verify.assertListsEqual(Interval.fromTo(integers.size(), 1), map.keySet().toList());
    }

    // Enhanced-for must work and never yield null.
    @Test
    public void testForLoop()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        for (Integer each : set)
        {
            Assert.assertNotNull(each);
        }
    }

    // The castToSortedSet() view must reject every mutating operation.
    @Test(expected = UnsupportedOperationException.class)
    public void testIteratorRemove()
    {
        this.classUnderTest().iterator().remove();
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testAdd()
    {
        this.classUnderTest().castToSortedSet().add(1);
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testRemove()
    {
        this.classUnderTest().castToSortedSet().remove(Integer.valueOf(1));
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testClear()
    {
        this.classUnderTest().castToSortedSet().clear();
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testRemoveAll()
    {
        this.classUnderTest().castToSortedSet().removeAll(Lists.fixedSize.of());
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testRetainAll()
    {
        this.classUnderTest().castToSortedSet().retainAll(Lists.fixedSize.of());
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testAddAll()
    {
        this.classUnderTest().castToSortedSet().addAll(Lists.fixedSize.<Integer>of());
    }

    @Test
    public void min()
    {
        Assert.assertEquals(Integer.valueOf(1), this.classUnderTest().min(Comparators.naturalOrder()));
    }

    // max under a reversed comparator is the natural minimum.
    @Test
    public void max()
    {
        Assert.assertEquals(Integer.valueOf(1), this.classUnderTest().max(Comparators.reverse(Comparators.naturalOrder())));
    }

    @Test
    public void min_without_comparator()
    {
        Assert.assertEquals(Integer.valueOf(1), this.classUnderTest().min());
    }

    @Test
    public void max_without_comparator()
    {
        Assert.assertEquals(Integer.valueOf(this.classUnderTest().size()), this.classUnderTest().max());
    }

    @Test
    public void minBy()
    {
        Assert.assertEquals(Integer.valueOf(1), this.classUnderTest().minBy(Functions.getToString()));
    }

    @Test
    public void maxBy()
    {
        Assert.assertEquals(Integer.valueOf(this.classUnderTest().size()), this.classUnderTest().maxBy(Functions.getToString()));
    }

    // groupBy on an immutable set matches the mutable equivalent made immutable.
    @Test
    public void groupBy()
    {
        ImmutableSortedSet<Integer> undertest = this.classUnderTest();
        ImmutableSortedSetMultimap<Integer, Integer> actual = undertest.groupBy(Functions.<Integer>getPassThru());
        ImmutableSortedSetMultimap<Integer, Integer> expected = TreeSortedSet.newSet(undertest).groupBy(Functions.<Integer>getPassThru()).toImmutable();
        Assert.assertEquals(expected, actual);
    }

    @Test
    public void groupByEach()
    {
        ImmutableSortedSet<Integer> undertest = this.classUnderTest(Collections.<Integer>reverseOrder());
        NegativeIntervalFunction function = new NegativeIntervalFunction();
        ImmutableSortedSetMultimap<Integer, Integer> actual = undertest.groupByEach(function);
        ImmutableSortedSetMultimap<Integer, Integer> expected = TreeSortedSet.newSet(undertest).groupByEach(function).toImmutable();
        Assert.assertEquals(expected, actual);
    }

    @Test
    public void groupByWithTarget()
    {
        ImmutableSortedSet<Integer> undertest = this.classUnderTest();
        TreeSortedSetMultimap<Integer, Integer> actual = undertest.groupBy(Functions.<Integer>getPassThru(), TreeSortedSetMultimap.<Integer, Integer>newMultimap());
        TreeSortedSetMultimap<Integer, Integer> expected = TreeSortedSet.newSet(undertest).groupBy(Functions.<Integer>getPassThru());
        Assert.assertEquals(expected, actual);
    }

    @Test
    public void groupByEachWithTarget()
    {
        ImmutableSortedSet<Integer> undertest = this.classUnderTest();
        NegativeIntervalFunction function = new NegativeIntervalFunction();
        TreeSortedSetMultimap<Integer, Integer> actual = undertest.groupByEach(function, TreeSortedSetMultimap.<Integer, Integer>newMultimap());
        TreeSortedSetMultimap<Integer, Integer> expected = TreeSortedSet.newSet(undertest).groupByEach(function);
        Assert.assertEquals(expected, actual);
    }

    // Set algebra: union adds the overlapping range [size, size+3] (size itself
    // is already present, hence +3 new elements).
    @Test
    public void union()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        ImmutableSortedSet<Integer> union = set.union(UnifiedSet.newSet(Interval.fromTo(set.size(), set.size() + 3)));
        Verify.assertSize(set.size() + 3, union.castToSortedSet());
        Verify.assertSortedSetsEqual(TreeSortedSet.newSet(Interval.oneTo(set.size() + 3)), union.castToSortedSet());
        Assert.assertEquals(set, set.union(UnifiedSet.<Integer>newSet()));
    }

    @Test
    public void unionInto()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        MutableSet<Integer> union = set.unionInto(UnifiedSet.newSet(Interval.fromTo(set.size(), set.size() + 3)), UnifiedSet.<Integer>newSet());
        Verify.assertSize(set.size() + 3, union);
        Assert.assertTrue(union.containsAllIterable(Interval.oneTo(set.size() + 3)));
        Assert.assertEquals(set, set.unionInto(UnifiedSet.<Integer>newSetWith(), UnifiedSet.<Integer>newSet()));
    }

    @Test
    public void intersect()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest(Collections.<Integer>reverseOrder());
        ImmutableSortedSet<Integer> intersect = set.intersect(UnifiedSet.newSet(Interval.oneTo(set.size() + 2)));
        Verify.assertSize(set.size(), intersect.castToSortedSet());
        Verify.assertSortedSetsEqual(set.castToSortedSet(), intersect.castToSortedSet());
    }

    @Test
    public void intersectInto()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        MutableSet<Integer> intersect = set.intersectInto(UnifiedSet.newSet(Interval.oneTo(set.size() + 2)), UnifiedSet.<Integer>newSet());
        Verify.assertSize(set.size(), intersect);
        Assert.assertEquals(set, intersect);
        Verify.assertEmpty(set.intersectInto(UnifiedSet.newSet(Interval.fromTo(set.size() + 1, set.size() + 4)), UnifiedSet.<Integer>newSet()));
    }

    // difference with [2, size+1] leaves only {1}; extra absent elements in the
    // subtrahend make no difference.
    @Test
    public void difference()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        ImmutableSortedSet<Integer> difference = set.difference(UnifiedSet.newSet(Interval.fromTo(2, set.size() + 1)));
        Verify.assertSortedSetsEqual(TreeSortedSet.newSetWith(1), difference.castToSortedSet());
        ImmutableSortedSet<Integer> difference2 = set.difference(UnifiedSet.newSet(Interval.fromTo(2, set.size() + 2)));
        Verify.assertSortedSetsEqual(TreeSortedSet.newSetWith(1), difference2.castToSortedSet());
    }

    @Test
    public void differenceInto()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        MutableSet<Integer> difference = set.differenceInto(UnifiedSet.newSet(Interval.fromTo(2, set.size() + 1)), UnifiedSet.<Integer>newSet());
        Verify.assertSetsEqual(UnifiedSet.newSetWith(1), difference);
    }

    // symmetric difference of 1..size with 2..size+1 is {1, size+1}.
    @Test
    public void symmetricDifference()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest(Collections.<Integer>reverseOrder());
        ImmutableSortedSet<Integer> difference = set.symmetricDifference(UnifiedSet.newSet(Interval.fromTo(2, set.size() + 1)));
        Verify.assertSortedSetsEqual(TreeSortedSet.newSetWith(Comparators.<Integer>reverseNaturalOrder(), 1, set.size() + 1), difference.castToSortedSet());
    }

    @Test
    public void symmetricDifferenceInto()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        MutableSet<Integer> difference = set.symmetricDifferenceInto(UnifiedSet.newSet(Interval.fromTo(2, set.size() + 1)), UnifiedSet.<Integer>newSet());
        Verify.assertSetsEqual(UnifiedSet.newSetWith(1, set.size() + 1), difference);
    }

    @Test
    public void isSubsetOf()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        Assert.assertTrue(set.isSubsetOf(set));
    }

    @Test
    public void isProperSubsetOf()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        Assert.assertTrue(set.isProperSubsetOf(Interval.oneTo(set.size() + 1).toSet()));
        Assert.assertFalse(set.isProperSubsetOf(set));
    }

    // powerSet has 2^n members, including the empty set and the set itself.
    @Test
    public void powerSet()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        ImmutableSortedSet<SortedSetIterable<Integer>> powerSet = set.powerSet();
        Verify.assertSize((int) StrictMath.pow(2, set.size()), powerSet.castToSortedSet());
        // NOTE(review): the <String> type witness looks like a typo for <Integer>;
        // harmless because the set is empty, but confirm intent.
        Verify.assertContains(UnifiedSet.<String>newSet(), powerSet.toSet());
        Verify.assertContains(set, powerSet.toSet());
        Verify.assertInstanceOf(ImmutableSortedSet.class, powerSet);
        Verify.assertInstanceOf(ImmutableSortedSet.class, powerSet.getLast());
    }

    // Cartesian product with an equal-size set has n*n pairs; filtering on the
    // second element recovers the original set from the first elements.
    @Test
    public void cartesianProduct()
    {
        ImmutableSortedSet<Integer> set = this.classUnderTest();
        LazyIterable<Pair<Integer, Integer>> cartesianProduct = set.cartesianProduct(UnifiedSet.newSet(Interval.oneTo(set.size())));
        Assert.assertEquals((long) (set.size() * set.size()), (long) cartesianProduct.size());
        Assert.assertEquals(set, cartesianProduct
                .select(Predicates.attributeEqual(Functions.<Integer>secondOfPair(), 1))
                .collect(Functions.<Integer>firstOfPair()).toSet());
    }

    // Range-view behavior is implementation-specific; subclasses must cover it.
    @Test
    public abstract void subSet();

    @Test
    public abstract void headSet();

    @Test
    public abstract void tailSet();
}
/*<license>
Copyright 2008 - $Date$ by PeopleWare n.v..

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/

package org.ppwcode.vernacular.persistence_III.jpa.test.semantics;

import static org.ppwcode.metainfo_I.License.Type.APACHE_V2;

import java.util.Date;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;

import org.ppwcode.metainfo_I.Copyright;
import org.ppwcode.metainfo_I.License;
import org.ppwcode.metainfo_I.vcs.SvnInfo;
import org.ppwcode.vernacular.persistence_III.jpa.AbstractIntegerIdVersionedPersistentBean;
import org.ppwcode.vernacular.semantics_VI.exception.CompoundPropertyException;
import org.ppwcode.vernacular.semantics_VI.exception.PropertyException;
import org.toryt.annotations_I.Basic;
import org.toryt.annotations_I.Expression;
import org.toryt.annotations_I.MethodContract;
import org.toryt.annotations_I.Throw;

/**
 * Master has references to DetailA and DetailB.
 * In addition, it contains a number of properties.
 *
 * <p>Test-semantics JPA entity mapped to table {@code x}. Properties:
 * {@code period} (nullable date), {@code description} (may be null but never
 * the empty string), {@code locked} (flag), {@code e} (required owning
 * many-to-one, bidirectional — see {@link #setE(E)}), and {@code y}
 * (required many-to-one). Civil-code validation is centralized in
 * {@link #wildExceptions()}.</p>
 */
@Entity
@Table(name = "x")
@Copyright("2008 - $Date$, PeopleWare n.v.")
@License(APACHE_V2)
@SvnInfo(revision = "$Revision$", date = "$Date$")
public class X extends AbstractIntegerIdVersionedPersistentBean {

  /** Sentinel for the forbidden empty description. */
  public static final String EMPTY = "";

  /*<property name="period">
  -------------------------------------------------------------------------*/

  @Basic(
      init = @Expression("period == null")
  )
  public Date getPeriod() {
    return $period;
  }

  // NOTE(review): declares PropertyException but never throws it — the setter
  // accepts any value, including null; nullity is flagged later by wildExceptions().
  @MethodContract(
    post = @Expression("period == _period")
  )
  public void setPeriod(Date _period) throws PropertyException {
    $period = _period;
  }

  // Backing field; '$' prefix is the ppwcode convention for property storage.
  @Column(name = "period")
  private Date $period = null;

  /*</property>*/


  /*<property name="description">
  -------------------------------------------------------------------------*/

  @Basic(invars = @Expression("description != EMPTY"),
         init = @Expression("description == null"))
  public String getDescription() {
    return $description;
  }

  // Rejects the empty string eagerly; null is accepted here and reported by
  // wildExceptions() instead.
  @MethodContract(
    post = @Expression("description == _description"),
    exc = @Throw(type = PropertyException.class,
                 cond = @Expression("_description == null || ''.equals(_description)"))
  )
  public void setDescription(String _description) throws PropertyException {
    if (EMPTY.equals(_description)) {
      throw new PropertyException(this, "description", "EMPTY_NOT_ALLOWED", null);
    }
    this.$description = _description;
  }

  @Column(name = "description")
  private String $description;

  /*</property>*/


  /*<property name="locked">
  -------------------------------------------------------------------------*/

  @Basic(init = @Expression("locked == false"))
  public boolean getLocked() {
    return $locked;
  }

  @MethodContract(
    post = @Expression("locked == _locked")
  )
  public void setLocked(boolean _locked) {
    this.$locked = _locked;
  }

  @Column(name = "locked")
  private boolean $locked;

  /*</property>*/


  /*<property name="e">
  -------------------------------------------------------------------------*/

  @Basic(invars = @Expression("e != null ? e.xs.contains(this)"),
         init = @Expression("e == null"))
  public E getE() {
    return $e;
  }

  // Maintains the bidirectional association: detaches this X from its previous
  // owner (if any) before registering with the new one. Rejects null — an X
  // must always belong to an E once assigned.
  @MethodContract(
    post = @Expression("e == _e"),
    exc = @Throw(type = PropertyException.class,
                 cond = @Expression("_e == null"))
  )
  public void setE(E e) throws PropertyException {
    if (e == null) {
      throw new PropertyException(this, "e", "NULL_NOT_ALLOWED", null);
    }
    if ($e != null) {
      $e.removeX(this);
    }
    $e = e;
    $e.addX(this);
  }

  @ManyToOne(cascade = {}, fetch = FetchType.EAGER, optional = false)
  @JoinColumn(name = "e_fk")
  private E $e;

  /*</property>*/


  /*<property name="y">
  -------------------------------------------------------------------------*/

  @Basic(init = @Expression("$y == null"))
  public Y getY() {
    return $y;
  }

  // NOTE(review): unlike setE, this setter does not guard against null and does
  // no inverse-side bookkeeping — presumably Y is a unidirectional reference;
  // nullity is reported by wildExceptions().
  @MethodContract(
    post = @Expression("$y == y")
  )
  public void setY(Y y) {
    $y = y;
  }

  @ManyToOne(cascade = {}, fetch = FetchType.EAGER, optional = false)
  @JoinColumn(name = "y_fk")
  private Y $y;

  /*</property>*/


  /**
   * Accumulates civil-code violations on top of the superclass checks:
   * each of period, description, e and y must be non-null at commit time.
   * Element exceptions are added to (not thrown from) the returned compound.
   */
  @Override
  @MethodContract(
    post = {
      @Expression("period == null ? result.contains(this, 'period', 'NULL_NOT_ALLOWED', null)"),
      @Expression("description == null ? result.contains(this, 'description', 'NULL_NOT_ALLOWED', null)"),
      @Expression("e == null ? result.contains(this, 'e', 'NULL_NOT_ALLOWED', null)"),
      @Expression("y == null ? result.contains(this, 'y', 'NULL_NOT_ALLOWED', null)")
    }
  )
  public CompoundPropertyException wildExceptions() {
    CompoundPropertyException cpe = super.wildExceptions();
    if (getPeriod() == null) {
      cpe.addElementException((new PropertyException(this, "period", "NULL_NOT_ALLOWED", null)));
    }
    if (getDescription() == null) {
      cpe.addElementException((new PropertyException(this, "description", "NULL_NOT_ALLOWED", null)));
    }
    if (getY() == null) {
      cpe.addElementException((new PropertyException(this, "y", "NULL_NOT_ALLOWED", null)));
    }
    if (getE() == null) {
      cpe.addElementException((new PropertyException(this, "e", "NULL_NOT_ALLOWED", null)));
    }
    return cpe;
  }

}
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.source.hls; import android.net.Uri; import android.os.SystemClock; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.source.BehindLiveWindowException; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.chunk.BaseMediaChunkIterator; import com.google.android.exoplayer2.source.chunk.Chunk; import com.google.android.exoplayer2.source.chunk.DataChunk; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Segment; import com.google.android.exoplayer2.source.hls.playlist.HlsPlaylistTracker; import com.google.android.exoplayer2.trackselection.BaseTrackSelection; import com.google.android.exoplayer2.trackselection.TrackSelection; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.TimestampAdjuster; import com.google.android.exoplayer2.util.UriUtil; 
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

/** Source of Hls (possibly adaptive) chunks. */
/* package */ class HlsChunkSource {

  /**
   * Chunk holder that allows the scheduling of retries.
   *
   * <p>Exactly one of {@link #chunk}, {@link #endOfStream} or {@link #playlistUrl} is populated by
   * {@link #getNextChunk}; {@link #clear()} resets all three.
   */
  public static final class HlsChunkHolder {

    public HlsChunkHolder() {
      clear();
    }

    /** The chunk to be loaded next. */
    @Nullable public Chunk chunk;

    /**
     * Indicates that the end of the stream has been reached.
     */
    public boolean endOfStream;

    /** Indicates that the chunk source is waiting for the referred playlist to be refreshed. */
    @Nullable public Uri playlistUrl;

    /**
     * Clears the holder.
     */
    public void clear() {
      chunk = null;
      endOfStream = false;
      playlistUrl = null;
    }
  }

  /**
   * The maximum number of keys that the key cache can hold. This value must be 2 or greater in
   * order to hold initialization segment and media segment keys simultaneously.
   */
  private static final int KEY_CACHE_SIZE = 4;

  private final HlsExtractorFactory extractorFactory;
  private final DataSource mediaDataSource;
  private final DataSource encryptionDataSource;
  private final TimestampAdjusterProvider timestampAdjusterProvider;
  private final Uri[] playlistUrls;
  private final Format[] playlistFormats;
  private final HlsPlaylistTracker playlistTracker;
  private final TrackGroup trackGroup;
  @Nullable private final List<Format> muxedCaptionFormats;
  // Cache of full-segment encryption keys, keyed by key URI. Bounded by KEY_CACHE_SIZE.
  private final FullSegmentEncryptionKeyCache keyCache;

  private boolean isTimestampMaster;
  // Reusable buffer handed to EncryptionKeyChunk and recovered in onChunkLoadCompleted.
  private byte[] scratchSpace;
  @Nullable private IOException fatalError;
  // Playlist we are waiting on; paired with seenExpectedPlaylistError to surface refresh errors.
  @Nullable private Uri expectedPlaylistUrl;
  // Mirrors HlsMediaPlaylist.hasIndependentSegments of the last selected playlist.
  private boolean independentSegments;

  // Note: The track group in the selection is typically *not* equal to trackGroup. This is due to
  // the way in which HlsSampleStreamWrapper generates track groups. Use only index based methods
  // in TrackSelection to avoid unexpected behavior.
  private TrackSelection trackSelection;
  private long liveEdgeInPeriodTimeUs;
  private boolean seenExpectedPlaylistError;

  /**
   * @param extractorFactory An {@link HlsExtractorFactory} from which to obtain the extractors for
   *     media chunks.
   * @param playlistTracker The {@link HlsPlaylistTracker} from which to obtain media playlists.
   * @param playlistUrls The {@link Uri}s of the media playlists that can be adapted between by this
   *     chunk source.
   * @param playlistFormats The {@link Format Formats} corresponding to the media playlists.
   * @param dataSourceFactory An {@link HlsDataSourceFactory} to create {@link DataSource}s for the
   *     chunks.
   * @param mediaTransferListener The transfer listener which should be informed of any media data
   *     transfers. May be null if no listener is available.
   * @param timestampAdjusterProvider A provider of {@link TimestampAdjuster} instances. If multiple
   *     {@link HlsChunkSource}s are used for a single playback, they should all share the same
   *     provider.
   * @param muxedCaptionFormats List of muxed caption {@link Format}s. Null if no closed caption
   *     information is available in the master playlist.
   */
  public HlsChunkSource(
      HlsExtractorFactory extractorFactory,
      HlsPlaylistTracker playlistTracker,
      Uri[] playlistUrls,
      Format[] playlistFormats,
      HlsDataSourceFactory dataSourceFactory,
      @Nullable TransferListener mediaTransferListener,
      TimestampAdjusterProvider timestampAdjusterProvider,
      @Nullable List<Format> muxedCaptionFormats) {
    this.extractorFactory = extractorFactory;
    this.playlistTracker = playlistTracker;
    this.playlistUrls = playlistUrls;
    this.playlistFormats = playlistFormats;
    this.timestampAdjusterProvider = timestampAdjusterProvider;
    this.muxedCaptionFormats = muxedCaptionFormats;
    keyCache = new FullSegmentEncryptionKeyCache(KEY_CACHE_SIZE);
    scratchSpace = Util.EMPTY_BYTE_ARRAY;
    liveEdgeInPeriodTimeUs = C.TIME_UNSET;
    mediaDataSource = dataSourceFactory.createDataSource(C.DATA_TYPE_MEDIA);
    if (mediaTransferListener != null) {
      mediaDataSource.addTransferListener(mediaTransferListener);
    }
    // Note: only the media data source gets the transfer listener; key loads are not reported.
    encryptionDataSource = dataSourceFactory.createDataSource(C.DATA_TYPE_DRM);
    trackGroup = new TrackGroup(playlistFormats);
    // Initial selection exposes all tracks in declaration order, until setTrackSelection is called.
    int[] initialTrackSelection = new int[playlistUrls.length];
    for (int i = 0; i < playlistUrls.length; i++) {
      initialTrackSelection[i] = i;
    }
    trackSelection = new InitializationTrackSelection(trackGroup, initialTrackSelection);
  }

  /**
   * If the source is currently having difficulty providing chunks, then this method throws the
   * underlying error. Otherwise does nothing.
   *
   * @throws IOException The underlying error.
   */
  public void maybeThrowError() throws IOException {
    if (fatalError != null) {
      throw fatalError;
    }
    if (expectedPlaylistUrl != null && seenExpectedPlaylistError) {
      playlistTracker.maybeThrowPlaylistRefreshError(expectedPlaylistUrl);
    }
  }

  /**
   * Returns the track group exposed by the source.
   */
  public TrackGroup getTrackGroup() {
    return trackGroup;
  }

  /**
   * Sets the current track selection.
   *
   * @param trackSelection The {@link TrackSelection}.
   */
  public void setTrackSelection(TrackSelection trackSelection) {
    this.trackSelection = trackSelection;
  }

  /** Returns the current {@link TrackSelection}. */
  public TrackSelection getTrackSelection() {
    return trackSelection;
  }

  /**
   * Resets the source.
   */
  public void reset() {
    // Only the fatal error is cleared; playlist-error bookkeeping is reset in getNextChunk.
    fatalError = null;
  }

  /**
   * Sets whether this chunk source is responsible for initializing timestamp adjusters.
   *
   * @param isTimestampMaster True if this chunk source is responsible for initializing timestamp
   *     adjusters.
   */
  public void setIsTimestampMaster(boolean isTimestampMaster) {
    this.isTimestampMaster = isTimestampMaster;
  }

  /**
   * Returns the next chunk to load.
   *
   * <p>If a chunk is available then {@link HlsChunkHolder#chunk} is set. If the end of the stream
   * has been reached then {@link HlsChunkHolder#endOfStream} is set. If a chunk is not available
   * but the end of the stream has not been reached, {@link HlsChunkHolder#playlistUrl} is set to
   * contain the {@link Uri} that refers to the playlist that needs refreshing.
   *
   * @param playbackPositionUs The current playback position relative to the period start in
   *     microseconds. If playback of the period to which this chunk source belongs has not yet
   *     started, the value will be the starting position in the period minus the duration of any
   *     media in previous periods still to be played.
   * @param loadPositionUs The current load position relative to the period start in microseconds.
   * @param queue The queue of buffered {@link HlsMediaChunk}s.
   * @param allowEndOfStream Whether {@link HlsChunkHolder#endOfStream} is allowed to be set for
   *     non-empty media playlists. If {@code false}, the last available chunk is returned instead.
   *     If the media playlist is empty, {@link HlsChunkHolder#endOfStream} is always set.
   * @param out A holder to populate.
   */
  public void getNextChunk(
      long playbackPositionUs,
      long loadPositionUs,
      List<HlsMediaChunk> queue,
      boolean allowEndOfStream,
      HlsChunkHolder out) {
    HlsMediaChunk previous = queue.isEmpty() ? null : queue.get(queue.size() - 1);
    int oldTrackIndex = previous == null ? C.INDEX_UNSET : trackGroup.indexOf(previous.trackFormat);
    long bufferedDurationUs = loadPositionUs - playbackPositionUs;
    long timeToLiveEdgeUs = resolveTimeToLiveEdgeUs(playbackPositionUs);
    if (previous != null && !independentSegments) {
      // Unless segments are known to be independent, switching tracks requires downloading
      // overlapping segments. Hence we subtract the previous segment's duration from the buffered
      // duration.
      // This may affect the live-streaming adaptive track selection logic, when we compare the
      // buffered duration to time-to-live-edge to decide whether to switch. Therefore, we subtract
      // the duration of the last loaded segment from timeToLiveEdgeUs as well.
      long subtractedDurationUs = previous.getDurationUs();
      bufferedDurationUs = Math.max(0, bufferedDurationUs - subtractedDurationUs);
      if (timeToLiveEdgeUs != C.TIME_UNSET) {
        timeToLiveEdgeUs = Math.max(0, timeToLiveEdgeUs - subtractedDurationUs);
      }
    }

    // Select the track.
    MediaChunkIterator[] mediaChunkIterators = createMediaChunkIterators(previous, loadPositionUs);
    trackSelection.updateSelectedTrack(
        playbackPositionUs, bufferedDurationUs, timeToLiveEdgeUs, queue, mediaChunkIterators);
    int selectedTrackIndex = trackSelection.getSelectedIndexInTrackGroup();
    boolean switchingTrack = oldTrackIndex != selectedTrackIndex;
    Uri selectedPlaylistUrl = playlistUrls[selectedTrackIndex];
    if (!playlistTracker.isSnapshotValid(selectedPlaylistUrl)) {
      out.playlistUrl = selectedPlaylistUrl;
      seenExpectedPlaylistError &= selectedPlaylistUrl.equals(expectedPlaylistUrl);
      expectedPlaylistUrl = selectedPlaylistUrl;
      // Retry when playlist is refreshed.
      return;
    }
    HlsMediaPlaylist mediaPlaylist =
        playlistTracker.getPlaylistSnapshot(selectedPlaylistUrl, /* isForPlayback= */ true);
    // playlistTracker snapshot is valid (checked by if() above), so mediaPlaylist must be non-null.
    Assertions.checkNotNull(mediaPlaylist);
    independentSegments = mediaPlaylist.hasIndependentSegments;

    updateLiveEdgeTimeUs(mediaPlaylist);

    // Select the chunk.
    long startOfPlaylistInPeriodUs =
        mediaPlaylist.startTimeUs - playlistTracker.getInitialStartTimeUs();
    long chunkMediaSequence =
        getChunkMediaSequence(
            previous, switchingTrack, mediaPlaylist, startOfPlaylistInPeriodUs, loadPositionUs);
    if (chunkMediaSequence < mediaPlaylist.mediaSequence && previous != null && switchingTrack) {
      // We try getting the next chunk without adapting in case that's the reason for falling
      // behind the live window.
      selectedTrackIndex = oldTrackIndex;
      selectedPlaylistUrl = playlistUrls[selectedTrackIndex];
      mediaPlaylist =
          playlistTracker.getPlaylistSnapshot(selectedPlaylistUrl, /* isForPlayback= */ true);
      // playlistTracker snapshot is valid (checked by if() above), so mediaPlaylist must be
      // non-null.
      Assertions.checkNotNull(mediaPlaylist);
      startOfPlaylistInPeriodUs =
          mediaPlaylist.startTimeUs - playlistTracker.getInitialStartTimeUs();
      chunkMediaSequence = previous.getNextChunkIndex();
    }
    if (chunkMediaSequence < mediaPlaylist.mediaSequence) {
      // Requested sequence has already left the playlist's live window; fatal until reset().
      fatalError = new BehindLiveWindowException();
      return;
    }

    int segmentIndexInPlaylist = (int) (chunkMediaSequence - mediaPlaylist.mediaSequence);
    int availableSegmentCount = mediaPlaylist.segments.size();
    if (segmentIndexInPlaylist >= availableSegmentCount) {
      if (mediaPlaylist.hasEndTag) {
        if (allowEndOfStream || availableSegmentCount == 0) {
          out.endOfStream = true;
          return;
        }
        segmentIndexInPlaylist = availableSegmentCount - 1;
      } else /* Live */ {
        // The live playlist doesn't contain the segment yet; ask for a refresh and retry.
        out.playlistUrl = selectedPlaylistUrl;
        seenExpectedPlaylistError &= selectedPlaylistUrl.equals(expectedPlaylistUrl);
        expectedPlaylistUrl = selectedPlaylistUrl;
        return;
      }
    }
    // We have a valid playlist snapshot, we can discard any playlist errors at this point.
    seenExpectedPlaylistError = false;
    expectedPlaylistUrl = null;

    // Handle encryption.
    HlsMediaPlaylist.Segment segment = mediaPlaylist.segments.get(segmentIndexInPlaylist);

    // Check if the segment or its initialization segment are fully encrypted.
    Uri initSegmentKeyUri = getFullEncryptionKeyUri(mediaPlaylist, segment.initializationSegment);
    out.chunk = maybeCreateEncryptionChunkFor(initSegmentKeyUri, selectedTrackIndex);
    if (out.chunk != null) {
      return;
    }
    Uri mediaSegmentKeyUri = getFullEncryptionKeyUri(mediaPlaylist, segment);
    out.chunk = maybeCreateEncryptionChunkFor(mediaSegmentKeyUri, selectedTrackIndex);
    if (out.chunk != null) {
      return;
    }

    out.chunk =
        HlsMediaChunk.createInstance(
            extractorFactory,
            mediaDataSource,
            playlistFormats[selectedTrackIndex],
            startOfPlaylistInPeriodUs,
            mediaPlaylist,
            segmentIndexInPlaylist,
            selectedPlaylistUrl,
            muxedCaptionFormats,
            trackSelection.getSelectionReason(),
            trackSelection.getSelectionData(),
            isTimestampMaster,
            timestampAdjusterProvider,
            previous,
            /* mediaSegmentKey= */ keyCache.get(mediaSegmentKeyUri),
            /* initSegmentKey= */ keyCache.get(initSegmentKeyUri));
  }

  /**
   * Called when the {@link HlsSampleStreamWrapper} has finished loading a chunk obtained from this
   * source.
   *
   * @param chunk The chunk whose load has been completed.
   */
  public void onChunkLoadCompleted(Chunk chunk) {
    if (chunk instanceof EncryptionKeyChunk) {
      EncryptionKeyChunk encryptionKeyChunk = (EncryptionKeyChunk) chunk;
      // Recover the scratch buffer for reuse and cache the loaded key by its URI.
      scratchSpace = encryptionKeyChunk.getDataHolder();
      keyCache.put(
          encryptionKeyChunk.dataSpec.uri, Assertions.checkNotNull(encryptionKeyChunk.getResult()));
    }
  }

  /**
   * Attempts to blacklist the track associated with the given chunk. Blacklisting will fail if the
   * track is the only non-blacklisted track in the selection.
   *
   * @param chunk The chunk whose load caused the blacklisting attempt.
   * @param blacklistDurationMs The number of milliseconds for which the track selection should be
   *     blacklisted.
   * @return Whether the blacklisting succeeded.
   */
  public boolean maybeBlacklistTrack(Chunk chunk, long blacklistDurationMs) {
    return trackSelection.blacklist(
        trackSelection.indexOf(trackGroup.indexOf(chunk.trackFormat)), blacklistDurationMs);
  }

  /**
   * Called when a playlist load encounters an error.
   *
   * @param playlistUrl The {@link Uri} of the playlist whose load encountered an error.
   * @param blacklistDurationMs The duration for which the playlist should be blacklisted. Or {@link
   *     C#TIME_UNSET} if the playlist should not be blacklisted.
   * @return True if blacklisting did not encounter errors. False otherwise.
   */
  public boolean onPlaylistError(Uri playlistUrl, long blacklistDurationMs) {
    // Map the playlist URL back to its index in the track group, if any.
    int trackGroupIndex = C.INDEX_UNSET;
    for (int i = 0; i < playlistUrls.length; i++) {
      if (playlistUrls[i].equals(playlistUrl)) {
        trackGroupIndex = i;
        break;
      }
    }
    if (trackGroupIndex == C.INDEX_UNSET) {
      return true;
    }
    int trackSelectionIndex = trackSelection.indexOf(trackGroupIndex);
    if (trackSelectionIndex == C.INDEX_UNSET) {
      return true;
    }
    seenExpectedPlaylistError |= playlistUrl.equals(expectedPlaylistUrl);
    return blacklistDurationMs == C.TIME_UNSET
        || trackSelection.blacklist(trackSelectionIndex, blacklistDurationMs);
  }

  /**
   * Returns an array of {@link MediaChunkIterator}s for upcoming media chunks.
   *
   * @param previous The previous media chunk. May be null.
   * @param loadPositionUs The position at which the iterators will start.
   * @return Array of {@link MediaChunkIterator}s for each track.
   */
  public MediaChunkIterator[] createMediaChunkIterators(
      @Nullable HlsMediaChunk previous, long loadPositionUs) {
    int oldTrackIndex = previous == null ? C.INDEX_UNSET : trackGroup.indexOf(previous.trackFormat);
    MediaChunkIterator[] chunkIterators = new MediaChunkIterator[trackSelection.length()];
    for (int i = 0; i < chunkIterators.length; i++) {
      int trackIndex = trackSelection.getIndexInTrackGroup(i);
      Uri playlistUrl = playlistUrls[trackIndex];
      if (!playlistTracker.isSnapshotValid(playlistUrl)) {
        // No valid snapshot for this track; expose an empty iterator rather than failing.
        chunkIterators[i] = MediaChunkIterator.EMPTY;
        continue;
      }
      HlsMediaPlaylist playlist =
          playlistTracker.getPlaylistSnapshot(playlistUrl, /* isForPlayback= */ false);
      // Playlist snapshot is valid (checked by if() above) so playlist must be non-null.
      Assertions.checkNotNull(playlist);
      long startOfPlaylistInPeriodUs =
          playlist.startTimeUs - playlistTracker.getInitialStartTimeUs();
      boolean switchingTrack = trackIndex != oldTrackIndex;
      long chunkMediaSequence =
          getChunkMediaSequence(
              previous, switchingTrack, playlist, startOfPlaylistInPeriodUs, loadPositionUs);
      if (chunkMediaSequence < playlist.mediaSequence) {
        chunkIterators[i] = MediaChunkIterator.EMPTY;
        continue;
      }
      int chunkIndex = (int) (chunkMediaSequence - playlist.mediaSequence);
      chunkIterators[i] =
          new HlsMediaPlaylistSegmentIterator(playlist, startOfPlaylistInPeriodUs, chunkIndex);
    }
    return chunkIterators;
  }

  /**
   * Evaluates whether {@link MediaChunk MediaChunks} should be removed from the back of the queue.
   *
   * <p>Removing {@link MediaChunk MediaChunks} from the back of the queue can be useful if they
   * could be replaced with chunks of a significantly higher quality (e.g. because the available
   * bandwidth has substantially increased).
   *
   * <p>Will only be called if no {@link MediaChunk} in the queue is currently loading.
   *
   * @param playbackPositionUs The current playback position, in microseconds.
   * @param queue The queue of buffered {@link MediaChunk MediaChunks}.
   * @return The preferred queue size.
   */
  public int getPreferredQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
    if (fatalError != null || trackSelection.length() < 2) {
      return queue.size();
    }
    return trackSelection.evaluateQueueSize(playbackPositionUs, queue);
  }

  /**
   * Returns whether an ongoing load of a chunk should be canceled.
   *
   * @param playbackPositionUs The current playback position, in microseconds.
   * @param loadingChunk The currently loading {@link Chunk}.
   * @param queue The queue of buffered {@link MediaChunk MediaChunks}.
   * @return Whether the ongoing load of {@code loadingChunk} should be canceled.
   */
  public boolean shouldCancelLoad(
      long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
    if (fatalError != null) {
      return false;
    }
    return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue);
  }

  // Private methods.

  /**
   * Returns the media sequence number of the segment to load next in {@code mediaPlaylist}.
   *
   * @param previous The last (at least partially) loaded segment.
   * @param switchingTrack Whether the segment to load is not preceded by a segment in the same
   *     track.
   * @param mediaPlaylist The media playlist to which the segment to load belongs.
   * @param startOfPlaylistInPeriodUs The start of {@code mediaPlaylist} relative to the period
   *     start in microseconds.
   * @param loadPositionUs The current load position relative to the period start in microseconds.
   * @return The media sequence of the segment to load.
   */
  private long getChunkMediaSequence(
      @Nullable HlsMediaChunk previous,
      boolean switchingTrack,
      HlsMediaPlaylist mediaPlaylist,
      long startOfPlaylistInPeriodUs,
      long loadPositionUs) {
    if (previous == null || switchingTrack) {
      long endOfPlaylistInPeriodUs = startOfPlaylistInPeriodUs + mediaPlaylist.durationUs;
      long targetPositionInPeriodUs =
          (previous == null || independentSegments) ? loadPositionUs : previous.startTimeUs;
      if (!mediaPlaylist.hasEndTag && targetPositionInPeriodUs >= endOfPlaylistInPeriodUs) {
        // If the playlist is too old to contain the chunk, we need to refresh it.
        return mediaPlaylist.mediaSequence + mediaPlaylist.segments.size();
      }
      long targetPositionInPlaylistUs = targetPositionInPeriodUs - startOfPlaylistInPeriodUs;
      return Util.binarySearchFloor(
              mediaPlaylist.segments,
              /* value= */ targetPositionInPlaylistUs,
              /* inclusive= */ true,
              /* stayInBounds= */ !playlistTracker.isLive() || previous == null)
          + mediaPlaylist.mediaSequence;
    }
    // Not switching tracks: continue sequentially from the previous chunk.
    return previous.isLoadCompleted() ? previous.getNextChunkIndex() : previous.chunkIndex;
  }

  private long resolveTimeToLiveEdgeUs(long playbackPositionUs) {
    final boolean resolveTimeToLiveEdgePossible = liveEdgeInPeriodTimeUs != C.TIME_UNSET;
    return resolveTimeToLiveEdgePossible
        ? liveEdgeInPeriodTimeUs - playbackPositionUs
        : C.TIME_UNSET;
  }

  private void updateLiveEdgeTimeUs(HlsMediaPlaylist mediaPlaylist) {
    // VOD playlists (hasEndTag) have no live edge.
    liveEdgeInPeriodTimeUs =
        mediaPlaylist.hasEndTag
            ? C.TIME_UNSET
            : (mediaPlaylist.getEndTimeUs() - playlistTracker.getInitialStartTimeUs());
  }

  /**
   * Returns a chunk that loads the encryption key at {@code keyUri}, or null if the key is null or
   * already cached (in which case the cache entry is refreshed to most-recently-used).
   */
  @Nullable
  private Chunk maybeCreateEncryptionChunkFor(@Nullable Uri keyUri, int selectedTrackIndex) {
    if (keyUri == null) {
      return null;
    }

    @Nullable byte[] encryptionKey = keyCache.remove(keyUri);
    if (encryptionKey != null) {
      // The key was present in the key cache. We re-insert it to prevent it from being evicted by
      // the following key addition. Note that removal of the key is necessary to affect the
      // eviction order.
      keyCache.put(keyUri, encryptionKey);
      return null;
    }
    DataSpec dataSpec =
        new DataSpec.Builder().setUri(keyUri).setFlags(DataSpec.FLAG_ALLOW_GZIP).build();
    return new EncryptionKeyChunk(
        encryptionDataSource,
        dataSpec,
        playlistFormats[selectedTrackIndex],
        trackSelection.getSelectionReason(),
        trackSelection.getSelectionData(),
        scratchSpace);
  }

  @Nullable
  private static Uri getFullEncryptionKeyUri(HlsMediaPlaylist playlist, @Nullable Segment segment) {
    if (segment == null || segment.fullSegmentEncryptionKeyUri == null) {
      return null;
    }
    return UriUtil.resolveToUri(playlist.baseUri, segment.fullSegmentEncryptionKeyUri);
  }

  // Private classes.

  /**
   * A {@link TrackSelection} to use for initialization.
   */
  private static final class InitializationTrackSelection extends BaseTrackSelection {

    private int selectedIndex;

    public InitializationTrackSelection(TrackGroup group, int[] tracks) {
      super(group, tracks);
      // Start with the track whose format is listed first in the group.
      selectedIndex = indexOf(group.getFormat(0));
    }

    @Override
    public void updateSelectedTrack(
        long playbackPositionUs,
        long bufferedDurationUs,
        long availableDurationUs,
        List<? extends MediaChunk> queue,
        MediaChunkIterator[] mediaChunkIterators) {
      long nowMs = SystemClock.elapsedRealtime();
      if (!isBlacklisted(selectedIndex, nowMs)) {
        return;
      }
      // Try from lowest bitrate to highest.
      for (int i = length - 1; i >= 0; i--) {
        if (!isBlacklisted(i, nowMs)) {
          selectedIndex = i;
          return;
        }
      }
      // Should never happen.
      throw new IllegalStateException();
    }

    @Override
    public int getSelectedIndex() {
      return selectedIndex;
    }

    @Override
    public int getSelectionReason() {
      return C.SELECTION_REASON_UNKNOWN;
    }

    @Override
    @Nullable
    public Object getSelectionData() {
      return null;
    }
  }

  private static final class EncryptionKeyChunk extends DataChunk {

    // Set by consume() once the load completes; monotonic — never reset to null afterwards.
    private byte @MonotonicNonNull [] result;

    public EncryptionKeyChunk(
        DataSource dataSource,
        DataSpec dataSpec,
        Format trackFormat,
        int trackSelectionReason,
        @Nullable Object trackSelectionData,
        byte[] scratchSpace) {
      super(
          dataSource,
          dataSpec,
          C.DATA_TYPE_DRM,
          trackFormat,
          trackSelectionReason,
          trackSelectionData,
          scratchSpace);
    }

    @Override
    protected void consume(byte[] data, int limit) {
      result = Arrays.copyOf(data, limit);
    }

    /** Return the result of this chunk, or null if loading is not complete. */
    @Nullable
    public byte[] getResult() {
      return result;
    }
  }

  /** {@link MediaChunkIterator} wrapping a {@link HlsMediaPlaylist}. */
  private static final class HlsMediaPlaylistSegmentIterator extends BaseMediaChunkIterator {

    private final HlsMediaPlaylist playlist;
    private final long startOfPlaylistInPeriodUs;

    /**
     * Creates iterator.
     *
     * @param playlist The {@link HlsMediaPlaylist} to wrap.
     * @param startOfPlaylistInPeriodUs The start time of the playlist in the period, in
     *     microseconds.
     * @param chunkIndex The index of the first available chunk in the playlist.
     */
    public HlsMediaPlaylistSegmentIterator(
        HlsMediaPlaylist playlist, long startOfPlaylistInPeriodUs, int chunkIndex) {
      super(/* fromIndex= */ chunkIndex, /* toIndex= */ playlist.segments.size() - 1);
      this.playlist = playlist;
      this.startOfPlaylistInPeriodUs = startOfPlaylistInPeriodUs;
    }

    @Override
    public DataSpec getDataSpec() {
      checkInBounds();
      Segment segment = playlist.segments.get((int) getCurrentIndex());
      Uri chunkUri = UriUtil.resolveToUri(playlist.baseUri, segment.url);
      return new DataSpec(chunkUri, segment.byteRangeOffset, segment.byteRangeLength);
    }

    @Override
    public long getChunkStartTimeUs() {
      checkInBounds();
      Segment segment = playlist.segments.get((int) getCurrentIndex());
      return startOfPlaylistInPeriodUs + segment.relativeStartTimeUs;
    }

    @Override
    public long getChunkEndTimeUs() {
      checkInBounds();
      Segment segment = playlist.segments.get((int) getCurrentIndex());
      long segmentStartTimeInPeriodUs = startOfPlaylistInPeriodUs + segment.relativeStartTimeUs;
      return segmentStartTimeInPeriodUs + segment.durationUs;
    }
  }
}
/* * Copyright 2014-2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.cli.net; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.karaf.shell.commands.Argument; import org.apache.karaf.shell.commands.Command; import org.apache.karaf.shell.commands.Option; import org.onosproject.cli.AbstractShellCommand; import org.onosproject.cli.Comparators; import org.onosproject.core.ApplicationId; import org.onosproject.core.CoreService; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flow.FlowEntry; import org.onosproject.net.flow.FlowEntry.FlowEntryState; import org.onosproject.net.flow.FlowRuleService; import org.onosproject.net.flow.TrafficTreatment; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import java.util.function.Predicate; import static com.google.common.collect.Lists.newArrayList; /** * Lists all currently-known flows. 
*/ @Command(scope = "onos", name = "flows", description = "Lists all currently-known flows.") public class FlowsListCommand extends AbstractShellCommand { private static final Predicate<FlowEntry> TRUE_PREDICATE = f -> true; public static final String ANY = "any"; private static final String LONG_FORMAT = " id=%s, state=%s, bytes=%s, " + "packets=%s, duration=%s, priority=%s, tableId=%s, appId=%s, " + "payLoad=%s, selector=%s, treatment=%s"; private static final String SHORT_FORMAT = " %s, bytes=%s, packets=%s, " + "table=%s, priority=%s, selector=%s, treatment=%s"; @Argument(index = 0, name = "state", description = "Flow Rule state", required = false, multiValued = false) String state = null; @Argument(index = 1, name = "uri", description = "Device ID", required = false, multiValued = false) String uri = null; @Argument(index = 2, name = "table", description = "Table ID", required = false, multiValued = false) String table = null; @Option(name = "-s", aliases = "--short", description = "Print more succinct output for each flow", required = false, multiValued = false) private boolean shortOutput = false; @Option(name = "-c", aliases = "--count", description = "Print flow count only", required = false, multiValued = false) private boolean countOnly = false; private Predicate<FlowEntry> predicate = TRUE_PREDICATE; @Override protected void execute() { CoreService coreService = get(CoreService.class); DeviceService deviceService = get(DeviceService.class); FlowRuleService service = get(FlowRuleService.class); compilePredicate(); SortedMap<Device, List<FlowEntry>> flows = getSortedFlows(deviceService, service); if (outputJson()) { print("%s", json(flows.keySet(), flows)); } else { flows.forEach((device, flow) -> printFlows(device, flow, coreService)); } } /** * Produces a JSON array of flows grouped by the each device. 
* * @param devices collection of devices to group flow by * @param flows collection of flows per each device * @return JSON array */ private JsonNode json(Iterable<Device> devices, Map<Device, List<FlowEntry>> flows) { ObjectMapper mapper = new ObjectMapper(); ArrayNode result = mapper.createArrayNode(); for (Device device : devices) { result.add(json(mapper, device, flows.get(device))); } return result; } /** * Compiles a predicate to find matching flows based on the command * arguments. */ private void compilePredicate() { if (state != null && !state.equals(ANY)) { final FlowEntryState feState = FlowEntryState.valueOf(state.toUpperCase()); predicate = predicate.and(f -> f.state().equals(feState)); } if (table != null) { final int tableId = Integer.parseInt(table); predicate = predicate.and(f -> f.tableId() == tableId); } } // Produces JSON object with the flows of the given device. private ObjectNode json(ObjectMapper mapper, Device device, List<FlowEntry> flows) { ObjectNode result = mapper.createObjectNode(); ArrayNode array = mapper.createArrayNode(); flows.forEach(flow -> array.add(jsonForEntity(flow, FlowEntry.class))); result.put("device", device.id().toString()) .put("flowCount", flows.size()) .set("flows", array); return result; } /** * Returns the list of devices sorted using the device ID URIs. * * @param deviceService device service * @param service flow rule service * @return sorted device list */ protected SortedMap<Device, List<FlowEntry>> getSortedFlows(DeviceService deviceService, FlowRuleService service) { SortedMap<Device, List<FlowEntry>> flows = new TreeMap<>(Comparators.ELEMENT_COMPARATOR); List<FlowEntry> rules; Iterable<Device> devices = null; if (uri == null) { devices = deviceService.getDevices(); } else { Device dev = deviceService.getDevice(DeviceId.deviceId(uri)); devices = (dev == null) ? 
deviceService.getDevices() : Collections.singletonList(dev); } for (Device d : devices) { if (predicate.equals(TRUE_PREDICATE)) { rules = newArrayList(service.getFlowEntries(d.id())); } else { rules = newArrayList(); for (FlowEntry f : service.getFlowEntries(d.id())) { if (predicate.test(f)) { rules.add(f); } } } rules.sort(Comparators.FLOW_RULE_COMPARATOR); flows.put(d, rules); } return flows; } /** * Prints flows. * * @param d the device * @param flows the set of flows for that device * @param coreService core system service */ protected void printFlows(Device d, List<FlowEntry> flows, CoreService coreService) { boolean empty = flows == null || flows.isEmpty(); print("deviceId=%s, flowRuleCount=%d", d.id(), empty ? 0 : flows.size()); if (empty || countOnly) { return; } for (FlowEntry f : flows) { if (shortOutput) { print(SHORT_FORMAT, f.state(), f.bytes(), f.packets(), f.tableId(), f.priority(), f.selector().criteria(), printTreatment(f.treatment())); } else { ApplicationId appId = coreService.getAppId(f.appId()); print(LONG_FORMAT, Long.toHexString(f.id().value()), f.state(), f.bytes(), f.packets(), f.life(), f.priority(), f.tableId(), appId != null ? appId.name() : "<none>", f.payLoad() == null ? 
null : f.payLoad().payLoad().toString(), f.selector().criteria(), f.treatment()); } } } private String printTreatment(TrafficTreatment treatment) { final String delimiter = ", "; StringBuilder builder = new StringBuilder("["); if (!treatment.immediate().isEmpty()) { builder.append("immediate=" + treatment.immediate() + delimiter); } if (!treatment.deferred().isEmpty()) { builder.append("deferred=" + treatment.deferred() + delimiter); } if (treatment.clearedDeferred()) { builder.append("clearDeferred" + delimiter); } if (treatment.tableTransition() != null) { builder.append("transition=" + treatment.tableTransition() + delimiter); } if (treatment.metered() != null) { builder.append("meter=" + treatment.metered() + delimiter); } if (treatment.writeMetadata() != null) { builder.append("metadata=" + treatment.writeMetadata() + delimiter); } // Chop off last delimiter builder.replace(builder.length() - delimiter.length(), builder.length(), ""); builder.append("]"); return builder.toString(); } }
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.governance.common.utils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xerces.util.SecurityManager; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.wso2.carbon.governance.common.GovernanceConfiguration; import org.wso2.carbon.governance.common.GovernanceConfigurationException; import org.wso2.carbon.utils.CarbonUtils; import org.wso2.carbon.utils.ServerConstants; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; public class GovernanceUtils { public static final String DISCOVERY_AGENTS = "DiscoveryAgents"; public static final String DISCOVERY_AGENT = "DiscoveryAgent"; public static final String SERVER_TYPE_ID = "ServerTypeId"; public static final String AGENT_CLASS = "AgentClass"; public static final String PROPERTY = "property"; public static final String NAME = "name"; public static final String VALUE = "value"; public static final String GOVERNANCE_CONFIG_FILE = "governance.xml"; public static final String 
COMPARATORS = "Comparators"; public static final String COMPARATOR = "Comparator"; public static final String CLASS_ATTR = "class"; public static final String ENDPOINT_STATE_MANAGEMENT = "EndpointStateManagement"; public static final String ENDPOINT_STATE_MANAGEMENT_ENABLED = "enabled"; public static final String DEFAULT_ENDPOINT_ACTIVE_DURATION = "DefaultEndpointActiveDuration"; public static final String ENABLE_LIFECYCLE_CHECKLIST_ITEMS = "enableLifecycleChecklistItems"; public static final String LIFECYCLE_CHECKLIST_ITEMS_ENABLED = "true"; private static final int ENTITY_EXPANSION_LIMIT = 0; private static Log log = LogFactory.getLog(GovernanceUtils.class); private static boolean isConfigInitialized = false; public static GovernanceConfiguration getGovernanceConfiguration() throws GovernanceConfigurationException { GovernanceConfiguration govConfig = GovernanceConfiguration.getInstance(); if (!isConfigInitialized) { String governanceXML = getGovernanceXML(); File governanceXMLFile = new File(governanceXML); InputStream in = null; try { in = new FileInputStream(governanceXMLFile); initGovernanceConfiguration(in, govConfig); isConfigInitialized = true; if (log.isDebugEnabled()) { log.debug(govConfig); } } catch (IOException e) { throw new GovernanceConfigurationException("Cannot read file " + governanceXML, e); } finally { if (in != null) { try { in.close(); } catch (IOException e) { log.warn("Cannot close file " + governanceXML, e); } } } } return govConfig; } private static void initGovernanceConfiguration(InputStream in, GovernanceConfiguration govConfig) throws GovernanceConfigurationException { try { DocumentBuilderFactory factory = getSecuredDocumentBuilder(); DocumentBuilder builder = factory.newDocumentBuilder(); Document document = builder.parse(in); readChildElements(document.getDocumentElement(), govConfig); isConfigInitialized = true; } catch (ParserConfigurationException | SAXException | IOException e) { log.fatal("Problem in parsing governance 
configuration file ", e); throw new GovernanceConfigurationException(e); } } private static String getGovernanceXML() { return getCarbonConfigDirPath() + File.separator + GOVERNANCE_CONFIG_FILE; } private static void readChildElements(Element config, GovernanceConfiguration govConfig) { readDiscoveryAgents(config, govConfig); readComparators(config, govConfig); readEndpointStateManagement(config, govConfig); readLifecycleChecklistItems(config, govConfig); } private static void readDiscoveryAgents(Element config, GovernanceConfiguration govConfig) { Element agentsElement = getFirstElement(config, DISCOVERY_AGENTS); if (agentsElement != null) { NodeList agents = agentsElement.getElementsByTagName(DISCOVERY_AGENT); for (int i = 0; i < agents.getLength(); i++) { Element agent = (Element) agents.item(i); String serverType = getFirstElementContent(agent, SERVER_TYPE_ID); String agentClass = getFirstElementContent(agent, AGENT_CLASS); Map<String, String> properties = getProperties(agent); properties.put(AGENT_CLASS, agentClass); govConfig.addDiscoveryAgentConfig(serverType, properties); } } } private static void readComparators(Element config, GovernanceConfiguration govConfig) { Element comparatorsEle = getFirstElement(config, COMPARATORS); if (comparatorsEle != null) { NodeList comparatorsElements = comparatorsEle.getElementsByTagName(COMPARATOR); for (int i = 0; i < comparatorsElements.getLength(); i++) { Element comparatorEle = (Element) comparatorsElements.item(i); String comparatorClass = comparatorEle.getAttribute(CLASS_ATTR); if (comparatorClass != null && !comparatorClass.isEmpty()) { govConfig.addComparator(comparatorClass); } } } } private static void readEndpointStateManagement(Element config, GovernanceConfiguration govConfig) { Element endpointStateManagementEle = getFirstElement(config, ENDPOINT_STATE_MANAGEMENT); if (endpointStateManagementEle != null) { String enabled = endpointStateManagementEle.getTextContent(); if (enabled != null && 
ENDPOINT_STATE_MANAGEMENT_ENABLED.equals(enabled.toLowerCase())) { govConfig.setEndpointStateManagementEnabled(true); } } Element DefaultEndpointActiveEle = getFirstElement(config, DEFAULT_ENDPOINT_ACTIVE_DURATION); if (DefaultEndpointActiveEle != null) { String durationStr = DefaultEndpointActiveEle.getTextContent(); if (durationStr != null) { long duration = Long.valueOf(durationStr); govConfig.setDefaultEndpointActiveDuration(duration); } } } /** * This method is used to read the property of 'enableLifecycleChecklistItems' in the governance.xml file.. * * @param config child element of the configuration. * @param govConfig the governance configuration file. */ private static void readLifecycleChecklistItems(Element config, GovernanceConfiguration govConfig) { Element enableLifecycleChecklistItemsElement = getFirstElement(config, ENABLE_LIFECYCLE_CHECKLIST_ITEMS); if (enableLifecycleChecklistItemsElement != null) { String lifecycleEnabled = enableLifecycleChecklistItemsElement.getTextContent(); if (lifecycleEnabled != null && LIFECYCLE_CHECKLIST_ITEMS_ENABLED.equals(lifecycleEnabled.toLowerCase())) { govConfig.setLifecycleChecklistItemsEnabled(true); } } } private static Map<String, String> getProperties(Element agentEle) { NodeList propertyNodes = agentEle.getElementsByTagName(PROPERTY); Map<String, String> properties = new HashMap<>(); for (int i = 0; i < propertyNodes.getLength(); i++) { Element propertyELe = (Element) propertyNodes.item(i); String propertyKey = propertyELe.getAttribute(NAME); String propertyValue = propertyELe.getAttribute(VALUE); if (propertyKey != null && propertyValue != null && !propertyKey.isEmpty() && !propertyValue.isEmpty()) { properties.put(propertyKey, propertyValue); } } return properties; } private static Element getFirstElement(Element element, String childName) { if (element.getElementsByTagName(childName) != null) { return (Element) element.getElementsByTagName(childName).item(0); } return null; } private static String 
getFirstElementContent(Element element, String childName) { return element.getElementsByTagName(childName).item(0).getTextContent(); } public static String getCarbonConfigDirPath() { /* if user set the system property telling where is the configuration directory*/ String carbonConfigDir = System.getProperty(ServerConstants.CARBON_CONFIG_DIR_PATH); if (carbonConfigDir == null) { carbonConfigDir = CarbonUtils.getCarbonConfigDirPath(); } return carbonConfigDir; } /** * Returns a secured DocumentBuilderFactory instance * * @return DocumentBuilderFactory */ public static DocumentBuilderFactory getSecuredDocumentBuilder() { org.apache.xerces.impl.Constants Constants = null; DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); dbf.setXIncludeAware(false); dbf.setExpandEntityReferences(false); try { dbf.setFeature(Constants.SAX_FEATURE_PREFIX + Constants.EXTERNAL_GENERAL_ENTITIES_FEATURE, false); dbf.setFeature(Constants.SAX_FEATURE_PREFIX + Constants.EXTERNAL_PARAMETER_ENTITIES_FEATURE, false); dbf.setFeature(Constants.XERCES_FEATURE_PREFIX + Constants.LOAD_EXTERNAL_DTD_FEATURE, false); } catch (ParserConfigurationException e) { log.error( "Failed to load XML Processor Feature " + Constants.EXTERNAL_GENERAL_ENTITIES_FEATURE + " or " + Constants.EXTERNAL_PARAMETER_ENTITIES_FEATURE + " or " + Constants.LOAD_EXTERNAL_DTD_FEATURE); } SecurityManager securityManager = new SecurityManager(); securityManager.setEntityExpansionLimit(ENTITY_EXPANSION_LIMIT); dbf.setAttribute(Constants.XERCES_PROPERTY_PREFIX + Constants.SECURITY_MANAGER_PROPERTY, securityManager); return dbf; } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.lifesciences.v2beta; /** * Service definition for CloudLifeSciences (v2beta). * * <p> * Cloud Life Sciences is a suite of services and tools for managing, processing, and transforming life sciences data. * </p> * * <p> * For more information about this service, see the * <a href="https://cloud.google.com/life-sciences" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link CloudLifeSciencesRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class CloudLifeSciences extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15, "You are currently running with version %s of google-api-client. " + "You need at least version 1.15 of google-api-client to run version " + "1.28.0 of the Cloud Life Sciences API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. 
This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://lifesciences.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. * </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public CloudLifeSciences(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer 
httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ CloudLifeSciences(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Projects collection. * * <p>The typical use is:</p> * <pre> * {@code CloudLifeSciences lifesciences = new CloudLifeSciences(...);} * {@code CloudLifeSciences.Projects.List request = lifesciences.projects().list(parameters ...)} * </pre> * * @return the resource collection */ public Projects projects() { return new Projects(); } /** * The "projects" collection of methods. */ public class Projects { /** * An accessor for creating requests from the Locations collection. * * <p>The typical use is:</p> * <pre> * {@code CloudLifeSciences lifesciences = new CloudLifeSciences(...);} * {@code CloudLifeSciences.Locations.List request = lifesciences.locations().list(parameters ...)} * </pre> * * @return the resource collection */ public Locations locations() { return new Locations(); } /** * The "locations" collection of methods. */ public class Locations { /** * Gets information about a location. * * Create a request for the method "locations.get". * * This request holds the parameters needed by the lifesciences server. After setting any optional * parameters, call the {@link Get#execute()} method to invoke the remote operation. * * @param name Resource name for the location. 
* @return the request */ public Get get(java.lang.String name) throws java.io.IOException { Get result = new Get(name); initialize(result); return result; } public class Get extends CloudLifeSciencesRequest<com.google.api.services.lifesciences.v2beta.model.Location> { private static final String REST_PATH = "v2beta/{+name}"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/locations/[^/]+$"); /** * Gets information about a location. * * Create a request for the method "locations.get". * * This request holds the parameters needed by the the lifesciences server. After setting any * optional parameters, call the {@link Get#execute()} method to invoke the remote operation. <p> * {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param name Resource name for the location. * @since 1.13 */ protected Get(java.lang.String name) { super(CloudLifeSciences.this, "GET", REST_PATH, null, com.google.api.services.lifesciences.v2beta.model.Location.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/locations/[^/]+$"); } } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public Get set$Xgafv(java.lang.String $Xgafv) { return (Get) super.set$Xgafv($Xgafv); } @Override public Get setAccessToken(java.lang.String accessToken) { return (Get) super.setAccessToken(accessToken); } 
@Override public Get setAlt(java.lang.String alt) { return (Get) super.setAlt(alt); } @Override public Get setCallback(java.lang.String callback) { return (Get) super.setCallback(callback); } @Override public Get setFields(java.lang.String fields) { return (Get) super.setFields(fields); } @Override public Get setKey(java.lang.String key) { return (Get) super.setKey(key); } @Override public Get setOauthToken(java.lang.String oauthToken) { return (Get) super.setOauthToken(oauthToken); } @Override public Get setPrettyPrint(java.lang.Boolean prettyPrint) { return (Get) super.setPrettyPrint(prettyPrint); } @Override public Get setQuotaUser(java.lang.String quotaUser) { return (Get) super.setQuotaUser(quotaUser); } @Override public Get setUploadType(java.lang.String uploadType) { return (Get) super.setUploadType(uploadType); } @Override public Get setUploadProtocol(java.lang.String uploadProtocol) { return (Get) super.setUploadProtocol(uploadProtocol); } /** Resource name for the location. */ @com.google.api.client.util.Key private java.lang.String name; /** Resource name for the location. */ public java.lang.String getName() { return name; } /** Resource name for the location. */ public Get setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/locations/[^/]+$"); } this.name = name; return this; } @Override public Get set(String parameterName, Object value) { return (Get) super.set(parameterName, value); } } /** * Lists information about the supported locations for this service. * * Create a request for the method "locations.list". * * This request holds the parameters needed by the lifesciences server. After setting any optional * parameters, call the {@link List#execute()} method to invoke the remote operation. * * @param name The resource that owns the locations collection, if applicable. 
* @return the request */ public List list(java.lang.String name) throws java.io.IOException { List result = new List(name); initialize(result); return result; } public class List extends CloudLifeSciencesRequest<com.google.api.services.lifesciences.v2beta.model.ListLocationsResponse> { private static final String REST_PATH = "v2beta/{+name}/locations"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+$"); /** * Lists information about the supported locations for this service. * * Create a request for the method "locations.list". * * This request holds the parameters needed by the the lifesciences server. After setting any * optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p> * {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param name The resource that owns the locations collection, if applicable. 
* @since 1.13 */ protected List(java.lang.String name) { super(CloudLifeSciences.this, "GET", REST_PATH, null, com.google.api.services.lifesciences.v2beta.model.ListLocationsResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+$"); } } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public List set$Xgafv(java.lang.String $Xgafv) { return (List) super.set$Xgafv($Xgafv); } @Override public List setAccessToken(java.lang.String accessToken) { return (List) super.setAccessToken(accessToken); } @Override public List setAlt(java.lang.String alt) { return (List) super.setAlt(alt); } @Override public List setCallback(java.lang.String callback) { return (List) super.setCallback(callback); } @Override public List setFields(java.lang.String fields) { return (List) super.setFields(fields); } @Override public List setKey(java.lang.String key) { return (List) super.setKey(key); } @Override public List setOauthToken(java.lang.String oauthToken) { return (List) super.setOauthToken(oauthToken); } @Override public List setPrettyPrint(java.lang.Boolean prettyPrint) { return (List) super.setPrettyPrint(prettyPrint); } @Override public List setQuotaUser(java.lang.String quotaUser) { return (List) super.setQuotaUser(quotaUser); } @Override public List setUploadType(java.lang.String uploadType) { return (List) super.setUploadType(uploadType); } @Override public List setUploadProtocol(java.lang.String uploadProtocol) { return (List) 
super.setUploadProtocol(uploadProtocol); } /** The resource that owns the locations collection, if applicable. */ @com.google.api.client.util.Key private java.lang.String name; /** The resource that owns the locations collection, if applicable. */ public java.lang.String getName() { return name; } /** The resource that owns the locations collection, if applicable. */ public List setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+$"); } this.name = name; return this; } /** The standard list filter. */ @com.google.api.client.util.Key private java.lang.String filter; /** The standard list filter. */ public java.lang.String getFilter() { return filter; } /** The standard list filter. */ public List setFilter(java.lang.String filter) { this.filter = filter; return this; } /** The standard list page size. */ @com.google.api.client.util.Key private java.lang.Integer pageSize; /** The standard list page size. */ public java.lang.Integer getPageSize() { return pageSize; } /** The standard list page size. */ public List setPageSize(java.lang.Integer pageSize) { this.pageSize = pageSize; return this; } /** The standard list page token. */ @com.google.api.client.util.Key private java.lang.String pageToken; /** The standard list page token. */ public java.lang.String getPageToken() { return pageToken; } /** The standard list page token. */ public List setPageToken(java.lang.String pageToken) { this.pageToken = pageToken; return this; } @Override public List set(String parameterName, Object value) { return (List) super.set(parameterName, value); } } /** * An accessor for creating requests from the Operations collection. 
* * <p>The typical use is:</p> * <pre> * {@code CloudLifeSciences lifesciences = new CloudLifeSciences(...);} * {@code CloudLifeSciences.Operations.List request = lifesciences.operations().list(parameters ...)} * </pre> * * @return the resource collection */ public Operations operations() { return new Operations(); } /** * The "operations" collection of methods. */ public class Operations { /** * Starts asynchronous cancellation on a long-running operation. The server makes a best effort to * cancel the operation, but success is not guaranteed. Clients may use Operations.GetOperation or * Operations.ListOperations to check whether the cancellation succeeded or the operation completed * despite cancellation. Authorization requires the following [Google * IAM](https://cloud.google.com/iam) permission * * * `lifesciences.operations.cancel` * * Create a request for the method "operations.cancel". * * This request holds the parameters needed by the lifesciences server. After setting any optional * parameters, call the {@link Cancel#execute()} method to invoke the remote operation. * * @param name The name of the operation resource to be cancelled. * @param content the {@link com.google.api.services.lifesciences.v2beta.model.CancelOperationRequest} * @return the request */ public Cancel cancel(java.lang.String name, com.google.api.services.lifesciences.v2beta.model.CancelOperationRequest content) throws java.io.IOException { Cancel result = new Cancel(name, content); initialize(result); return result; } public class Cancel extends CloudLifeSciencesRequest<com.google.api.services.lifesciences.v2beta.model.Empty> { private static final String REST_PATH = "v2beta/{+name}:cancel"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/locations/[^/]+/operations/[^/]+$"); /** * Starts asynchronous cancellation on a long-running operation. The server makes a best effort to * cancel the operation, but success is not guaranteed. 
Clients may use Operations.GetOperation or * Operations.ListOperations to check whether the cancellation succeeded or the operation * completed despite cancellation. Authorization requires the following [Google * IAM](https://cloud.google.com/iam) permission * * * `lifesciences.operations.cancel` * * Create a request for the method "operations.cancel". * * This request holds the parameters needed by the the lifesciences server. After setting any * optional parameters, call the {@link Cancel#execute()} method to invoke the remote operation. * <p> {@link * Cancel#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param name The name of the operation resource to be cancelled. * @param content the {@link com.google.api.services.lifesciences.v2beta.model.CancelOperationRequest} * @since 1.13 */ protected Cancel(java.lang.String name, com.google.api.services.lifesciences.v2beta.model.CancelOperationRequest content) { super(CloudLifeSciences.this, "POST", REST_PATH, content, com.google.api.services.lifesciences.v2beta.model.Empty.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/locations/[^/]+/operations/[^/]+$"); } } @Override public Cancel set$Xgafv(java.lang.String $Xgafv) { return (Cancel) super.set$Xgafv($Xgafv); } @Override public Cancel setAccessToken(java.lang.String accessToken) { return (Cancel) super.setAccessToken(accessToken); } @Override public Cancel setAlt(java.lang.String alt) { return (Cancel) super.setAlt(alt); } @Override public Cancel setCallback(java.lang.String callback) { return (Cancel) super.setCallback(callback); } @Override public Cancel 
setFields(java.lang.String fields) { return (Cancel) super.setFields(fields); } @Override public Cancel setKey(java.lang.String key) { return (Cancel) super.setKey(key); } @Override public Cancel setOauthToken(java.lang.String oauthToken) { return (Cancel) super.setOauthToken(oauthToken); } @Override public Cancel setPrettyPrint(java.lang.Boolean prettyPrint) { return (Cancel) super.setPrettyPrint(prettyPrint); } @Override public Cancel setQuotaUser(java.lang.String quotaUser) { return (Cancel) super.setQuotaUser(quotaUser); } @Override public Cancel setUploadType(java.lang.String uploadType) { return (Cancel) super.setUploadType(uploadType); } @Override public Cancel setUploadProtocol(java.lang.String uploadProtocol) { return (Cancel) super.setUploadProtocol(uploadProtocol); } /** The name of the operation resource to be cancelled. */ @com.google.api.client.util.Key private java.lang.String name; /** The name of the operation resource to be cancelled. */ public java.lang.String getName() { return name; } /** The name of the operation resource to be cancelled. */ public Cancel setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/locations/[^/]+/operations/[^/]+$"); } this.name = name; return this; } @Override public Cancel set(String parameterName, Object value) { return (Cancel) super.set(parameterName, value); } } /** * Gets the latest state of a long-running operation. Clients can use this method to poll the * operation result at intervals as recommended by the API service. Authorization requires the * following [Google IAM](https://cloud.google.com/iam) permission * * * `lifesciences.operations.get` * * Create a request for the method "operations.get". * * This request holds the parameters needed by the lifesciences server. 
After setting any optional * parameters, call the {@link Get#execute()} method to invoke the remote operation. * * @param name The name of the operation resource. * @return the request */ public Get get(java.lang.String name) throws java.io.IOException { Get result = new Get(name); initialize(result); return result; } public class Get extends CloudLifeSciencesRequest<com.google.api.services.lifesciences.v2beta.model.Operation> { private static final String REST_PATH = "v2beta/{+name}"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/locations/[^/]+/operations/[^/]+$"); /** * Gets the latest state of a long-running operation. Clients can use this method to poll the * operation result at intervals as recommended by the API service. Authorization requires the * following [Google IAM](https://cloud.google.com/iam) permission * * * `lifesciences.operations.get` * * Create a request for the method "operations.get". * * This request holds the parameters needed by the the lifesciences server. After setting any * optional parameters, call the {@link Get#execute()} method to invoke the remote operation. <p> * {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param name The name of the operation resource. 
* @since 1.13 */ protected Get(java.lang.String name) { super(CloudLifeSciences.this, "GET", REST_PATH, null, com.google.api.services.lifesciences.v2beta.model.Operation.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/locations/[^/]+/operations/[^/]+$"); } } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public Get set$Xgafv(java.lang.String $Xgafv) { return (Get) super.set$Xgafv($Xgafv); } @Override public Get setAccessToken(java.lang.String accessToken) { return (Get) super.setAccessToken(accessToken); } @Override public Get setAlt(java.lang.String alt) { return (Get) super.setAlt(alt); } @Override public Get setCallback(java.lang.String callback) { return (Get) super.setCallback(callback); } @Override public Get setFields(java.lang.String fields) { return (Get) super.setFields(fields); } @Override public Get setKey(java.lang.String key) { return (Get) super.setKey(key); } @Override public Get setOauthToken(java.lang.String oauthToken) { return (Get) super.setOauthToken(oauthToken); } @Override public Get setPrettyPrint(java.lang.Boolean prettyPrint) { return (Get) super.setPrettyPrint(prettyPrint); } @Override public Get setQuotaUser(java.lang.String quotaUser) { return (Get) super.setQuotaUser(quotaUser); } @Override public Get setUploadType(java.lang.String uploadType) { return (Get) super.setUploadType(uploadType); } @Override public Get setUploadProtocol(java.lang.String uploadProtocol) { return (Get) 
super.setUploadProtocol(uploadProtocol); } /** The name of the operation resource. */ @com.google.api.client.util.Key private java.lang.String name; /** The name of the operation resource. */ public java.lang.String getName() { return name; } /** The name of the operation resource. */ public Get setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/locations/[^/]+/operations/[^/]+$"); } this.name = name; return this; } @Override public Get set(String parameterName, Object value) { return (Get) super.set(parameterName, value); } } /** * Lists operations that match the specified filter in the request. Authorization requires the * following [Google IAM](https://cloud.google.com/iam) permission * * * `lifesciences.operations.list` * * Create a request for the method "operations.list". * * This request holds the parameters needed by the lifesciences server. After setting any optional * parameters, call the {@link List#execute()} method to invoke the remote operation. * * @param name The name of the operation's parent resource. * @return the request */ public List list(java.lang.String name) throws java.io.IOException { List result = new List(name); initialize(result); return result; } public class List extends CloudLifeSciencesRequest<com.google.api.services.lifesciences.v2beta.model.ListOperationsResponse> { private static final String REST_PATH = "v2beta/{+name}/operations"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/locations/[^/]+$"); /** * Lists operations that match the specified filter in the request. Authorization requires the * following [Google IAM](https://cloud.google.com/iam) permission * * * `lifesciences.operations.list` * * Create a request for the method "operations.list". 
* * This request holds the parameters needed by the the lifesciences server. After setting any * optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p> * {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param name The name of the operation's parent resource. * @since 1.13 */ protected List(java.lang.String name) { super(CloudLifeSciences.this, "GET", REST_PATH, null, com.google.api.services.lifesciences.v2beta.model.ListOperationsResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/locations/[^/]+$"); } } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public List set$Xgafv(java.lang.String $Xgafv) { return (List) super.set$Xgafv($Xgafv); } @Override public List setAccessToken(java.lang.String accessToken) { return (List) super.setAccessToken(accessToken); } @Override public List setAlt(java.lang.String alt) { return (List) super.setAlt(alt); } @Override public List setCallback(java.lang.String callback) { return (List) super.setCallback(callback); } @Override public List setFields(java.lang.String fields) { return (List) super.setFields(fields); } @Override public List setKey(java.lang.String key) { return (List) super.setKey(key); } @Override public List setOauthToken(java.lang.String oauthToken) { return (List) super.setOauthToken(oauthToken); } @Override 
public List setPrettyPrint(java.lang.Boolean prettyPrint) { return (List) super.setPrettyPrint(prettyPrint); } @Override public List setQuotaUser(java.lang.String quotaUser) { return (List) super.setQuotaUser(quotaUser); } @Override public List setUploadType(java.lang.String uploadType) { return (List) super.setUploadType(uploadType); } @Override public List setUploadProtocol(java.lang.String uploadProtocol) { return (List) super.setUploadProtocol(uploadProtocol); } /** The name of the operation's parent resource. */ @com.google.api.client.util.Key private java.lang.String name; /** The name of the operation's parent resource. */ public java.lang.String getName() { return name; } /** The name of the operation's parent resource. */ public List setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/locations/[^/]+$"); } this.name = name; return this; } /** * A string for filtering Operations. The following filter fields are supported * * * createTime The time this job was created * events The set of event (names) that have * occurred while running the pipeline. The operator can be used to determine if a * particular event has occurred. * error If the pipeline is running, this value is NULL. * Once the pipeline finishes, the value is the standard Google error code. * labels.key * or labels."key with space" where key is a label key. * done If the pipeline is running, * this value is false. Once the pipeline finishes, the value is true. */ @com.google.api.client.util.Key private java.lang.String filter; /** A string for filtering Operations. The following filter fields are supported * createTime The time this job was created * events The set of event (names) that have occurred while running the pipeline. The operator can be used to determine if a particular event has occurred. 
* error If the pipeline is running, this value is NULL. Once the pipeline finishes, the value is the standard Google error code. * labels.key or labels."key with space" where key is a label key. * done If the pipeline is running, this value is false. Once the pipeline finishes, the value is true. */ public java.lang.String getFilter() { return filter; } /** * A string for filtering Operations. The following filter fields are supported * * * createTime The time this job was created * events The set of event (names) that have * occurred while running the pipeline. The operator can be used to determine if a * particular event has occurred. * error If the pipeline is running, this value is NULL. * Once the pipeline finishes, the value is the standard Google error code. * labels.key * or labels."key with space" where key is a label key. * done If the pipeline is running, * this value is false. Once the pipeline finishes, the value is true. */ public List setFilter(java.lang.String filter) { this.filter = filter; return this; } /** The maximum number of results to return. The maximum value is 256. */ @com.google.api.client.util.Key private java.lang.Integer pageSize; /** The maximum number of results to return. The maximum value is 256. */ public java.lang.Integer getPageSize() { return pageSize; } /** The maximum number of results to return. The maximum value is 256. */ public List setPageSize(java.lang.Integer pageSize) { this.pageSize = pageSize; return this; } /** The standard list page token. */ @com.google.api.client.util.Key private java.lang.String pageToken; /** The standard list page token. */ public java.lang.String getPageToken() { return pageToken; } /** The standard list page token. 
*/ public List setPageToken(java.lang.String pageToken) { this.pageToken = pageToken; return this; } @Override public List set(String parameterName, Object value) { return (List) super.set(parameterName, value); } } } /** * An accessor for creating requests from the Pipelines collection. * * <p>The typical use is:</p> * <pre> * {@code CloudLifeSciences lifesciences = new CloudLifeSciences(...);} * {@code CloudLifeSciences.Pipelines.List request = lifesciences.pipelines().list(parameters ...)} * </pre> * * @return the resource collection */ public Pipelines pipelines() { return new Pipelines(); } /** * The "pipelines" collection of methods. */ public class Pipelines { /** * Runs a pipeline. The returned Operation's metadata field will contain a * google.cloud.lifesciences.v2beta.Metadata object describing the status of the pipeline execution. * The response field will contain a google.cloud.lifesciences.v2beta.RunPipelineResponse object if * the pipeline completes successfully. * * **Note:** Before you can use this method, the *Life Sciences Service Agent* must have access to * your project. This is done automatically when the Cloud Life Sciences API is first enabled, but * if you delete this permission you must disable and re-enable the API to grant the Life Sciences * Service Agent the required permissions. Authorization requires the following [Google * IAM](https://cloud.google.com/iam/) permission: * * * `lifesciences.workflows.run` * * Create a request for the method "pipelines.run". * * This request holds the parameters needed by the lifesciences server. After setting any optional * parameters, call the {@link Run#execute()} method to invoke the remote operation. * * @param parent The project and location that this request should be executed against. 
* @param content the {@link com.google.api.services.lifesciences.v2beta.model.RunPipelineRequest} * @return the request */ public Run run(java.lang.String parent, com.google.api.services.lifesciences.v2beta.model.RunPipelineRequest content) throws java.io.IOException { Run result = new Run(parent, content); initialize(result); return result; } public class Run extends CloudLifeSciencesRequest<com.google.api.services.lifesciences.v2beta.model.Operation> { private static final String REST_PATH = "v2beta/{+parent}/pipelines:run"; private final java.util.regex.Pattern PARENT_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/locations/[^/]+$"); /** * Runs a pipeline. The returned Operation's metadata field will contain a * google.cloud.lifesciences.v2beta.Metadata object describing the status of the pipeline * execution. The response field will contain a * google.cloud.lifesciences.v2beta.RunPipelineResponse object if the pipeline completes * successfully. * * **Note:** Before you can use this method, the *Life Sciences Service Agent* must have access to * your project. This is done automatically when the Cloud Life Sciences API is first enabled, but * if you delete this permission you must disable and re-enable the API to grant the Life Sciences * Service Agent the required permissions. Authorization requires the following [Google * IAM](https://cloud.google.com/iam/) permission: * * * `lifesciences.workflows.run` * * Create a request for the method "pipelines.run". * * This request holds the parameters needed by the the lifesciences server. After setting any * optional parameters, call the {@link Run#execute()} method to invoke the remote operation. <p> * {@link Run#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param parent The project and location that this request should be executed against. 
* @param content the {@link com.google.api.services.lifesciences.v2beta.model.RunPipelineRequest} * @since 1.13 */ protected Run(java.lang.String parent, com.google.api.services.lifesciences.v2beta.model.RunPipelineRequest content) { super(CloudLifeSciences.this, "POST", REST_PATH, content, com.google.api.services.lifesciences.v2beta.model.Operation.class); this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(), "Parameter parent must conform to the pattern " + "^projects/[^/]+/locations/[^/]+$"); } } @Override public Run set$Xgafv(java.lang.String $Xgafv) { return (Run) super.set$Xgafv($Xgafv); } @Override public Run setAccessToken(java.lang.String accessToken) { return (Run) super.setAccessToken(accessToken); } @Override public Run setAlt(java.lang.String alt) { return (Run) super.setAlt(alt); } @Override public Run setCallback(java.lang.String callback) { return (Run) super.setCallback(callback); } @Override public Run setFields(java.lang.String fields) { return (Run) super.setFields(fields); } @Override public Run setKey(java.lang.String key) { return (Run) super.setKey(key); } @Override public Run setOauthToken(java.lang.String oauthToken) { return (Run) super.setOauthToken(oauthToken); } @Override public Run setPrettyPrint(java.lang.Boolean prettyPrint) { return (Run) super.setPrettyPrint(prettyPrint); } @Override public Run setQuotaUser(java.lang.String quotaUser) { return (Run) super.setQuotaUser(quotaUser); } @Override public Run setUploadType(java.lang.String uploadType) { return (Run) super.setUploadType(uploadType); } @Override public Run setUploadProtocol(java.lang.String uploadProtocol) { return (Run) super.setUploadProtocol(uploadProtocol); } /** The project and location that this request should be executed against. 
*/ @com.google.api.client.util.Key private java.lang.String parent; /** The project and location that this request should be executed against. */ public java.lang.String getParent() { return parent; } /** The project and location that this request should be executed against. */ public Run setParent(java.lang.String parent) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(), "Parameter parent must conform to the pattern " + "^projects/[^/]+/locations/[^/]+$"); } this.parent = parent; return this; } @Override public Run set(String parameterName, Object value) { return (Run) super.set(parameterName, value); } } } } } /** * Builder for {@link CloudLifeSciences}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { /** * Returns an instance of a new builder. * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, 
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link CloudLifeSciences}. */ @Override public CloudLifeSciences build() { return new CloudLifeSciences(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link CloudLifeSciencesRequestInitializer}. 
* * @since 1.12 */ public Builder setCloudLifeSciencesRequestInitializer( CloudLifeSciencesRequestInitializer cloudlifesciencesRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(cloudlifesciencesRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
package org.testng.xml;

import org.testng.collections.Lists;
import org.testng.collections.Maps;
import org.testng.internal.YamlParser;
import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * <code>Parser</code> is a parser for a TestNG XML test suite file.
 */
public class Parser {

  /** The name of the TestNG DTD. */
  public static final String TESTNG_DTD = "testng-1.0.dtd";

  /** The URL to the deprecated TestNG DTD. */
  public static final String DEPRECATED_TESTNG_DTD_URL = "http://beust.com/testng/" + TESTNG_DTD;

  /** The URL to the TestNG DTD. */
  public static final String TESTNG_DTD_URL = "http://testng.org/" + TESTNG_DTD;

  /** The default file name for the TestNG test suite if none is specified (testng.xml). */
  public static final String DEFAULT_FILENAME = "testng.xml";

  private static final IFileParser<XmlSuite> XML_PARSER = new SuiteXmlParser();
  private static final IFileParser<XmlSuite> YAML_PARSER = new YamlParser();
  private static final IFileParser<XmlSuite> DEFAULT_FILE_PARSER = XML_PARSER;

  /**
   * The file name of the xml suite being parsed. Never null after init():
   * falls back to DEFAULT_FILENAME when no name is given.
   */
  private String m_fileName;

  /** Optional stream to read the main suite from; takes precedence over m_fileName. */
  private InputStream m_inputStream;

  /** Optional hook applied to the parsed suites before parse() returns them. */
  private IPostProcessor m_postProcessor;

  /** If false, classes referenced by the suite are not loaded while parsing. */
  private boolean m_loadClasses = true;

  /**
   * Constructs a <code>Parser</code> that reads the xml test suite from the
   * given file.
   *
   * @param fileName the suite file to parse, or null to fall back to
   * DEFAULT_FILENAME.
   */
  public Parser(String fileName) {
    init(fileName, null);
  }

  /**
   * Creates a parser that will try to find the DEFAULT_FILENAME from the jar.
   *
   * @throws FileNotFoundException if the DEFAULT_FILENAME resource is not
   * found in the classpath.
   */
  public Parser() throws FileNotFoundException {
    init(null, null);
  }

  /**
   * Constructs a <code>Parser</code> to use the inputStream as the source of
   * the xml test suite to parse.
   *
   * @param is the stream containing the suite definition.
   */
  public Parser(InputStream is) {
    init(null, is);
  }

  /** Common constructor logic: records where the suite will be read from. */
  private void init(String fileName, InputStream is) {
    m_fileName = fileName != null ? fileName : DEFAULT_FILENAME;
    m_inputStream = is;
  }

  /** Sets the post processor invoked on the parsed suites. */
  public void setPostProcessor(IPostProcessor processor) {
    m_postProcessor = processor;
  }

  /**
   * If false, don't try to load the classes during the parsing.
   */
  public void setLoadClasses(boolean loadClasses) {
    m_loadClasses = loadClasses;
  }

  /**
   * Selects the file parser matching the file extension: .xml and .yaml are
   * recognized; anything else gets the default (XML) parser.
   */
  private IFileParser<XmlSuite> getParser(String fileName) {
    if (fileName.endsWith(".xml")) {
      return XML_PARSER;
    }
    if (fileName.endsWith(".yaml")) {
      return YAML_PARSER;
    }
    return DEFAULT_FILE_PARSER;
  }

  /**
   * Parses the TestNG test suite and returns the corresponding XmlSuite,
   * and possibly, other XmlSuite that are pointed to by &lt;suite-files&gt;
   * tags.
   *
   * @return the parsed TestNG test suite.
   *
   * @throws ParserConfigurationException if a suite file cannot be parsed.
   * @throws SAXException if a suite file is not well-formed.
   * @throws IOException if an I/O error occurs while parsing the test suite file or
   * if the default testng.xml file is not found.
   */
  public Collection<XmlSuite> parse()
      throws ParserConfigurationException, SAXException, IOException {
    // Each suite found is put in this list, using their canonical
    // path to make sure we don't add a same file twice
    // (e.g. "testng.xml" and "./testng.xml")
    List<String> processedSuites = Lists.newArrayList();
    XmlSuite resultSuite = null;

    List<String> toBeParsed = Lists.newArrayList();
    List<String> toBeAdded = Lists.newArrayList();
    List<String> toBeRemoved = Lists.newArrayList();

    if (m_fileName != null) {
      File mainFile = new File(m_fileName);
      toBeParsed.add(mainFile.getCanonicalPath());
    }

    // Keeps track of the parent XmlSuite for each child suite
    Map<String, XmlSuite> childToParentMap = Maps.newHashMap();

    while (!toBeParsed.isEmpty()) {
      for (String currentFile : toBeParsed) {
        File currFile = new File(currentFile);
        File parentFile = currFile.getParentFile();

        // Use the caller-supplied stream when one was given, otherwise open
        // the file ourselves.
        InputStream inputStream = m_inputStream != null
            ? m_inputStream
            : new FileInputStream(currentFile);
        XmlSuite currentXmlSuite;
        try {
          IFileParser<XmlSuite> fileParser = getParser(currentFile);
          currentXmlSuite = fileParser.parse(currentFile, inputStream, m_loadClasses);
        }
        finally {
          // Only close streams this method opened; the caller owns m_inputStream.
          if (inputStream != m_inputStream) {
            inputStream.close();
          }
        }
        processedSuites.add(currentFile);
        toBeRemoved.add(currentFile);

        if (childToParentMap.containsKey(currentFile)) {
          XmlSuite parentSuite = childToParentMap.get(currentFile);
          // Set parent
          currentXmlSuite.setParentSuite(parentSuite);
          // Append children
          parentSuite.getChildSuites().add(currentXmlSuite);
        }

        // The first suite parsed is the root of the result.
        if (null == resultSuite) {
          resultSuite = currentXmlSuite;
        }

        for (String path : currentXmlSuite.getSuiteFiles()) {
          // Resolve <suite-files> entries relative to the parent suite's
          // directory when such a file exists there, otherwise relative to
          // the current working directory.
          String canonicalPath;
          if (parentFile != null && new File(parentFile, path).exists()) {
            canonicalPath = new File(parentFile, path).getCanonicalPath();
          }
          else {
            canonicalPath = new File(path).getCanonicalPath();
          }
          if (!processedSuites.contains(canonicalPath)) {
            toBeAdded.add(canonicalPath);
            childToParentMap.put(canonicalPath, currentXmlSuite);
          }
        }
      }

      //
      // Add and remove files from toBeParsed before we loop
      //
      toBeParsed.removeAll(toBeRemoved);
      toBeRemoved.clear();
      toBeParsed.addAll(toBeAdded);
      toBeAdded.clear();
    }

    // Returning a list holding the single root suite to keep changes minimal
    List<XmlSuite> resultList = Lists.newArrayList();
    resultList.add(resultSuite);

    if (m_postProcessor != null) {
      return m_postProcessor.process(resultList);
    }
    return resultList;
  }

  /**
   * Parses the suite and returns the result as a list.
   *
   * @return the parsed suites as a list.
   *
   * @throws ParserConfigurationException if a suite file cannot be parsed.
   * @throws SAXException if a suite file is not well-formed.
   * @throws IOException if an I/O error occurs while parsing.
   */
  public List<XmlSuite> parseToList()
      throws ParserConfigurationException, SAXException, IOException {
    List<XmlSuite> result = Lists.newArrayList();
    result.addAll(parse());
    return result;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.core.data; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.jcr.RepositoryException; import org.apache.jackrabbit.core.cluster.ClusterException; import org.apache.jackrabbit.core.cluster.ClusterNode; import org.apache.jackrabbit.core.cluster.SimpleClusterContext; import org.apache.jackrabbit.core.cluster.UpdateEventChannel; import org.apache.jackrabbit.core.cluster.UpdateEventListener; import org.apache.jackrabbit.core.config.ClusterConfig; import org.apache.jackrabbit.core.id.NodeId; import org.apache.jackrabbit.core.journal.Journal; import org.apache.jackrabbit.core.journal.JournalFactory; import org.apache.jackrabbit.core.journal.MemoryJournal; import org.apache.jackrabbit.core.journal.MemoryJournal.MemoryRecord; import org.apache.jackrabbit.core.observation.EventState; import org.apache.jackrabbit.core.persistence.bundle.AbstractBundlePersistenceManager; import org.apache.jackrabbit.core.persistence.bundle.ConsistencyCheckerImpl; import org.apache.jackrabbit.core.persistence.check.ReportItem; import 
org.apache.jackrabbit.core.persistence.util.BLOBStore; import org.apache.jackrabbit.core.persistence.util.NodePropBundle; import org.apache.jackrabbit.core.state.ChangeLog; import org.apache.jackrabbit.core.state.ItemStateException; import org.apache.jackrabbit.core.state.NoSuchItemStateException; import org.apache.jackrabbit.core.state.NodeReferences; import org.apache.jackrabbit.spi.NameFactory; import org.apache.jackrabbit.spi.commons.name.NameConstants; import org.apache.jackrabbit.spi.commons.name.NameFactoryImpl; import org.apache.jackrabbit.spi.commons.namespace.NamespaceResolver; import junit.framework.TestCase; public class ConsistencyCheckerImplTest extends TestCase { private static final NameFactory nameFactory = NameFactoryImpl.getInstance(); /** Default sync delay: 5 seconds. */ private static final long SYNC_DELAY = 5000; private List<MemoryRecord> records = new ArrayList<MemoryRecord>(); private ClusterNode master; private ClusterNode slave; @Override public void setUp() throws Exception { super.setUp(); master = createClusterNode("master"); master.start(); slave = createClusterNode("slave"); slave.start(); } // Abandoned nodes are nodes that have a link to a parent but that // parent does not have a link back to the child public void testFixAbandonedNode() throws RepositoryException, ClusterException { NodePropBundle bundle1 = new NodePropBundle(new NodeId(0, 0)); NodePropBundle bundle2 = new NodePropBundle(new NodeId(0, 1)); // node2 has a reference to node 1 as its parent, but node 1 doesn't have // a corresponding child node entry bundle2.setParentId(bundle1.getId()); MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(bundle1, bundle2)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, null, master.createUpdateChannel("default")); // set up cluster event update listener final TestUpdateEventListener listener = new TestUpdateEventListener(); final UpdateEventChannel slaveEventChannel = 
slave.createUpdateChannel("default"); slaveEventChannel.setListener(listener); checker.check(null, false); Set<ReportItem> reportItems = checker.getReport().getItems(); assertEquals(1, reportItems.size()); ReportItem reportItem = reportItems.iterator().next(); assertEquals(ReportItem.Type.ABANDONED, reportItem.getType()); assertEquals(bundle2.getId().toString(), reportItem.getNodeId()); checker.repair(); // node1 should now have a child node entry for node2 bundle1 = pm.loadBundle(bundle1.getId()); assertEquals(1, bundle1.getChildNodeEntries().size()); assertEquals(bundle2.getId(), bundle1.getChildNodeEntries().get(0).getId()); slave.sync(); // verify events were correctly broadcast to cluster assertNotNull("Cluster node did not receive update event", listener.changes); assertTrue("Expected node1 to be modified", listener.changes.isModified(bundle1.getId())); } public void testDoubleCheckAbandonedNode() throws RepositoryException { NodePropBundle bundle1 = new NodePropBundle(new NodeId(0, 0)); NodePropBundle bundle2 = new NodePropBundle(new NodeId(0, 1)); // node2 has a reference to node 1 as its parent, but node 1 doesn't have // a corresponding child node entry bundle2.setParentId(bundle1.getId()); MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(bundle1, bundle2)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, null, null); checker.check(null, false); Set<ReportItem> reportItems = checker.getReport().getItems(); assertEquals(1, reportItems.size()); ReportItem reportItem = reportItems.iterator().next(); assertEquals(ReportItem.Type.ABANDONED, reportItem.getType()); assertEquals(bundle2.getId().toString(), reportItem.getNodeId()); checker.doubleCheckErrors(); assertFalse("Double check removed valid error", checker.getReport().getItems().isEmpty()); // fix the error bundle1.addChildNodeEntry(nameFactory.create("", "test"), bundle2.getId()); checker.doubleCheckErrors(); assertTrue("Double check didn't remove invalid error", 
checker.getReport().getItems().isEmpty()); } /* * There was a bug where when there were multiple abandoned nodes by the same parent * only one of them was fixed. Hence this separate test case for this scenario. */ public void testFixMultipleAbandonedNodesBySameParent() throws RepositoryException { NodePropBundle bundle1 = new NodePropBundle(new NodeId(0, 0)); NodePropBundle bundle2 = new NodePropBundle(new NodeId(0, 1)); NodePropBundle bundle3 = new NodePropBundle(new NodeId(1, 0)); // node2 and node3 have a reference to node1 as its parent, but node1 doesn't have // corresponding child node entries bundle2.setParentId(bundle1.getId()); bundle3.setParentId(bundle1.getId()); MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(bundle1, bundle2, bundle3)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, null, null); checker.check(null, false); checker.repair(); // node1 should now have child node entries for node2 and node3 bundle1 = pm.loadBundle(bundle1.getId()); assertEquals(2, bundle1.getChildNodeEntries().size()); assertEquals(bundle2.getId(), bundle1.getChildNodeEntries().get(0).getId()); assertEquals(bundle3.getId(), bundle1.getChildNodeEntries().get(1).getId()); } // Orphaned nodes are those nodes who's parent does not exist public void testAddOrphanedNodeToLostAndFound() throws RepositoryException, ClusterException { final NodeId lostAndFoundId = new NodeId(0, 0); NodePropBundle lostAndFound = new NodePropBundle(lostAndFoundId); // lost and found must be of type nt:unstructured lostAndFound.setNodeTypeName(NameConstants.NT_UNSTRUCTURED); final NodeId orphanedId = new NodeId(0, 1); NodePropBundle orphaned = new NodePropBundle(orphanedId); // set non-existent parent node id orphaned.setParentId(new NodeId(1, 0)); MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(lostAndFound, orphaned)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, lostAndFoundId.toString(), 
master.createUpdateChannel("default")); // set up cluster event update listener final TestUpdateEventListener listener = new TestUpdateEventListener(); final UpdateEventChannel slaveEventChannel = slave.createUpdateChannel("default"); slaveEventChannel.setListener(listener); checker.check(null, false); Set<ReportItem> reportItems = checker.getReport().getItems(); assertEquals(1, reportItems.size()); ReportItem reportItem = reportItems.iterator().next(); assertEquals(ReportItem.Type.ORPHANED, reportItem.getType()); assertEquals(orphanedId.toString(), reportItem.getNodeId()); checker.repair(); // orphan should have been added to lost+found lostAndFound = pm.loadBundle(lostAndFoundId); assertEquals(1, lostAndFound.getChildNodeEntries().size()); assertEquals(orphanedId, lostAndFound.getChildNodeEntries().get(0).getId()); orphaned = pm.loadBundle(orphanedId); assertEquals(lostAndFoundId, orphaned.getParentId()); slave.sync(); // verify events were correctly broadcast to cluster assertNotNull("Cluster node did not receive update event", listener.changes); assertTrue("Expected lostAndFound to be modified", listener.changes.isModified(lostAndFoundId)); assertTrue("Expected orphan to be modified", listener.changes.isModified(orphanedId)); } public void testDoubleCheckOrphanedNode() throws RepositoryException { NodePropBundle orphaned = new NodePropBundle(new NodeId(0, 1)); orphaned.setParentId(new NodeId(1, 0)); MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(orphaned)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, null, null); checker.check(null, false); Set<ReportItem> reportItems = checker.getReport().getItems(); assertEquals(1, reportItems.size()); ReportItem reportItem = reportItems.iterator().next(); assertEquals(ReportItem.Type.ORPHANED, reportItem.getType()); assertEquals(orphaned.getId().toString(), reportItem.getNodeId()); checker.doubleCheckErrors(); assertFalse("Double check removed valid error", 
checker.getReport().getItems().isEmpty()); // fix the error NodePropBundle parent = new NodePropBundle(orphaned.getParentId()); pm.bundles.put(parent.getId(), parent); checker.doubleCheckErrors(); assertTrue("Double check didn't remove invalid error", checker.getReport().getItems().isEmpty()); } // Disconnected nodes are those nodes for which there are nodes // that have the node as its child, but the node itself does not // have those nodes as its parent public void testFixDisconnectedNode() throws RepositoryException, ClusterException { NodePropBundle bundle1 = new NodePropBundle(new NodeId(0, 0)); NodePropBundle bundle2 = new NodePropBundle(new NodeId(0, 1)); NodePropBundle bundle3 = new NodePropBundle(new NodeId(1, 0)); // node1 has child node3 bundle1.addChildNodeEntry(nameFactory.create("", "test"), bundle3.getId()); // node2 also has child node3 bundle2.addChildNodeEntry(nameFactory.create("", "test"), bundle3.getId()); // node3 has node2 as parent bundle3.setParentId(bundle2.getId()); MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(bundle1, bundle2, bundle3)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, null, master.createUpdateChannel("default")); // set up cluster event update listener final TestUpdateEventListener listener = new TestUpdateEventListener(); final UpdateEventChannel slaveEventChannel = slave.createUpdateChannel("default"); slaveEventChannel.setListener(listener); checker.check(null, false); Set<ReportItem> reportItems = checker.getReport().getItems(); assertEquals(1, reportItems.size()); ReportItem reportItem = reportItems.iterator().next(); assertEquals(ReportItem.Type.DISCONNECTED, reportItem.getType()); assertEquals(bundle1.getId().toString(), reportItem.getNodeId()); checker.repair(); bundle1 = pm.loadBundle(bundle1.getId()); bundle2 = pm.loadBundle(bundle2.getId()); bundle3 = pm.loadBundle(bundle3.getId()); // node3 should have been removed as child node entry of node1 assertEquals(0, 
bundle1.getChildNodeEntries().size()); // node3 should still be a child of node2 assertEquals(1, bundle2.getChildNodeEntries().size()); assertEquals(bundle2.getId(), bundle3.getParentId()); slave.sync(); // verify events were correctly broadcast to cluster assertNotNull("Cluster node did not receive update event", listener.changes); assertTrue("Expected node1 to be modified", listener.changes.isModified(bundle1.getId())); } public void testDoubleCheckDisonnectedNode() throws RepositoryException { NodePropBundle bundle1 = new NodePropBundle(new NodeId(0, 0)); NodePropBundle bundle2 = new NodePropBundle(new NodeId(0, 1)); NodePropBundle bundle3 = new NodePropBundle(new NodeId(1, 0)); // node1 has child node3 bundle1.addChildNodeEntry(nameFactory.create("", "test"), bundle3.getId()); // node2 also has child node3 bundle2.addChildNodeEntry(nameFactory.create("", "test"), bundle3.getId()); // node3 has node2 as parent bundle3.setParentId(bundle2.getId()); MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(bundle1, bundle2, bundle3)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, null, null); checker.check(null, false); Set<ReportItem> reportItems = checker.getReport().getItems(); assertEquals(1, reportItems.size()); ReportItem reportItem = reportItems.iterator().next(); assertEquals(ReportItem.Type.DISCONNECTED, reportItem.getType()); assertEquals(bundle1.getId().toString(), reportItem.getNodeId()); checker.doubleCheckErrors(); assertFalse("Double check removed valid error", checker.getReport().getItems().isEmpty()); // fix the error bundle1.getChildNodeEntries().remove(0); checker.doubleCheckErrors(); assertTrue("Double check didn't remove invalid error", checker.getReport().getItems().isEmpty()); } public void testFixMissingNode() throws RepositoryException, ClusterException { NodePropBundle bundle = new NodePropBundle(new NodeId(0, 0)); bundle.addChildNodeEntry(nameFactory.create("", "test"), new NodeId(0, 1)); 
MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(bundle)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, null, master.createUpdateChannel("default")); // set up cluster event update listener final TestUpdateEventListener listener = new TestUpdateEventListener(); final UpdateEventChannel slaveEventChannel = slave.createUpdateChannel("default"); slaveEventChannel.setListener(listener); checker.check(null, false); Set<ReportItem> reportItems = checker.getReport().getItems(); assertEquals(1, reportItems.size()); ReportItem reportItem = reportItems.iterator().next(); assertEquals(ReportItem.Type.MISSING, reportItem.getType()); assertEquals(bundle.getId().toString(), reportItem.getNodeId()); checker.repair(); // node should have no child no entries assertTrue(bundle.getChildNodeEntries().isEmpty()); slave.sync(); // verify events were correctly broadcast to cluster assertNotNull("Cluster node did not receive update event", listener.changes); assertTrue("Expected node to be modified", listener.changes.isModified(bundle.getId())); } public void testDoubleCheckMissingNode() throws RepositoryException { NodePropBundle bundle = new NodePropBundle(new NodeId(0, 0)); final NodeId childNodeId = new NodeId(0, 1); bundle.addChildNodeEntry(nameFactory.create("", "test"), childNodeId); MockPersistenceManager pm = new MockPersistenceManager(Arrays.asList(bundle)); ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(pm, null, null, null); checker.check(null, false); Set<ReportItem> reportItems = checker.getReport().getItems(); assertEquals(1, reportItems.size()); ReportItem reportItem = reportItems.iterator().next(); assertEquals(ReportItem.Type.MISSING, reportItem.getType()); assertEquals(bundle.getId().toString(), reportItem.getNodeId()); checker.doubleCheckErrors(); assertFalse("Double check removed valid error", checker.getReport().getItems().isEmpty()); // fix the error NodePropBundle child = new NodePropBundle(childNodeId); 
pm.bundles.put(childNodeId, child); checker.doubleCheckErrors(); assertTrue("Double check didn't remove invalid error", checker.getReport().getItems().isEmpty()); } private ClusterNode createClusterNode(String id) throws Exception { final MemoryJournal journal = new MemoryJournal() { protected boolean syncAgainOnNewRecords() { return true; } }; JournalFactory jf = new JournalFactory() { public Journal getJournal(NamespaceResolver resolver) throws RepositoryException { return journal; } }; ClusterConfig cc = new ClusterConfig(id, SYNC_DELAY, jf); SimpleClusterContext context = new SimpleClusterContext(cc); journal.setRepositoryHome(context.getRepositoryHome()); journal.init(id, context.getNamespaceResolver()); journal.setRecords(records); ClusterNode clusterNode = new ClusterNode(); clusterNode.init(context); return clusterNode; } private static class MockPersistenceManager extends AbstractBundlePersistenceManager { private Map<NodeId, NodePropBundle> bundles = new LinkedHashMap<NodeId, NodePropBundle>(); private MockPersistenceManager(List<NodePropBundle> bundles) { for (NodePropBundle bundle : bundles) { this.bundles.put(bundle.getId(), bundle); } } public List<NodeId> getAllNodeIds(final NodeId after, final int maxCount) throws ItemStateException, RepositoryException { List<NodeId> allNodeIds = new ArrayList<NodeId>(); boolean add = after == null; for (NodeId nodeId : bundles.keySet()) { if (add) { allNodeIds.add(nodeId); } if (!add) { add = nodeId.equals(after); } } return allNodeIds; } @Override protected NodePropBundle loadBundle(final NodeId id) { return bundles.get(id); } @Override protected void evictBundle(final NodeId id) { } @Override protected void storeBundle(final NodePropBundle bundle) throws ItemStateException { bundles.put(bundle.getId(), bundle); } @Override protected void destroyBundle(final NodePropBundle bundle) throws ItemStateException { bundles.remove(bundle.getId()); } @Override protected void destroy(final NodeReferences refs) throws 
ItemStateException { } @Override protected void store(final NodeReferences refs) throws ItemStateException { } @Override protected BLOBStore getBlobStore() { return null; } public NodeReferences loadReferencesTo(final NodeId id) throws NoSuchItemStateException, ItemStateException { return null; } public boolean existsReferencesTo(final NodeId targetId) throws ItemStateException { return false; } } private static class TestUpdateEventListener implements UpdateEventListener { private ChangeLog changes; @Override public void externalUpdate(final ChangeLog changes, final List<EventState> events, final long timestamp, final String userData) throws RepositoryException { this.changes = changes; } } }
/** * Created by Nicholas Hallahan on 1/2/15. * nhallahan@spatialdev.com */ package com.spatialdev.osm.model; import java.io.IOException; import java.text.ParseException; import java.util.ArrayList; import java.util.Date; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import com.spatialdev.osm.OSMUtil; import com.spatialdev.osm.renderer.OSMPath; import com.vividsolutions.jts.geom.Geometry; import org.xmlpull.v1.XmlSerializer; public abstract class OSMElement { private static LinkedList<OSMElement> selectedElements = new LinkedList<>(); private static boolean selectedElementsChanged = false; private static LinkedList<OSMElement> modifiedElements = new LinkedList<>(); private static LinkedList<OSMElement> modifiedElementsInInstance = new LinkedList<>(); protected final OSMColorConfig osmColorConfig; protected long id; protected long version; protected String timestamp; protected long changeset; protected long uid; protected String user; protected boolean selected = false; // set to true if the tags for this element have been modified protected boolean modified = false; // set to true if the application modifies tags for this element in this instance protected boolean modifiedInInstance = false; protected Geometry jtsGeom; /** * These tags get modified by the application */ protected Map<String, String> tags = new LinkedHashMap<>(); /** * This can be used to keep track of which tag is currently selected in a tag editor * like OpenMapKit. * * * */ protected String selectedTag; /** * These tags are the original tags in the data set. This SHOULD NOT BE MODIFIED. */ protected Map<String, String> originalTags = new LinkedHashMap<>(); /** * This is the object that actually gets drawn by OSMOverlay. 
*/ protected OSMPath osmPath; /** * Elements that have been put in a select state* * @return */ public static LinkedList<OSMElement> getSelectedElements() { return selectedElements; } /** * All of the modified elements we've got in memory, including those in previous * edits of previous survey instances that have been scraped from ODK Collect. * * * * * @return all modified OSMElements */ public static LinkedList<OSMElement> getModifiedElements() { return modifiedElements; } /** * Only the modified elements that have had their tags modified in this survey instance * * * * @return elements with modified tags in this survey instance */ public static LinkedList<OSMElement> getModifiedElementsInInstance() { return modifiedElementsInInstance; } public static boolean hasSelectedElementsChanged() { if (selectedElementsChanged) { selectedElementsChanged = false; return true; } return false; } public static void deselectAll() { for (OSMElement el : selectedElements) { selectedElementsChanged = true; el.deselect(); } } /** * This constructor is used by OSMDataSet in the XML parsing process. */ public OSMElement(String idStr, String versionStr, String timestampStr, String changesetStr, String uidStr, String userStr, String action, OSMColorConfig osmColorConfig) { try { id = Long.valueOf(idStr); } catch (Exception e) { // dont assign } try { version = Long.valueOf(versionStr); } catch (Exception e) { // dont assign } try { timestamp = timestampStr; } catch (Exception e) { // dont assign } try { changeset = Long.valueOf(changesetStr); } catch (Exception e) { // dont assign } try { uid = Long.valueOf(uidStr); } catch (Exception e) { // dont assign } try { user = userStr; } catch (Exception e) { // dont assign } if (action != null && action.equals("modify")) { setAsModified(); } this.osmColorConfig = osmColorConfig; } /** * This constructor is used when we are creating an new OSMElement, * such as when a new Node is created. 
This constructor assumes * that we are creating a NEW element in the current survey. */ public OSMElement(OSMColorConfig osmColorConfig) { id = getUniqueNegativeId(); setAsModifiedInInstance(); this.osmColorConfig = osmColorConfig; } public static long getUniqueNegativeId() { /*negativeId--; return uniqueDevId + negativeId;*/ long random = UUID.randomUUID().getMostSignificantBits(); if(random < 0){ return random; } else { return random * -1; } } /** * All OSM Element types need to have this implemented. This checksum is composed * of the tags sorted alphabetically by key. The rest of the implementation is * defined differently whether it is Node, Way, or Relation. * * @return SHA-1 HEX checksum of the element */ public abstract String checksum(); /** * The tags are sorted by key, and each key, value is * iterated and concatenated to a String. * * @return */ public StringBuilder tagsAsSortedKVString() { List<String> keys = new ArrayList<>(tags.keySet()); java.util.Collections.sort(keys); StringBuilder tagsStr = new StringBuilder(); for (String k : keys) { String v = tags.get(k); if (v.length() > 0) { tagsStr.append(k); tagsStr.append(v); } } return tagsStr; } void xml(XmlSerializer xmlSerializer, String omkOsmUser) throws IOException { // set the tags for the element (all element types can have tags) Set<String> tagKeys = tags.keySet(); for (String tagKey : tagKeys) { String tagVal = tags.get(tagKey); if (tagVal == null || tagVal.equals("")) { continue; } xmlSerializer.startTag(null, "tag"); xmlSerializer.attribute(null, "k", tagKey); xmlSerializer.attribute(null, "v", tagVal); xmlSerializer.endTag(null, "tag"); } } protected void setOsmElementXmlAttributes(XmlSerializer xmlSerializer, String omkOsmUser) throws IOException { xmlSerializer.attribute(null, "id", String.valueOf(id)); if (isModified()) { xmlSerializer.attribute(null, "action", "modify"); } if (version != 0) { xmlSerializer.attribute(null, "version", String.valueOf(version)); } if (changeset != 0) { 
xmlSerializer.attribute(null, "changeset", String.valueOf(changeset)); } /** * If the element just got modified, we want to set the time stamp when the record * is serialized. If it has not been modified or was modified in a previous session, * we want to stay with the previously recorded timestamp. */ xmlSerializer.attribute(null, "timestamp", getTimestamp()); /** * We want to put the OSM user set in OMK Android for all of the elements we are writing. * This is important, because when we are filtering in OMK iD, we need to be able to filter * by OSM user. The OSM user should refer to all elements affected by an edit. This means * that the OMK Android OSM user name should apply to nodes referenced by an edited way * as well (so that filtering gets the complete geometry in). */ xmlSerializer.attribute(null, "user", omkOsmUser); } /** * This method returns the relevant timestamp for this element depending on whether it's been * modified or not * * @return */ public String getTimestamp() { if(modifiedInInstance == true || timestamp == null) { return OSMUtil.nowTimestamp(); } else { return timestamp; } } /** * This method returns the date object representing the timestamp * * @return The {@link Date} object representing the timestamp or null if an error occurred */ public Date getTimestampDate() { String timestamp = getTimestamp(); if(timestamp != null) { try { return OSMUtil.dateFromTimestamp(timestamp); } catch (ParseException e) { e.printStackTrace(); } } return null; } /** * Maintains state over which tag is selected in a tag editor UI * * * * @param tagKey */ public void selectTag(String tagKey) { selectedTag = tagKey; } /** * If a tag is edited or added, this should be called by the application.* * @param k * @param v */ public void addOrEditTag(String k, String v) { // OSM requires tag keys and values to not have trailing whitespaces. 
String trimKey = k.trim(); String trimVal = v.trim(); String origVal = tags.get(trimKey); // if the original tag is the same as this, we're not really editing anything. if (trimVal.equals(origVal)) { return; } setAsModifiedInInstance(); tags.put(trimKey, trimVal); } /** * If the user removes a tag, call this method with the key of the tag.* * @param k */ public void deleteTag(String k) { String origVal = tags.get(k); // Don't do anything if we are not deleting anything. if (origVal == null) { return; } setAsModifiedInInstance(); tags.remove(k); } public boolean isModified() { return modified; } /** * Any element that has been modified, either in the current instance or in previous * survey instances. * * * * */ protected void setAsModified() { modified = true; modifiedElements.add(this); } /** * This is when an element is modified in this survey instance rather than a previous survey. * We need to know this so that the edits can be written to OSM XML in ODK Collect. * * * */ private void setAsModifiedInInstance() { setAsModified(); modifiedInInstance = true; modifiedElementsInInstance.add(this); } /** * This should only be used by the parser. 
* @param k * @param v */ public void addParsedTag(String k, String v) { originalTags.put(k, v); tags.put(k, v); } public long getId() { return id; } public Map<String, String> getTags() { return tags; } public int getTagCount() { return tags.size(); } public void setJTSGeom(Geometry geom) { jtsGeom = geom; } public Geometry getJTSGeom() { return jtsGeom; } public void select() { selectedElementsChanged = true; selected = true; selectedElements.push(this); if (osmPath != null) { osmPath.select(); } } public void deselect() { selectedElementsChanged = true; selected = false; selectedElements.remove(this); if (osmPath != null) { osmPath.deselect(); } } public void toggle() { if (selected) { deselect(); } else { select(); } } public boolean isSelected() { return selected; } public OSMColorConfig getOsmColorConfig() { return this.osmColorConfig; } }
// Copyright 2010 Victor Iacoban // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under // the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, // either express or implied. See the License for the specific language governing permissions and // limitations under the License. package org.zmlx.hg4idea.util; import com.intellij.dvcs.DvcsUtil; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.util.BackgroundTaskUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Couple; import com.intellij.openapi.util.ShutDownTracker; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vcs.FileStatus; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.changes.Change; import com.intellij.openapi.vcs.changes.ChangeListManager; import com.intellij.openapi.vcs.changes.ContentRevision; import com.intellij.openapi.vcs.changes.VcsDirtyScopeManager; import com.intellij.openapi.vcs.history.FileHistoryPanelImpl; import com.intellij.openapi.vcs.history.VcsFileRevisionEx; import com.intellij.openapi.vcs.vfs.AbstractVcsVirtualFile; import com.intellij.openapi.vcs.vfs.VcsVirtualFile; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VfsUtil; import 
com.intellij.openapi.vfs.VirtualFile; import com.intellij.ui.GuiUtils; import com.intellij.util.ArrayUtil; import com.intellij.util.containers.ContainerUtil; import com.intellij.vcsUtil.VcsUtil; import org.jetbrains.annotations.CalledInAwt; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.zmlx.hg4idea.*; import org.zmlx.hg4idea.command.HgCatCommand; import org.zmlx.hg4idea.command.HgRemoveCommand; import org.zmlx.hg4idea.command.HgStatusCommand; import org.zmlx.hg4idea.command.HgWorkingCopyRevisionsCommand; import org.zmlx.hg4idea.execution.HgCommandResult; import org.zmlx.hg4idea.execution.ShellCommand; import org.zmlx.hg4idea.execution.ShellCommandException; import org.zmlx.hg4idea.log.HgHistoryUtil; import org.zmlx.hg4idea.provider.HgChangeProvider; import org.zmlx.hg4idea.repo.HgRepository; import org.zmlx.hg4idea.repo.HgRepositoryManager; import java.io.*; import java.lang.reflect.InvocationTargetException; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * HgUtil is a collection of static utility methods for Mercurial. 
*/ public abstract class HgUtil { public static final Pattern URL_WITH_PASSWORD = Pattern.compile("(?:.+)://(?:.+)(:.+)@(?:.+)"); //http(s)://username:password@url public static final int MANY_FILES = 100; private static final Logger LOG = Logger.getInstance(HgUtil.class); public static final String DOT_HG = ".hg"; public static final String TIP_REFERENCE = "tip"; public static final String HEAD_REFERENCE = "HEAD"; public static File copyResourceToTempFile(String basename, String extension) throws IOException { final InputStream in = HgUtil.class.getClassLoader().getResourceAsStream("python/" + basename + extension); final File tempFile = FileUtil.createTempFile(basename, extension); final byte[] buffer = new byte[4096]; OutputStream out = null; try { out = new FileOutputStream(tempFile, false); int bytesRead; while ((bytesRead = in.read(buffer)) != -1) out.write(buffer, 0, bytesRead); } finally { try { out.close(); } catch (IOException e) { // ignore } } try { in.close(); } catch (IOException e) { // ignore } tempFile.deleteOnExit(); return tempFile; } public static void markDirectoryDirty(final Project project, final VirtualFile file) throws InvocationTargetException, InterruptedException { VfsUtil.markDirtyAndRefresh(true, true, false, file); VcsDirtyScopeManager.getInstance(project).dirDirtyRecursively(file); } public static void markFileDirty(final Project project, final VirtualFile file) throws InvocationTargetException, InterruptedException { ApplicationManager.getApplication().runReadAction(() -> VcsDirtyScopeManager.getInstance(project).fileDirty(file)); runWriteActionAndWait(() -> file.refresh(true, false)); } /** * Runs the given task as a write action in the event dispatching thread and waits for its completion. 
*/ public static void runWriteActionAndWait(@NotNull final Runnable runnable) throws InvocationTargetException, InterruptedException { GuiUtils.runOrInvokeAndWait(() -> ApplicationManager.getApplication().runWriteAction(runnable)); } /** * Schedules the given task to be run as a write action in the event dispatching thread. */ public static void runWriteActionLater(@NotNull final Runnable runnable) { ApplicationManager.getApplication().invokeLater(() -> ApplicationManager.getApplication().runWriteAction(runnable)); } /** * Returns a temporary python file that will be deleted on exit. * * Also all compiled version of the python file will be deleted. * * @param base The basename of the file to copy * @return The temporary copy the specified python file, with all the necessary hooks installed * to make sure it is completely removed at shutdown */ @Nullable public static File getTemporaryPythonFile(String base) { try { final File file = copyResourceToTempFile(base, ".py"); final String fileName = file.getName(); ShutDownTracker.getInstance().registerShutdownTask(() -> { File[] files = file.getParentFile().listFiles((dir, name) -> name.startsWith(fileName)); if (files != null) { for (File file1 : files) { file1.delete(); } } }); return file; } catch (IOException e) { return null; } } /** * Calls 'hg remove' to remove given files from the VCS. * @param project * @param files files to be removed from the VCS. */ public static void removeFilesFromVcs(Project project, List<FilePath> files) { final HgRemoveCommand command = new HgRemoveCommand(project); for (FilePath filePath : files) { final VirtualFile vcsRoot = VcsUtil.getVcsRootFor(project, filePath); if (vcsRoot == null) { continue; } command.executeInCurrentThread(new HgFile(vcsRoot, filePath)); } } /** * Finds the nearest parent directory which is an hg root. * @param dir Directory which parent will be checked. 
* @return Directory which is the nearest hg root being a parent of this directory, * or <code>null</code> if this directory is not under hg. * @see com.intellij.openapi.vcs.AbstractVcs#isVersionedDirectory(VirtualFile) */ @Nullable public static VirtualFile getNearestHgRoot(VirtualFile dir) { VirtualFile currentDir = dir; while (currentDir != null) { if (isHgRoot(currentDir)) { return currentDir; } currentDir = currentDir.getParent(); } return null; } /** * Checks if the given directory is an hg root. */ public static boolean isHgRoot(@Nullable VirtualFile dir) { return dir != null && dir.findChild(DOT_HG) != null; } /** * Gets the Mercurial root for the given file path or null if non exists: * the root should not only be in directory mappings, but also the .hg repository folder should exist. * * @see #getHgRootOrThrow(Project, FilePath) */ @Nullable public static VirtualFile getHgRootOrNull(Project project, FilePath filePath) { if (project == null) { return getNearestHgRoot(VcsUtil.getVirtualFile(filePath.getPath())); } return getNearestHgRoot(VcsUtil.getVcsRootFor(project, filePath)); } /** * Get hg roots for paths * * @param filePaths the context paths * @return a set of hg roots */ @NotNull public static Set<VirtualFile> hgRoots(@NotNull Project project, @NotNull Collection<FilePath> filePaths) { HashSet<VirtualFile> roots = new HashSet<>(); for (FilePath path : filePaths) { ContainerUtil.addIfNotNull(roots, getHgRootOrNull(project, path)); } return roots; } /** * Gets the Mercurial root for the given file path or null if non exists: * the root should not only be in directory mappings, but also the .hg repository folder should exist. 
* @see #getHgRootOrThrow(Project, FilePath) * @see #getHgRootOrNull(Project, FilePath) */ @Nullable public static VirtualFile getHgRootOrNull(Project project, @NotNull VirtualFile file) { return getHgRootOrNull(project, VcsUtil.getFilePath(file.getPath())); } /** * Gets the Mercurial root for the given file path or throws a VcsException if non exists: * the root should not only be in directory mappings, but also the .hg repository folder should exist. * @see #getHgRootOrNull(Project, FilePath) */ @NotNull public static VirtualFile getHgRootOrThrow(Project project, FilePath filePath) throws VcsException { final VirtualFile vf = getHgRootOrNull(project, filePath); if (vf == null) { throw new VcsException(HgVcsMessages.message("hg4idea.exception.file.not.under.hg", filePath.getPresentableUrl())); } return vf; } @NotNull public static VirtualFile getHgRootOrThrow(Project project, VirtualFile file) throws VcsException { return getHgRootOrThrow(project, VcsUtil.getFilePath(file.getPath())); } /** * Shows a message dialog to enter the name of new branch. * * @return name of new branch or {@code null} if user has cancelled the dialog. */ @Nullable public static String getNewBranchNameFromUser(@NotNull HgRepository repository, @NotNull String dialogTitle) { return Messages.showInputDialog(repository.getProject(), "Enter the name of new branch:", dialogTitle, Messages.getQuestionIcon(), "", new HgBranchReferenceValidator(repository)); } /** * Checks is a merge operation is in progress on the given repository. * Actually gets the number of parents of the current revision. If there are 2 parents, then a merge is going on. Otherwise there is * only one parent. * @param project project to work on. * @param repository repository which is checked on merge. * @return True if merge operation is in progress, false if there is no merge operation. 
*/ public static boolean isMergeInProgress(@NotNull Project project, VirtualFile repository) { return new HgWorkingCopyRevisionsCommand(project).parents(repository).size() > 1; } /** * Groups the given files by their Mercurial repositories and returns the map of relative paths to files for each repository. * @param hgFiles files to be grouped. * @return key is repository, values is the non-empty list of relative paths to files, which belong to this repository. */ @NotNull public static Map<VirtualFile, List<String>> getRelativePathsByRepository(Collection<HgFile> hgFiles) { final Map<VirtualFile, List<String>> map = new HashMap<>(); if (hgFiles == null) { return map; } for(HgFile file : hgFiles) { final VirtualFile repo = file.getRepo(); List<String> files = map.get(repo); if (files == null) { files = new ArrayList<>(); map.put(repo, files); } files.add(file.getRelativePath()); } return map; } @NotNull public static HgFile getFileNameInTargetRevision(Project project, HgRevisionNumber vcsRevisionNumber, HgFile localHgFile) { //get file name in target revision if it was moved/renamed // if file was moved but not committed then hg status would return nothing, so it's better to point working dir as '.' revision HgStatusCommand statCommand = new HgStatusCommand.Builder(false).copySource(true).baseRevision(vcsRevisionNumber). 
targetRevision(HgRevisionNumber.getInstance("", ".")).build(project); Set<HgChange> changes = statCommand.executeInCurrentThread(localHgFile.getRepo(), Collections.singletonList(localHgFile.toFilePath())); for (HgChange change : changes) { if (change.afterFile().equals(localHgFile)) { return change.beforeFile(); } } return localHgFile; } @NotNull public static FilePath getOriginalFileName(@NotNull FilePath filePath, ChangeListManager changeListManager) { Change change = changeListManager.getChange(filePath); if (change == null) { return filePath; } FileStatus status = change.getFileStatus(); if (status == HgChangeProvider.COPIED || status == HgChangeProvider.RENAMED) { ContentRevision beforeRevision = change.getBeforeRevision(); assert beforeRevision != null : "If a file's status is copied or renamed, there must be an previous version"; return beforeRevision.getFile(); } else { return filePath; } } @NotNull public static Map<VirtualFile, Collection<VirtualFile>> sortByHgRoots(@NotNull Project project, @NotNull Collection<VirtualFile> files) { Map<VirtualFile, Collection<VirtualFile>> sorted = new HashMap<>(); HgRepositoryManager repositoryManager = getRepositoryManager(project); for (VirtualFile file : files) { HgRepository repo = repositoryManager.getRepositoryForFile(file); if (repo == null) { continue; } Collection<VirtualFile> filesForRoot = sorted.get(repo.getRoot()); if (filesForRoot == null) { filesForRoot = new HashSet<>(); sorted.put(repo.getRoot(), filesForRoot); } filesForRoot.add(file); } return sorted; } @NotNull public static Map<VirtualFile, Collection<FilePath>> groupFilePathsByHgRoots(@NotNull Project project, @NotNull Collection<FilePath> files) { Map<VirtualFile, Collection<FilePath>> sorted = new HashMap<>(); if (project.isDisposed()) return sorted; HgRepositoryManager repositoryManager = getRepositoryManager(project); for (FilePath file : files) { HgRepository repo = repositoryManager.getRepositoryForFile(file); if (repo == null) { continue; } 
Collection<FilePath> filesForRoot = sorted.get(repo.getRoot()); if (filesForRoot == null) { filesForRoot = new HashSet<>(); sorted.put(repo.getRoot(), filesForRoot); } filesForRoot.add(file); } return sorted; } @NotNull public static ProgressIndicator executeOnPooledThread(@NotNull Runnable runnable, @NotNull Disposable parentDisposable) { return BackgroundTaskUtil.executeOnPooledThread(runnable, parentDisposable); } /** * Convert {@link VcsVirtualFile} to the {@link LocalFileSystem local} Virtual File. * * TODO * It is a workaround for the following problem: VcsVirtualFiles returned from the {@link FileHistoryPanelImpl} contain the current path * of the file, not the path that was in certain revision. This has to be fixed by making {@link HgFileRevision} implement * {@link VcsFileRevisionEx}. */ @Nullable public static VirtualFile convertToLocalVirtualFile(@Nullable VirtualFile file) { if (!(file instanceof AbstractVcsVirtualFile)) { return file; } LocalFileSystem lfs = LocalFileSystem.getInstance(); VirtualFile resultFile = lfs.findFileByPath(file.getPath()); if (resultFile == null) { resultFile = lfs.refreshAndFindFileByPath(file.getPath()); } return resultFile; } @NotNull public static List<Change> getDiff(@NotNull final Project project, @NotNull final VirtualFile root, @NotNull final FilePath path, @Nullable final HgRevisionNumber revNum1, @Nullable final HgRevisionNumber revNum2) { HgStatusCommand statusCommand; if (revNum1 != null) { //rev2==null means "compare with local version" statusCommand = new HgStatusCommand.Builder(true).ignored(false).unknown(false).copySource(!path.isDirectory()).baseRevision(revNum1) .targetRevision(revNum2).build(project); } else { LOG.assertTrue(revNum2 != null, "revision1 and revision2 can't both be null. 
Path: " + path); //rev1 and rev2 can't be null both// //get initial changes// statusCommand = new HgStatusCommand.Builder(true).ignored(false).unknown(false).copySource(false).baseRevision(revNum2) .build(project); } Collection<HgChange> hgChanges = statusCommand.executeInCurrentThread(root, Collections.singleton(path)); List<Change> changes = new ArrayList<>(); //convert output changes to standard Change class for (HgChange hgChange : hgChanges) { FileStatus status = convertHgDiffStatus(hgChange.getStatus()); if (status != FileStatus.UNKNOWN) { changes.add(HgHistoryUtil.createChange(project, root, hgChange.beforeFile().getRelativePath(), revNum1, hgChange.afterFile().getRelativePath(), revNum2, status)); } } return changes; } @NotNull public static FileStatus convertHgDiffStatus(@NotNull HgFileStatusEnum hgstatus) { if (hgstatus.equals(HgFileStatusEnum.ADDED)) { return FileStatus.ADDED; } else if (hgstatus.equals(HgFileStatusEnum.DELETED)) { return FileStatus.DELETED; } else if (hgstatus.equals(HgFileStatusEnum.MODIFIED)) { return FileStatus.MODIFIED; } else if (hgstatus.equals(HgFileStatusEnum.COPY)) { return HgChangeProvider.COPIED; } else if (hgstatus.equals(HgFileStatusEnum.UNVERSIONED)) { return FileStatus.UNKNOWN; } else if (hgstatus.equals(HgFileStatusEnum.IGNORED)) { return FileStatus.IGNORED; } else { return FileStatus.UNKNOWN; } } @NotNull public static byte[] loadContent(@NotNull Project project, @Nullable HgRevisionNumber revisionNumber, @NotNull HgFile fileToCat) { HgCommandResult result = new HgCatCommand(project).execute(fileToCat, revisionNumber, fileToCat.toFilePath().getCharset()); return result != null && result.getExitValue() == 0 ? 
result.getBytesOutput() : ArrayUtil.EMPTY_BYTE_ARRAY; } public static String removePasswordIfNeeded(@NotNull String path) { Matcher matcher = URL_WITH_PASSWORD.matcher(path); if (matcher.matches()) { return path.substring(0, matcher.start(1)) + path.substring(matcher.end(1), path.length()); } return path; } @NotNull public static String getDisplayableBranchOrBookmarkText(@NotNull HgRepository repository) { HgRepository.State state = repository.getState(); String branchText = ""; if (state != HgRepository.State.NORMAL) { branchText += state.toString() + " "; } return branchText + repository.getCurrentBranchName(); } @NotNull public static HgRepositoryManager getRepositoryManager(@NotNull Project project) { return ServiceManager.getService(project, HgRepositoryManager.class); } @Nullable @CalledInAwt public static HgRepository getCurrentRepository(@NotNull Project project) { if (project.isDisposed()) return null; return DvcsUtil.guessRepositoryForFile(project, getRepositoryManager(project), DvcsUtil.getSelectedFile(project), HgProjectSettings.getInstance(project).getRecentRootPath()); } @Nullable public static HgRepository getRepositoryForFile(@NotNull Project project, @Nullable VirtualFile file) { if (file == null || project.isDisposed()) return null; HgRepositoryManager repositoryManager = getRepositoryManager(project); VirtualFile root = getHgRootOrNull(project, file); return repositoryManager.getRepositoryForRoot(root); } @Nullable public static String getRepositoryDefaultPath(@NotNull Project project, @NotNull VirtualFile root) { HgRepository hgRepository = getRepositoryManager(project).getRepositoryForRoot(root); assert hgRepository != null : "Repository can't be null for root " + root.getName(); return hgRepository.getRepositoryConfig().getDefaultPath(); } @Nullable public static String getRepositoryDefaultPushPath(@NotNull Project project, @NotNull VirtualFile root) { HgRepository hgRepository = getRepositoryManager(project).getRepositoryForRoot(root); assert 
hgRepository != null : "Repository can't be null for root " + root.getName(); return hgRepository.getRepositoryConfig().getDefaultPushPath(); } @Nullable public static String getRepositoryDefaultPushPath(@NotNull HgRepository repository) { return repository.getRepositoryConfig().getDefaultPushPath(); } @Nullable public static String getConfig(@NotNull Project project, @NotNull VirtualFile root, @NotNull String section, @Nullable String configName) { HgRepository hgRepository = getRepositoryManager(project).getRepositoryForRoot(root); assert hgRepository != null : "Repository can't be null for root " + root.getName(); return hgRepository.getRepositoryConfig().getNamedConfig(section, configName); } @NotNull public static Collection<String> getRepositoryPaths(@NotNull Project project, @NotNull VirtualFile root) { HgRepository hgRepository = getRepositoryManager(project).getRepositoryForRoot(root); assert hgRepository != null : "Repository can't be null for root " + root.getName(); return hgRepository.getRepositoryConfig().getPaths(); } public static boolean isExecutableValid(@Nullable String executable) { try { if (StringUtil.isEmptyOrSpaces(executable)) { return false; } HgCommandResult result = getVersionOutput(executable); return result.getExitValue() == 0 && !result.getRawOutput().isEmpty(); } catch (Throwable e) { LOG.info("Error during hg executable validation: ", e); return false; } } @NotNull public static HgCommandResult getVersionOutput(@NotNull String executable) throws ShellCommandException, InterruptedException { String hgExecutable = executable.trim(); List<String> cmdArgs = new ArrayList<>(); cmdArgs.add(hgExecutable); cmdArgs.add("version"); cmdArgs.add("-q"); ShellCommand shellCommand = new ShellCommand(cmdArgs, null, CharsetToolkit.getDefaultSystemCharset()); return shellCommand.execute(false, false); } public static List<String> getNamesWithoutHashes(Collection<HgNameWithHashInfo> namesWithHashes) { //return names without duplication (actually for 
several heads in one branch) List<String> names = new ArrayList<>(); for (HgNameWithHashInfo hash : namesWithHashes) { if (!names.contains(hash.getName())) { names.add(hash.getName()); } } return names; } public static List<String> getSortedNamesWithoutHashes(Collection<HgNameWithHashInfo> namesWithHashes) { List<String> names = getNamesWithoutHashes(namesWithHashes); Collections.sort(names); return names; } @NotNull public static Couple<String> parseUserNameAndEmail(@NotNull String authorString) { //special characters should be retained for properly filtering by username. For Mercurial "a.b" username is not equal to "a b" // Vasya Pupkin <vasya.pupkin@jetbrains.com> -> Vasya Pupkin , vasya.pupkin@jetbrains.com int startEmailIndex = authorString.indexOf('<'); int startDomainIndex = authorString.indexOf('@'); int endEmailIndex = authorString.indexOf('>'); String userName; String email; if (0 < startEmailIndex && startEmailIndex < startDomainIndex && startDomainIndex < endEmailIndex) { email = authorString.substring(startEmailIndex + 1, endEmailIndex); userName = authorString.substring(0, startEmailIndex).trim(); } // vasya.pupkin@email.com || <vasya.pupkin@email.com> else if (!authorString.contains(" ") && startDomainIndex > 0) { //simple e-mail check. john@localhost userName = ""; if (startEmailIndex >= 0 && startDomainIndex > startEmailIndex && startDomainIndex < endEmailIndex) { email = authorString.substring(startEmailIndex + 1, endEmailIndex).trim(); } else { email = authorString; } } else { userName = authorString.trim(); email = ""; } return Couple.of(userName, email); } @NotNull public static List<String> getTargetNames(@NotNull HgRepository repository) { return ContainerUtil.<String>sorted(ContainerUtil.map(repository.getRepositoryConfig().getPaths(), s -> removePasswordIfNeeded(s))); } }
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2022 DBeaver Corp and others
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.registry.driver;

import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBPImage;
import org.jkiss.dbeaver.model.connection.DBPDriverLibrary;
import org.jkiss.dbeaver.model.connection.DBPNativeClientLocation;
import org.jkiss.dbeaver.model.connection.LocalNativeClientLocation;
import org.jkiss.dbeaver.registry.DataSourceProviderDescriptor;
import org.jkiss.dbeaver.registry.DataSourceProviderRegistry;
import org.jkiss.dbeaver.registry.RegistryConstants;
import org.jkiss.dbeaver.registry.maven.MavenArtifactReference;
import org.jkiss.utils.CommonUtils;
import org.jkiss.utils.xml.SAXListener;
import org.jkiss.utils.xml.SAXReader;
import org.jkiss.utils.xml.XMLBuilder;
import org.xml.sax.Attributes;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * DriverDescriptorSerializerLegacy
 *
 * Legacy XML format serializer for a single {@link DriverDescriptor}: writes the
 * driver as one {@code <driver>} element (attributes, libraries, resolved files,
 * native client homes, parameters and connection properties), and parses drivers
 * back from that format via the nested SAX listener {@link DriversParser}.
 */
@Deprecated
public class DriverDescriptorSerializerLegacy extends DriverDescriptorSerializer {

    private static final Log log = Log.getLog(DriverDescriptorSerializerLegacy.class);

    /** The driver being serialized (parsing is handled by the static nested parser). */
    private DriverDescriptor driver;

    DriverDescriptorSerializerLegacy(DriverDescriptor driver) {
        this.driver = driver;
    }

    /**
     * Writes this serializer's driver into {@code xml} as a {@code <driver>} element.
     *
     * @param xml    target XML builder; elements are closed by try-with-resources scopes
     * @param export true when producing an export archive: the provider id is embedded
     *               as an attribute, and only disabled libraries are persisted
     * @throws IOException on XML writer failure
     */
    public void serialize(XMLBuilder xml, boolean export) throws IOException {
        Map<String, String> pathSubstitutions = getPathSubstitutions();
        try (XMLBuilder.Element e0 = xml.startElement(RegistryConstants.TAG_DRIVER)) {
            if (export) {
                xml.addAttribute(RegistryConstants.ATTR_PROVIDER, driver.getProviderDescriptor().getId());
            }
            xml.addAttribute(RegistryConstants.ATTR_ID, driver.getId());
            if (driver.isDisabled()) {
                xml.addAttribute(RegistryConstants.ATTR_DISABLED, true);
            }
            if (!CommonUtils.isEmpty(driver.getCategory())) {
                xml.addAttribute(RegistryConstants.ATTR_CATEGORY, driver.getCategory());
            }
            xml.addAttribute(RegistryConstants.ATTR_CATEGORIES, String.join(",", driver.getCategories()));
            xml.addAttribute(RegistryConstants.ATTR_NAME, driver.getName());
            xml.addAttribute(RegistryConstants.ATTR_CLASS, driver.getDriverClassName());
            if (!CommonUtils.isEmpty(driver.getSampleURL())) {
                xml.addAttribute(RegistryConstants.ATTR_URL, driver.getSampleURL());
            }
            if (!CommonUtils.isEmpty(driver.getDefaultPort())) {
                xml.addAttribute(RegistryConstants.ATTR_PORT, driver.getDefaultPort());
            }
            if (!CommonUtils.isEmpty(driver.getDefaultDatabase())) {
                xml.addAttribute(RegistryConstants.ATTR_DEFAULT_DATABASE, driver.getDefaultDatabase());
            }
            if (!CommonUtils.isEmpty(driver.getDefaultServer())) {
                xml.addAttribute(RegistryConstants.ATTR_DEFAULT_SERVER, driver.getDefaultServer());
            }
            if (!CommonUtils.isEmpty(driver.getDefaultUser())) {
                xml.addAttribute(RegistryConstants.ATTR_DEFAULT_USER, driver.getDefaultUser());
            }
            xml.addAttribute(RegistryConstants.ATTR_DESCRIPTION, CommonUtils.notEmpty(driver.getDescription()));
            if (driver.isCustomDriverLoader()) {
                xml.addAttribute(RegistryConstants.ATTR_CUSTOM_DRIVER_LOADER, driver.isCustomDriverLoader());
            }
            xml.addAttribute(RegistryConstants.ATTR_CUSTOM, driver.isCustom());
            if (driver.isEmbedded()) {
                xml.addAttribute(RegistryConstants.ATTR_EMBEDDED, driver.isEmbedded());
            }
            if (driver.isAnonymousAccess()) {
                xml.addAttribute(RegistryConstants.ATTR_ANONYMOUS, driver.isAnonymousAccess());
            }
            if (driver.isAllowsEmptyPassword()) {
                xml.addAttribute("allowsEmptyPassword", driver.isAllowsEmptyPassword());
            }
            if (!driver.isInstantiable()) {
                xml.addAttribute(RegistryConstants.ATTR_INSTANTIABLE, driver.isInstantiable());
            }

            // Libraries
            for (DBPDriverLibrary lib : driver.getDriverLibraries()) {
                // NOTE(review): on export only *disabled* libraries are written —
                // presumably persisting deviations from the plugin.xml defaults; confirm intent.
                if (export && !lib.isDisabled()) {
                    continue;
                }
                try (XMLBuilder.Element e1 = xml.startElement(RegistryConstants.TAG_LIBRARY)) {
                    xml.addAttribute(RegistryConstants.ATTR_TYPE, lib.getType().name());
                    xml.addAttribute(RegistryConstants.ATTR_PATH, substitutePathVariables(pathSubstitutions, lib.getPath()));
                    xml.addAttribute(RegistryConstants.ATTR_CUSTOM, lib.isCustom());
                    if (lib.isDisabled()) {
                        xml.addAttribute(RegistryConstants.ATTR_DISABLED, true);
                    }
                    if (!CommonUtils.isEmpty(lib.getPreferredVersion())) {
                        xml.addAttribute(RegistryConstants.ATTR_VERSION, lib.getPreferredVersion());
                    }
                    if (lib instanceof DriverLibraryMavenArtifact) {
                        if (((DriverLibraryMavenArtifact) lib).isIgnoreDependencies()) {
                            xml.addAttribute("ignore-dependencies", true);
                        }
                        if (((DriverLibraryMavenArtifact) lib).isLoadOptionalDependencies()) {
                            xml.addAttribute("load-optional-dependencies", true);
                        }
                    }
                    //xml.addAttribute(RegistryConstants.ATTR_CUSTOM, lib.isCustom());
                    // Files resolved for this library (e.g. downloaded Maven artifacts)
                    List<DriverDescriptor.DriverFileInfo> files = driver.getResolvedFiles().get(lib);
                    if (files != null) {
                        for (DriverDescriptor.DriverFileInfo file : files) {
                            try (XMLBuilder.Element e2 = xml.startElement(RegistryConstants.TAG_FILE)) {
                                if (file.getFile() == null) {
                                    log.warn("File missing in " + file.getId());
                                    continue;
                                }
                                xml.addAttribute(RegistryConstants.ATTR_ID, file.getId());
                                if (!CommonUtils.isEmpty(file.getVersion())) {
                                    xml.addAttribute(RegistryConstants.ATTR_VERSION, file.getVersion());
                                }
                                xml.addAttribute(RegistryConstants.ATTR_PATH,
                                    substitutePathVariables(pathSubstitutions, file.getFile().getAbsolutePath()));
                            }
                        }
                    }
                }
            }

            // Client homes
            for (DBPNativeClientLocation location : driver.getNativeClientHomes()) {
                try (XMLBuilder.Element e1 = xml.startElement(RegistryConstants.TAG_CLIENT_HOME)) {
                    xml.addAttribute(RegistryConstants.ATTR_ID, location.getName());
                    if (location.getPath() != null) {
                        xml.addAttribute(RegistryConstants.ATTR_PATH, location.getPath().getAbsolutePath());
                    }
                }
            }

            // Parameters (only values differing from the driver defaults, or all for custom drivers)
            for (Map.Entry<String, Object> paramEntry : driver.getCustomParameters().entrySet()) {
                if (driver.isCustom()
                    || !CommonUtils.equalObjects(paramEntry.getValue(), driver.getDefaultParameters().get(paramEntry.getKey()))) {
                    // Save custom parameters for custom drivers. It can help with PG drivers, as example
                    // (we must store serverType for PG-clones).
                    try (XMLBuilder.Element e1 = xml.startElement(RegistryConstants.TAG_PARAMETER)) {
                        xml.addAttribute(RegistryConstants.ATTR_NAME, paramEntry.getKey());
                        xml.addAttribute(RegistryConstants.ATTR_VALUE, CommonUtils.toString(paramEntry.getValue()));
                    }
                }
            }

            // Extra icon parameter for the custom driver
            if (driver.isCustom()) {
                try (XMLBuilder.Element e1 = xml.startElement(RegistryConstants.TAG_PARAMETER)) {
                    xml.addAttribute(RegistryConstants.ATTR_ICON, driver.getIcon().getLocation());
                }
            }

            // Properties (only values differing from the default connection properties)
            for (Map.Entry<String, Object> propEntry : driver.getCustomConnectionProperties().entrySet()) {
                if (!CommonUtils.equalObjects(propEntry.getValue(), driver.getDefaultConnectionProperties().get(propEntry.getKey()))) {
                    try (XMLBuilder.Element e1 = xml.startElement(RegistryConstants.TAG_PROPERTY)) {
                        xml.addAttribute(RegistryConstants.ATTR_NAME, propEntry.getKey());
                        xml.addAttribute(RegistryConstants.ATTR_VALUE, CommonUtils.toString(propEntry.getValue()));
                    }
                }
            }
        }
    }

    /**
     * SAX listener that reconstructs drivers from the legacy XML format written by
     * {@link #serialize(XMLBuilder, boolean)}. Maintains the current provider,
     * driver and library as parsing state across start/end element callbacks.
     */
    public static class DriversParser implements SAXListener {
        /** True when parsing a provided (bundled) drivers file rather than user configuration. */
        private final boolean providedDrivers;
        DataSourceProviderDescriptor curProvider;
        DriverDescriptor curDriver;
        DBPDriverLibrary curLibrary;

        public DriversParser(boolean provided) {
            this.providedDrivers = provided;
        }

        @Override
        public void saxStartElement(SAXReader reader, String namespaceURI, String localName, Attributes atts) {
            switch (localName) {
                case RegistryConstants.TAG_PROVIDER: {
                    curProvider = null;
                    curDriver = null;
                    String idAttr = atts.getValue(RegistryConstants.ATTR_ID);
                    if (CommonUtils.isEmpty(idAttr)) {
                        log.warn("No id for driver provider");
                        return;
                    }
                    curProvider = DataSourceProviderRegistry.getInstance().getDataSourceProvider(idAttr);
                    if (curProvider == null) {
                        log.warn("Datasource provider '" + idAttr + "' not found. Bad provider description.");
                    }
                    break;
                }
                case RegistryConstants.TAG_DRIVER: {
                    curDriver = null;
                    if (curProvider == null) {
                        // No enclosing <provider>: fall back to the driver's own provider attribute
                        String providerId = atts.getValue(RegistryConstants.ATTR_PROVIDER);
                        if (!CommonUtils.isEmpty(providerId)) {
                            curProvider = DataSourceProviderRegistry.getInstance().getDataSourceProvider(providerId);
                            if (curProvider == null) {
                                log.warn("Datasource provider '" + providerId + "' not found. Bad driver description.");
                            }
                        }
                        if (curProvider == null) {
                            log.warn("Driver outside of datasource provider");
                            return;
                        }
                    }
                    String idAttr = atts.getValue(RegistryConstants.ATTR_ID);
                    curDriver = curProvider.getDriver(idAttr);
                    if (curDriver == null) {
                        curDriver = new DriverDescriptor(curProvider, idAttr);
                        curProvider.addDriver(curDriver);
                    }
                    if (providedDrivers || curProvider.isDriversManagable()) {
                        String category = atts.getValue(RegistryConstants.ATTR_CATEGORY);
                        if (!CommonUtils.isEmpty(category)) {
                            curDriver.setCategory(category);
                        }
                        if (providedDrivers || curDriver.isCustom()) {
                            curDriver.setName(CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_NAME), curDriver.getName()));
                        }
                        curDriver.setDescription(CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_DESCRIPTION), curDriver.getDescription()));
                        curDriver.setDriverClassName(CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_CLASS), curDriver.getDriverClassName()));
                        curDriver.setSampleURL(CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_URL), curDriver.getSampleURL()));
                        curDriver.setDriverDefaultPort(CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_PORT), curDriver.getDefaultPort()));
                        curDriver.setDriverDefaultDatabase(CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_DEFAULT_DATABASE), curDriver.getDefaultDatabase()));
                        curDriver.setDriverDefaultServer(CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_DEFAULT_SERVER), curDriver.getDefaultServer()));
                        curDriver.setDriverDefaultUser(CommonUtils.toString(atts.getValue(RegistryConstants.ATTR_DEFAULT_USER), curDriver.getDefaultUser()));
                        curDriver.setEmbedded(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_EMBEDDED), curDriver.isEmbedded()));
                        curDriver.setAnonymousAccess(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_ANONYMOUS), curDriver.isAnonymousAccess()));
                        curDriver.setAllowsEmptyPassword(CommonUtils.getBoolean(atts.getValue("allowsEmptyPassword"), curDriver.isAllowsEmptyPassword()));
                        curDriver.setInstantiable(CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_INSTANTIABLE), curDriver.isInstantiable()));
                    }
                    if (atts.getValue(RegistryConstants.ATTR_CUSTOM_DRIVER_LOADER) != null) {
                        curDriver.setCustomDriverLoader((
                            CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_CUSTOM_DRIVER_LOADER), false)));
                    }
                    if (atts.getValue(RegistryConstants.ATTR_USE_URL_TEMPLATE) != null) {
                        curDriver.setUseURL((
                            CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_USE_URL_TEMPLATE), true)));
                    }
                    curDriver.setModified(true);
                    String disabledAttr = atts.getValue(RegistryConstants.ATTR_DISABLED);
                    if (CommonUtils.getBoolean(disabledAttr)) {
                        curDriver.setDisabled(true);
                    }
                    break;
                }
                case RegistryConstants.TAG_LIBRARY: {
                    if (curDriver == null) {
                        log.warn("Library outside of driver");
                        return;
                    }
                    DBPDriverLibrary.FileType type;
                    String typeStr = atts.getValue(RegistryConstants.ATTR_TYPE);
                    if (CommonUtils.isEmpty(typeStr)) {
                        type = DBPDriverLibrary.FileType.jar;
                    } else {
                        type = CommonUtils.valueOf(DBPDriverLibrary.FileType.class, typeStr, DBPDriverLibrary.FileType.jar);
                    }
                    String path = normalizeLibraryPath(atts.getValue(RegistryConstants.ATTR_PATH));
                    if (!CommonUtils.isEmpty(path)) {
                        path = replacePathVariables(path);
                    }
                    boolean custom = CommonUtils.getBoolean(atts.getValue(RegistryConstants.ATTR_CUSTOM), true);
                    String version = atts.getValue(RegistryConstants.ATTR_VERSION);
                    DBPDriverLibrary lib = curDriver.getDriverLibrary(path);
                    if (!providedDrivers && !custom && lib == null) {
                        // Perhaps this library isn't included in driver bundle
                        // Or this is predefined library from some previous version - as it wasn't defined in plugin.xml
                        // so let's just skip it
                        //log.debug("Skip obsolete custom library '" + path + "'");
                        return;
                    }
                    if (providedDrivers && lib == null && !(curDriver.getDriverLibraries().isEmpty())) {
                        // Provided file introduces an unknown library: defaults are superseded
                        curDriver.disabledAllDefaultLibraries();
                    }
                    String disabledAttr = atts.getValue(RegistryConstants.ATTR_DISABLED);
                    if (lib != null && CommonUtils.getBoolean(disabledAttr)) {
                        lib.setDisabled(true);
                    } else if (lib == null) {
                        lib = DriverLibraryAbstract.createFromPath(curDriver, type, path, version);
                        curDriver.addDriverLibrary(lib, false);
                    } else if (!CommonUtils.isEmpty(version)) {
                        lib.setPreferredVersion(version);
                    }
                    if (lib instanceof DriverLibraryMavenArtifact) {
                        ((DriverLibraryMavenArtifact) lib).setIgnoreDependencies(CommonUtils.toBoolean(atts.getValue("ignore-dependencies")));
                        ((DriverLibraryMavenArtifact) lib).setLoadOptionalDependencies(CommonUtils.toBoolean(atts.getValue("load-optional-dependencies")));
                    }
                    curLibrary = lib;
                    break;
                }
                case RegistryConstants.TAG_FILE: {
                    if (curDriver != null && curLibrary != null) {
                        String path = atts.getValue(RegistryConstants.ATTR_PATH);
                        if (path != null) {
                            path = replacePathVariables(path);
                            if (CommonUtils.isEmpty(path)) {
                                log.warn("Empty path for library file");
                            } else {
                                // FIX: notEmpty must wrap the attribute *value* (which may be null),
                                // not the constant attribute name (where it was a no-op).
                                DriverDescriptor.DriverFileInfo info = new DriverDescriptor.DriverFileInfo(
                                    CommonUtils.notEmpty(atts.getValue(RegistryConstants.ATTR_ID)),
                                    CommonUtils.notEmpty(atts.getValue(RegistryConstants.ATTR_VERSION)),
                                    curLibrary.getType(),
                                    new File(path));
                                curDriver.addLibraryFile(curLibrary, info);
                            }
                        }
                    }
                    break;
                }
                case RegistryConstants.TAG_CLIENT_HOME:
                    if (curDriver != null) {
                        curDriver.addNativeClientLocation(
                            new LocalNativeClientLocation(
                                atts.getValue(RegistryConstants.ATTR_ID),
                                atts.getValue(RegistryConstants.ATTR_PATH)));
                    }
                    break;
                case RegistryConstants.TAG_PARAMETER: {
                    if (curDriver != null) {
                        final String paramName = atts.getValue(RegistryConstants.ATTR_NAME);
                        final String paramValue = atts.getValue(RegistryConstants.ATTR_VALUE);
                        if (!CommonUtils.isEmpty(paramName) && !CommonUtils.isEmpty(paramValue)) {
                            curDriver.setDriverParameter(paramName, paramValue, false);
                        }
                        // Read extra icon parameter for custom drivers
                        if (curDriver.isCustom()) {
                            final String iconParam = atts.getValue(RegistryConstants.ATTR_ICON);
                            if (!CommonUtils.isEmpty(iconParam)) {
                                DBPImage icon = curDriver.iconToImage(iconParam);
                                curDriver.setIconPlain(icon);
                                curDriver.makeIconExtensions();
                            }
                        }
                    }
                    break;
                }
                case RegistryConstants.TAG_PROPERTY: {
                    if (curDriver != null) {
                        final String paramName = atts.getValue(RegistryConstants.ATTR_NAME);
                        final String paramValue = atts.getValue(RegistryConstants.ATTR_VALUE);
                        if (!CommonUtils.isEmpty(paramName)) {
                            curDriver.setConnectionProperty(paramName, paramValue);
                        }
                    }
                    break;
                }
            }
        }

        // TODO: support of 3.5.1 -> 3.5.2 maven dependencies migration
        private static final String PATH_VERSION_OBSOLETE_RELEASE = ":release";

        /**
         * Rewrites an obsolete Maven ":release" version suffix into the current
         * release version pattern; other paths pass through unchanged.
         */
        private static String normalizeLibraryPath(String value) {
            if (value.startsWith(DriverLibraryMavenArtifact.PATH_PREFIX)) {
                if (value.endsWith(PATH_VERSION_OBSOLETE_RELEASE)) {
                    value = value.substring(0, value.length() - PATH_VERSION_OBSOLETE_RELEASE.length())
                        + ":" + MavenArtifactReference.VERSION_PATTERN_RELEASE;
                }
            }
            return value;
        }

        @Override
        public void saxText(SAXReader reader, String data) {
        }

        @Override
        public void saxEndElement(SAXReader reader, String namespaceURI, String localName) {
            switch (localName) {
                case RegistryConstants.TAG_LIBRARY:
                    curLibrary = null;
                    break;
            }
        }
    }
}
package de.lessvoid.nifty.controls;

import de.lessvoid.nifty.Nifty;
import de.lessvoid.nifty.controls.button.builder.ButtonBuilder;
import de.lessvoid.nifty.elements.Element;
import de.lessvoid.nifty.elements.render.ImageRenderer;
import de.lessvoid.nifty.elements.render.TextRenderer;
import de.lessvoid.nifty.input.NiftyInputEvent;
import de.lessvoid.nifty.render.NiftyImage;
import de.lessvoid.nifty.screen.Screen;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.List;

/**
 * A modal popup message box with an icon, a message text and one or more buttons.
 * Buttons are generated from the caption list; clicking any of them closes the box
 * (via the reflective {@code close(...)} interaction callback).
 *
 * TODO: This thing is bad! Hardcoded images and it breaks the general controller conventions.
 */
@SuppressWarnings("ConstantConditions")
public class MessageBox extends AbstractController {
  // Captions of the buttons shown at the bottom; one button is created per caption.
  @Nonnull
  private String[] buttonCaptions;
  // Determines the default icon ("messagebox/<TYPE>.png") unless CUSTOM.
  @Nonnull
  private MessageType messageType = MessageType.INFO;
  @Nullable
  private NiftyImage icon;
  // Message text; assigned via setMessage before setupMessageBox is called.
  // NOTE(review): declared @Nonnull but not initialized at declaration — relies on callers.
  @Nonnull
  private String message;
  // Fixed button dimensions used by createButton.
  @Nonnull
  private final String buttonWidth = "100px";
  @Nonnull
  private final String buttonHeight = "25px";
  @Nullable
  private Nifty nifty;
  // The popup element hosting the "#messagebox" control.
  @Nullable
  private Element messageboxPopup;
  // The actual control instance found inside the popup (when constructed programmatically).
  @Nullable
  private MessageBox msgBox;

  public MessageBox() {
  }

  /**
   * Creates a popup message box with a single button.
   *
   * @param icon image path for the icon, or null for none
   */
  public MessageBox(
      @Nonnull Nifty nifty,
      @Nonnull final MessageType messageType,
      @Nonnull final String message,
      final String buttonCaption,
      final String icon) {
    this.nifty = nifty;
    messageboxPopup = nifty.createPopup("niftyPopupMessageBox");
    if (messageboxPopup == null) {
      return;
    }
    msgBox = messageboxPopup.findNiftyControl("#messagebox", MessageBox.class);
    if (msgBox != null) {
      msgBox.setMessageType(messageType);
      msgBox.setMessage(message);
      msgBox.setButtonCaption(buttonCaption);
      msgBox.setIcon(icon);
      msgBox.setupMessageBox();
    }
  }

  /** Single-button message box with no explicit icon. */
  public MessageBox(
      @Nonnull Nifty nifty,
      @Nonnull MessageType messageType,
      @Nonnull String message,
      String buttonCaption) {
    this(nifty, messageType, message, buttonCaption, null);
  }

  /**
   * Creates a popup message box with multiple buttons.
   *
   * @param icon image path for the icon, or null for none
   */
  public MessageBox(
      @Nonnull Nifty nifty,
      @Nonnull final MessageType messageType,
      @Nonnull final String message,
      final String[] buttonCaptions,
      final String icon) {
    this.nifty = nifty;
    messageboxPopup = nifty.createPopup("niftyPopupMessageBox");
    if (messageboxPopup == null) {
      return;
    }
    msgBox = messageboxPopup.findNiftyControl("#messagebox", MessageBox.class);
    if (msgBox != null) {
      msgBox.setMessageType(messageType);
      msgBox.setMessage(message);
      msgBox.setButtonCaptions(buttonCaptions);
      msgBox.setIcon(icon);
      msgBox.setupMessageBox();
    }
  }

  /** Multi-button message box with no explicit icon. */
  public MessageBox(
      @Nonnull Nifty nifty,
      @Nonnull MessageType messageType,
      @Nonnull String message,
      String... buttonCaptions) {
    this(nifty, messageType, message, buttonCaptions, null);
  }

  @Override
  public void bind(
      @Nonnull Nifty nifty,
      @Nonnull Screen screen,
      @Nonnull Element element,
      @Nonnull Parameters parameter) {
    messageboxPopup = element;
    this.nifty = nifty;
    // Captions may come as a comma-separated list or a single caption parameter.
    if (parameter.isSet("buttonCaptions")) {
      setButtonCaptions(parameter.getWithDefault("buttonCaptions", "").split(","));
    } else if (parameter.isSet("buttonCaption")) {
      setButtonCaption(parameter.get("buttonCaption"));
    }
    if (messageType != MessageType.CUSTOM) {
      // Default per-type icon; hardcoded path (see class TODO).
      setIcon("messagebox/" + messageType.name() + ".png");
    }
  }

  @Override
  public void onStartScreen() {
    // TODO Auto-generated method stub
  }

  @Override
  public boolean inputEvent(@Nonnull NiftyInputEvent inputEvent) {
    // TODO Auto-generated method stub
    return false;
  }

  /** Shows the message box popup on the current screen. */
  public void show() {
    nifty.showPopup(nifty.getCurrentScreen(), messageboxPopup.getId(), null);
  }

  /**
   * Invoked reflectively by the buttons' {@code interactOnClick("close(...)")} callback.
   *
   * @param command the caption of the clicked button (currently unused)
   */
  public void close(String command) {
    closeMessageBox();
    nifty.closePopup(messageboxPopup.getParent().getId());
  }

  /** Sets the icon from an image path; null leaves the current icon unchanged. */
  public void setIcon(@Nullable String icon) {
    if (icon != null) {
      this.icon = nifty.createImage(icon, false);
    }
  }

  public void setMessage(@Nonnull String message) {
    this.message = message;
  }

  public void setButtonCaption(String buttonCaption) {
    this.buttonCaptions = new String[]{buttonCaption};
  }

  public void setButtonCaptions(@Nonnull String... buttonCaptions) {
    // Defensive copy so later mutation of the caller's array has no effect.
    this.buttonCaptions = Arrays.copyOf(buttonCaptions, buttonCaptions.length);
  }

  /** Comma-separated caption list variant. */
  public void setButtonCaptions(@Nonnull String buttonCaptions) {
    this.buttonCaptions = buttonCaptions.split(",");
  }

  public void setMessageType(@Nonnull String messageType) {
    this.messageType = MessageType.valueOf(messageType);
  }

  public void setMessageType(@Nonnull MessageType messageType) {
    this.messageType = messageType;
  }

  /** Populates icon, text and buttons inside the popup, then re-layouts the screen. */
  private void setupMessageBox() {
    final Element imgIcon = messageboxPopup.findElementById("#messagebox").findElementById("#message-icon");
    final ImageRenderer iconRenderer = imgIcon.getRenderer(ImageRenderer.class);
    iconRenderer.setImage(icon);
    final Element text = messageboxPopup.findElementById("#messagebox").findElementById("#message-text");
    final TextRenderer textRenderer = text.getRenderer(TextRenderer.class);
    textRenderer.setText(message);
    int i = 0;
    for (String buttonCaption : buttonCaptions) {
      i++;
      createButton(buttonCaption, buttonCaption, "button_" + i);
    }
    messageboxPopup.findElementById("#messagebox").layoutElements();
    nifty.getCurrentScreen().layoutLayers();
  }

  private void closeMessageBox() {
    clearButtons();
    // messageboxPopup.findElementById("#messagebox").findElementById("#buttons");
    nifty.getCurrentScreen().layoutLayers();
  }

  /** Builds one button in the "#buttons" panel, unless one with this id already exists. */
  private void createButton(@Nonnull final String buttonCaption, final String command, final String buttonId) {
    Element buttonPanel = messageboxPopup.findElementById("#messagebox").findElementById("#buttons");
    if (buttonPanel.findElementById("#" + buttonId) == null) {
      new ButtonBuilder("#" + buttonId) {{
        style("nifty-button");
        childLayout(ChildLayoutType.Horizontal);
        // The button click calls back into close(command) reflectively.
        interactOnClick("close(" + command + ")");
        // buttonWidth/buttonHeight are final non-null constants, so the former
        // null checks (and the unreachable height("25px") fallback) were dead code.
        width(buttonWidth);
        height(buttonHeight);
        label(buttonCaption);
      }}.build(nifty, nifty.getCurrentScreen(), buttonPanel);
    }
  }

  /** Marks all generated buttons for removal. */
  private void clearButtons() {
    List<Element> buttons = messageboxPopup.findElementById("#messagebox").findElementById("#buttons").getChildren();
    for (Element button : buttons) {
      button.markForRemoval();
    }
  }

  @Nullable
  protected Element getMessageBoxPopup() {
    return messageboxPopup;
  }

  public enum MessageType {
    CUSTOM, INFO, WARNING, ERROR
  }
}
// Copyright 2012 Mitchell Kember. Subject to the MIT License.

package com.mitchellkember.mycraft;

/**
 * Player represents the user in the Mycraft world. A Player is primarily a view
 * into the Mycraft world, and so much of the work is done by the Camera which
 * the Player owns. This class also manages a Player's movements and physics.
 *
 * NOTE(review): the collision code clamps coordinates to [0, 16] and tests
 * blocks at offsets of +/-0.25 around the position — the world appears to be a
 * single 16x16x16 chunk and the player hitbox a 0.5-unit square; confirm
 * against Chunk before relying on this.
 *
 * @author Mitchell Kember
 * @since 09/12/2011
 */
public class Player {

    /**
     * The number of units above this Player's feet that the head or Camera
     * is stationed.
     */
    private static final float CAMERA_HEIGHT = 1.5f;

    /**
     * Speed in units per 60 FPS frame for this Player's movement.
     */
    private static final float MOVE_SPEED = 0.07f;

    /**
     * The pull of gravity, in units per 60 FPS frame.
     */
    private static final float GRAVITY = -0.005f;

    /**
     * The initial upward velocity this Player will have upon jumping.
     * (Name keeps the original's "INITAL" spelling.)
     */
    private static final float INITAL_JUMP_VELOCITY = 0.11f;

    /**
     * The height of what the Player is currently standing on.
     */
    private float ground = 0;

    /**
     * The height of this Player; this Player's Y coordinate in 3D space where
     * positive Y is upwards.
     */
    private float height = 5;

    /**
     * The vertical velocity of this Player, used for jumping and falling.
     */
    private float velocity = 0;

    /**
     * The view of this Player into the Mycraft world.
     */
    private Camera camera = new Camera();

    // Instance initializer: place the camera at eye level above the feet.
    {
        camera.setPositionY(height+CAMERA_HEIGHT);
    }

    /**
     * Used for collision detection, to determine which direction this Player is moving.
     */
    private Vector deltaPosition;

    /**
     * Causes this Player to jump, unless this Player is already in the air
     * (jumping or falling) in which case nothing happens.
     */
    void jump() {
        if (height == ground) {
            // Nudge height off the ground so height != ground, which is what
            // the move() method uses to decide that gravity applies.
            ground = 0;
            height += 0.0001f;
            velocity = INITAL_JUMP_VELOCITY;
        }
    }

    /**
     * Checks for collision with blocks and moves the Camera accordingly.
     *
     * Each horizontal test probes the two block columns at z (or x) +/- 0.25
     * at both body heights (feet level and head level, height and height+1),
     * guarded by bounds checks so out-of-range blocks are never queried.
     *
     * @param chunk the Chunk this Player is in
     */
    void collision(Chunk chunk) {
        // Boundaries (Y boundaries are handled by the jumping code in the move method).
        Vector position = camera.getPosition();
        if (position.x < 0) camera.setPositionX(0);
        else if (position.x > 16) camera.setPositionX(16);
        if (position.z < 0) camera.setPositionZ(0);
        else if (position.z > 16) camera.setPositionZ(16);

        // Right and left
        if (deltaPosition.x > 0) {
            // Moving in +x: if the rounded-up x cell is solid at either body
            // height, push the player back to the near face of that cell.
            if ((int)Math.round(position.x) < 16 && (int)Math.round(position.x) > position.x
                && ((position.z-0.25f >= 0 && chunk.getBlockType(new Block((int)Math.round(position.x), (int)(height), (int)(position.z-0.25f))) != 0)
                || (position.z+0.25f < 16 && chunk.getBlockType(new Block((int)Math.round(position.x), (int)(height), (int)(position.z+0.25f))) != 0)
                || (height + 1 < 16 && position.z-0.25f >= 0 && chunk.getBlockType(new Block((int)Math.round(position.x), (int)(height+1), (int)(position.z-0.25f))) != 0)
                || (height + 1 < 16 && position.z+0.25f < 16 && chunk.getBlockType(new Block((int)Math.round(position.x), (int)(height+1), (int)(position.z+0.25f))) != 0))) {
                camera.setPositionX((int)Math.round(position.x) - 0.5f);
            }
        } else {
            // Moving in -x: mirror of the above against the cell below x.
            if ((int)Math.round(position.x)-1 >= 0 && (int)Math.round(position.x) < position.x
                && ((position.z-0.25f >= 0 && chunk.getBlockType(new Block((int)Math.round(position.x)-1, (int)(height), (int)(position.z-0.25f))) != 0)
                || (position.z+0.25f < 16 && chunk.getBlockType(new Block((int)Math.round(position.x)-1, (int)(height), (int)(position.z+0.25f))) != 0)
                || (height + 1 < 16 && position.z-0.25f >= 0 && chunk.getBlockType(new Block((int)Math.round(position.x)-1, (int)(height+1), (int)(position.z-0.25f))) != 0)
                || (height + 1 < 16 && position.z+0.25f < 16 && chunk.getBlockType(new Block((int)Math.round(position.x)-1, (int)(height+1), (int)(position.z+0.25f))) != 0))) {
                camera.setPositionX((int)Math.round(position.x) + 0.5f);
            }
        }

        // Forward and backward
        if (deltaPosition.z > 0) {
            if ((int)Math.round(position.z) < 16 && (int)Math.round(position.z) > position.z
                && ((position.x-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x-0.25f), (int)(height), (int)Math.round(position.z))) != 0)
                || (position.x+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x+0.25f), (int)(height), (int)Math.round(position.z))) != 0)
                || (height+1 < 16 && position.x-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x-0.25f), (int)(height+1), (int)Math.round(position.z))) != 0)
                || (height+1 < 16 && position.x+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x+0.25f), (int)(height+1), (int)Math.round(position.z))) != 0))) {
                // NOTE(review): uses Math.ceil here while the analogous +x
                // branch uses Math.round — possibly intentional, possibly an
                // inconsistency; verify before changing.
                camera.setPositionZ((int)Math.ceil(position.z) - 0.5f);
            }
        } else {
            if ((int)Math.round(position.z)-1 >= 0 && (int)Math.round(position.z) < position.z
                && ((position.x-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x-0.25f), (int)(height), (int)Math.round(position.z)-1)) != 0)
                || (position.x+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x+0.25f), (int)(height), (int)Math.round(position.z)-1)) != 0)
                || (height+1 < 16 && position.x-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x-0.25f), (int)(height+1), (int)Math.round(position.z)-1)) != 0)
                || (height+1 < 16 && position.x+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x+0.25f), (int)(height+1), (int)Math.round(position.z)-1)) != 0))) {
                camera.setPositionZ((int)Math.round(position.z) + 0.5f);
            }
        }

        // Falling
        if (deltaPosition.y <= 0) {
            int drop = (int)height;
            // Cast down a line until it reaches a solid block, which is the ground.
            // All nine sample points of the hitbox footprint (center, edges,
            // corners at +/-0.25) must be over air for the ray to keep dropping.
            while (drop >= 1 && !(((int)position.x < 16 && (int)position.z < 16 && chunk.getBlockType(new Block((int)(position.x), drop-1, (int)(position.z))) != 0)
                || ((int)position.z < 16 && position.x+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x+0.25f), drop-1, (int)(position.z))) != 0)
                || ((int)position.x < 16 && position.z+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x), drop-1, (int)(position.z+0.25f))) != 0)
                || (position.x+0.25f < 16 && position.z+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x+0.25f), drop-1, (int)(position.z+0.25f))) != 0)
                || ((int)position.z < 16 && position.x-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x-0.25f), drop-1, (int)(position.z))) != 0)
                || ((int)position.x < 16 && position.z-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x), drop-1, (int)(position.z-0.25f))) != 0)
                || (position.z-0.25f >= 0 && position.x-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x-0.25f), drop-1, (int)(position.z-0.25f))) != 0)
                || (position.x+0.25f < 16 && position.z-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x+0.25f), drop-1, (int)(position.z-0.25f))) != 0)
                || (position.x-0.25f >= 0 && position.z+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x-0.25f), drop-1, (int)(position.z+0.25f))) != 0))) {
                drop--;
            }
            ground = drop;
        } else {
            // Hitting your head when jumping: same nine-point footprint test,
            // but at the rounded camera height above the player.
            if ((int)Math.round(position.y) < 16 && (((int)position.x < 16 && (int)position.z < 16 && chunk.getBlockType(new Block((int)(position.x), (int)Math.round(position.y), (int)(position.z))) != 0)
                || ((int)position.z < 16 && position.x+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x+0.25f), (int)Math.round(position.y), (int)(position.z))) != 0)
                || ((int)position.x < 16 && position.z+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x), (int)Math.round(position.y), (int)(position.z+0.25f))) != 0)
                || (position.x+0.25f < 16 && position.z+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x+0.25f), (int)Math.round(position.y), (int)(position.z+0.25f))) != 0)
                || ((int)position.z < 16 && position.x-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x-0.25f), (int)Math.round(position.y), (int)(position.z))) != 0)
                || ((int)position.x < 16 && position.z-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x), (int)Math.round(position.y), (int)(position.z-0.25f))) != 0)
                || (position.z-0.25f >= 0 && position.x-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x-0.25f), (int)Math.round(position.y), (int)(position.z-0.25f))) != 0)
                || (position.x+0.25f < 16 && position.z-0.25f >= 0 && chunk.getBlockType(new Block((int)(position.x+0.25f), (int)Math.round(position.y), (int)(position.z-0.25f))) != 0)
                || (position.x-0.25f >= 0 && position.z+0.25f < 16 && chunk.getBlockType(new Block((int)(position.x-0.25f), (int)Math.round(position.y), (int)(position.z+0.25f))) != 0))) {
                // Reposition and stop upward velocity
                height = (int)Math.ceil(position.y) - CAMERA_HEIGHT - 0.5f;
                velocity = 0;
            }
        }
    }

    /**
     * Moves this Player and orients this Player's view according to user input.
     *
     * @param input the user input
     * @param multiplier frame-time multiplier (1.0 at 60 FPS) applied to all movement
     */
    void move(GameStateInputData input, float multiplier) {
        Vector previousPosition = camera.getPosition();
        // Movement
        if (input.forward) {
            camera.moveForward(MOVE_SPEED * multiplier);
        }
        if (input.backward) {
            camera.moveForward(-MOVE_SPEED * multiplier);
        }
        if (input.left) {
            camera.strafeRight(-MOVE_SPEED * multiplier);
        }
        if (input.right) {
            camera.strafeRight(MOVE_SPEED * multiplier);
        }

        // Airborne physics: integrate velocity and gravity, then clamp to the
        // ground below and the world ceiling above.
        if (height != ground) {
            height += velocity * multiplier;
            velocity += GRAVITY * multiplier;

            if (height < ground) {
                // Landed.
                height = ground;
                velocity = 0;
            } else if (height + CAMERA_HEIGHT > 16) {
                // Head reached the top of the world.
                height = 16 - CAMERA_HEIGHT;
                velocity = 0;
            }
            camera.setPositionY(height+CAMERA_HEIGHT);
        }

        // Calculate the delta position
        deltaPosition = camera.getPosition().minus(previousPosition);

        // Orient the camera
        camera.pitch(input.lookDeltaY);
        camera.yaw(input.lookDeltaX);
    }

    /**
     * Gets this Player's Camera object.
     *
     * @return the camera
     */
    Camera getCamera() {
        return camera;
    }
}
package com.nightonke.boommenu.BoomButtons;

import android.content.Context;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.text.TextUtils;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;

import com.nightonke.boommenu.ButtonEnum;
import com.nightonke.boommenu.R;
import com.nightonke.boommenu.Util;

import java.util.ArrayList;

/**
 * A wide "ham" style boom-button: image on the left, text (and optional
 * sub-text) on the right. Instances are created only through {@link Builder}.
 *
 * Created by Weiping Huang at 13:13 on 16/11/27
 * For Personal Open Source
 * Contact me at 2584541288@qq.com or nightonke@outlook.com
 * For more projects: https://github.com/Nightonke
 */
@SuppressWarnings("unused")
public class HamButton extends BoomButton {

    // Private: construction goes through Builder.build(Context).
    private HamButton(Builder builder, Context context) {
        super(context);
        this.context = context;
        this.buttonEnum = ButtonEnum.Ham;
        init(builder);
    }

    // Inflates the layout, applies the builder's attributes and computes the
    // geometric center of the button including its shadow.
    private void init(Builder builder) {
        LayoutInflater.from(context).inflate(R.layout.bmb_ham_button, this, true);
        initAttrs(builder);
        initShadow(builder.shadowCornerRadius);
        initHamButton();
        initText(button);
        initSubText(button);
        initImage();
        centerPoint = new PointF(
                buttonWidth / 2.0f + shadowRadius + shadowOffsetX,
                buttonHeight / 2.0f + shadowRadius + shadowOffsetY);
    }

    // Thin delegation to the superclass; kept as an extension point.
    private void initAttrs(Builder builder) {
        super.initAttrs(builder);
    }

    /** Views hidden while the menu animates; sub-text only when present. */
    @Override
    public ArrayList<View> goneViews() {
        ArrayList<View> goneViews = new ArrayList<>();
        goneViews.add(image);
        goneViews.add(text);
        if (subText != null) goneViews.add(subText);
        return goneViews;
    }

    /** Views rotated during the boom animation (the image, if enabled). */
    @Override
    public ArrayList<View> rotateViews() {
        ArrayList<View> rotateViews = new ArrayList<>();
        if (rotateImage) rotateViews.add(image);
        return rotateViews;
    }

    /** Total width including shadow radius and offset on both sides. */
    @Override
    public int trueWidth() {
        return buttonWidth + shadowRadius * 2 + shadowOffsetX * 2;
    }

    /** Total height including shadow radius and offset on both sides. */
    @Override
    public int trueHeight() {
        return buttonHeight + shadowRadius * 2 + shadowOffsetY * 2;
    }

    @Override
    public int contentWidth() {
        return buttonWidth;
    }

    @Override
    public int contentHeight() {
        return buttonHeight;
    }

    /** Switches image/text/sub-text to the highlighted state (press feedback). */
    @Override
    public void toHighlighted() {
        if (lastStateIsNormal && ableToHighlight) {
            toHighlightedImage();
            toHighlightedText();
            toHighlightedSubText();
            lastStateIsNormal = false;
        }
    }

    /** Restores image/text/sub-text to the normal state. */
    @Override
    public void toNormal() {
        if (!lastStateIsNormal) {
            toNormalImage();
            toNormalText();
            toNormalSubText();
            lastStateIsNormal = true;
        }
    }

    // Ham buttons rotate/scale around their default anchors; nothing to set.
    @Override
    public void setRotateAnchorPoints() { }

    @Override
    public void setSelfScaleAnchorPoints() { }

    /**
     * Builder for {@link HamButton}.
     *
     * Each setter follows the same "synchronicity" pattern: when the built
     * boom-button already exists, the change is written through to it and the
     * corresponding update method is invoked so the live view refreshes.
     */
    public static class Builder extends BoomButtonWithTextBuilder<Builder> {

        public Builder() {
            // Defaults: 60dp square image on the left, text block to its right.
            imageRect = new Rect(0, 0, Util.dp2px(60), Util.dp2px(60));
            textRect = new Rect(Util.dp2px(70), Util.dp2px(10), Util.dp2px(280), Util.dp2px(40));
            textGravity = Gravity.START|Gravity.CENTER_VERTICAL;
            textSize = 15;
        }

        /** Whether the ham-button contains a sub text-view. */
        public Builder containsSubText(boolean containsSubText) {
            this.containsSubText = containsSubText;
            return this;
        }

        /** Sub-text at normal-state; synced to an existing boom-button. */
        public Builder subNormalText(String subNormalText) {
            if (this.subNormalText == null || !this.subNormalText.equals(subNormalText)) {
                this.subNormalText = subNormalText;
                BoomButton button = button();
                if (button != null) {
                    button.subNormalText = subNormalText;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text resource at normal-state; synced to an existing boom-button. */
        public Builder subNormalTextRes(int subNormalTextRes) {
            if (this.subNormalTextRes != subNormalTextRes) {
                this.subNormalTextRes = subNormalTextRes;
                BoomButton button = button();
                if (button != null) {
                    button.subNormalTextRes = subNormalTextRes;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text at highlighted-state; synced to an existing boom-button. */
        public Builder subHighlightedText(String subHighlightedText) {
            if (this.subHighlightedText == null || !this.subHighlightedText.equals(subHighlightedText)) {
                this.subHighlightedText = subHighlightedText;
                BoomButton button = button();
                if (button != null) {
                    button.subHighlightedText = subHighlightedText;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text resource at highlighted-state; synced to an existing boom-button. */
        public Builder subHighlightedTextRes(int subHighlightedTextRes) {
            if (this.subHighlightedTextRes != subHighlightedTextRes) {
                this.subHighlightedTextRes = subHighlightedTextRes;
                BoomButton button = button();
                if (button != null) {
                    button.subHighlightedTextRes = subHighlightedTextRes;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text at unable-state; synced to an existing boom-button. */
        public Builder subUnableText(String subUnableText) {
            if (this.subUnableText == null || !this.subUnableText.equals(subUnableText)) {
                this.subUnableText = subUnableText;
                BoomButton button = button();
                if (button != null) {
                    button.subUnableText = subUnableText;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text resource at unable-state; synced to an existing boom-button. */
        public Builder subUnableTextRes(int subUnableTextRes) {
            if (this.subUnableTextRes != subUnableTextRes) {
                this.subUnableTextRes = subUnableTextRes;
                BoomButton button = button();
                if (button != null) {
                    button.subUnableTextRes = subUnableTextRes;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text color at normal-state; synced to an existing boom-button. */
        public Builder subNormalTextColor(int subNormalTextColor) {
            if (this.subNormalTextColor != subNormalTextColor) {
                this.subNormalTextColor = subNormalTextColor;
                BoomButton button = button();
                if (button != null) {
                    button.subNormalTextColor = subNormalTextColor;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text color resource at normal-state; synced to an existing boom-button. */
        public Builder subNormalTextColorRes(int subNormalTextColorRes) {
            if (this.subNormalTextColorRes != subNormalTextColorRes) {
                this.subNormalTextColorRes = subNormalTextColorRes;
                BoomButton button = button();
                if (button != null) {
                    button.subNormalTextColorRes = subNormalTextColorRes;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text color at highlighted-state; synced to an existing boom-button. */
        public Builder subHighlightedTextColor(int subHighlightedTextColor) {
            if (this.subHighlightedTextColor != subHighlightedTextColor) {
                this.subHighlightedTextColor = subHighlightedTextColor;
                BoomButton button = button();
                if (button != null) {
                    button.subHighlightedTextColor = subHighlightedTextColor;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text color resource at highlighted-state; synced to an existing boom-button. */
        public Builder subHighlightedTextColorRes(int subHighlightedTextColorRes) {
            if (this.subHighlightedTextColorRes != subHighlightedTextColorRes) {
                this.subHighlightedTextColorRes = subHighlightedTextColorRes;
                BoomButton button = button();
                if (button != null) {
                    button.subHighlightedTextColorRes = subHighlightedTextColorRes;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text color at unable-state; synced to an existing boom-button. */
        public Builder subUnableTextColor(int subUnableTextColor) {
            if (this.subUnableTextColor != subUnableTextColor) {
                this.subUnableTextColor = subUnableTextColor;
                BoomButton button = button();
                if (button != null) {
                    button.subUnableTextColor = subUnableTextColor;
                    button.updateSubText();
                }
            }
            return this;
        }

        /** Sub-text color resource at unable-state; synced to an existing boom-button. */
        public Builder subUnableTextColorRes(int subUnableTextColorRes) {
            if (this.subUnableTextColorRes != subUnableTextColorRes) {
                this.subUnableTextColorRes = subUnableTextColorRes;
                BoomButton button = button();
                if (button != null) {
                    button.subUnableTextColorRes = subUnableTextColorRes;
                    button.updateSubText();
                }
            }
            return this;
        }

        /**
         * Position/size of the sub-text-view inside the boom-button, in pixels.
         * e.g. new Rect(0, 50, 100, 100) gives a 100x50 view with 50px top margin.
         * Note: compares by reference, so an equal-but-distinct Rect still triggers a sync.
         */
        public Builder subTextRect(Rect subTextRect) {
            if (this.subTextRect != subTextRect) {
                this.subTextRect = subTextRect;
                BoomButton button = button();
                if (button != null) {
                    button.subTextRect = subTextRect;
                    button.updateSubTextRect();
                }
            }
            return this;
        }

        /**
         * Padding inside the sub-text-view, in pixels.
         * Note: compares by reference, so an equal-but-distinct Rect still triggers a sync.
         */
        public Builder subTextPadding(Rect subTextPadding) {
            if (this.subTextPadding != subTextPadding) {
                this.subTextPadding = subTextPadding;
                BoomButton button = button();
                if (button != null) {
                    button.subTextPadding = subTextPadding;
                    button.updateSubTextPadding();
                }
            }
            return this;
        }

        /** Typeface of the sub-text. */
        public Builder subTypeface(Typeface subTypeface) {
            this.subTypeface = subTypeface;
            return this;
        }

        /** Maximum number of lines of the sub-text-view. */
        public Builder subMaxLines(int subMaxLines) {
            this.subMaxLines = subMaxLines;
            return this;
        }

        /** Gravity of the sub-text-view, e.g. Gravity.CENTER. */
        public Builder subTextGravity(int subTextGravity) {
            this.subTextGravity = subTextGravity;
            return this;
        }

        /** Ellipsize mode of the sub-text-view. */
        public Builder subEllipsize(TextUtils.TruncateAt subEllipsize) {
            this.subEllipsize = subEllipsize;
            return this;
        }

        /** Text size of the sub-text-view, in sp. */
        public Builder subTextSize(int subTextSize) {
            this.subTextSize = subTextSize;
            return this;
        }

        /** Width of the boom-button, in pixels. */
        public Builder buttonWidth(int buttonWidth) {
            this.buttonWidth = buttonWidth;
            return this;
        }

        /** Height of the boom-button, in pixels. */
        public Builder buttonHeight(int buttonHeight) {
            this.buttonHeight = buttonHeight;
            return this;
        }

        /** Corner-radius of the boom-button, in pixels. */
        public Builder buttonCornerRadius(int buttonCornerRadius) {
            this.buttonCornerRadius = buttonCornerRadius;
            return this;
        }

        /** @return width of the boom-button, in pixels */
        public int getButtonWidth() {
            return buttonWidth;
        }

        /** @return height of the boom-button, in pixels */
        public int getButtonHeight() {
            return buttonHeight;
        }

        //endregion

        /**
         * Builds the ham button and registers a weak reference to it so later
         * builder calls can sync changes. Library-internal; not for app code.
         *
         * @param context the context
         * @return the ham button
         */
        public HamButton build(Context context) {
            HamButton button = new HamButton(this, context);
            weakReferenceButton(button);
            return button;
        }
    }
}
package com.modeln.build.ctrl.command.chart;

import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Enumeration;
import java.util.GregorianCalendar;
import java.util.Hashtable;
import java.util.List;
import java.util.Vector;

import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;

import com.modeln.build.web.errors.ApplicationError;
import com.modeln.build.web.errors.ApplicationException;
import com.modeln.build.web.application.ChartCommand;
import com.modeln.build.web.application.CommandResult;
import com.modeln.build.web.application.WebApplication;
import com.modeln.build.web.database.RepositoryConnection;
import com.modeln.build.web.errors.ErrorMap;
import com.modeln.build.common.data.CMnTimeInterval;
import com.modeln.build.common.data.account.CMnAccount;
import com.modeln.build.ctrl.charts.CMnPatchCountChart;
import com.modeln.build.ctrl.database.CMnPatchReportTable;
import com.modeln.build.ctrl.forms.CMnChartForm;

/**
 * The chart command is used to display a chart of the number
 * of patch requests per time period (days, weeks, months, or years).
 *
 * @author Shawn Stafford
 */
public class CMnShowPatchTrendChart extends ChartCommand {

    /**
     * This is the primary method which will be used to perform the command
     * actions. The application will use this method to service incoming
     * requests. You must pass a reference to the calling application into
     * the service method to allow callback method calls to be performed.
     *
     * @param app Application which called the command
     * @param req HttpServletRequest object
     * @param res HttpServletResponse object
     * @return result of the generic command processing
     * @throws ApplicationException if parsing, querying, or rendering fails
     */
    public CommandResult execute(WebApplication app, HttpServletRequest req, HttpServletResponse res)
        throws ApplicationException
    {
        // Execute the generic actions for all commands
        CommandResult result = super.execute(app, req, res);

        // Execute the actions for the command
        if (!result.containsError()) {
            ApplicationException exApp = null;
            ApplicationError error = null;
            RepositoryConnection rc = null;
            try {
                // Get the format to present the data in
                String format = req.getParameter(CMnChartForm.FORMAT_LABEL);

                // Obtain the height and width for the chart
                int chartHeight = Integer.parseInt(req.getParameter(CMnChartForm.HEIGHT_LABEL));
                int chartWidth = Integer.parseInt(req.getParameter(CMnChartForm.WIDTH_LABEL));

                // Obtain the date range for the chart
                String startDate = req.getParameter(CMnChartForm.START_DATE_LABEL);
                Date start = CMnChartForm.DATE.parse(startDate);
                String endDate = req.getParameter(CMnChartForm.END_DATE_LABEL);
                Date end = CMnChartForm.DATE.parse(endDate);

                // Obtain the list of customers
                rc = app.getRepositoryConnection();
                CMnPatchReportTable reportTable = CMnPatchReportTable.getInstance();
                app.debug("CMnShowPatchTrendChart rendering chart from " + start + " to " + end);

                // Get a list of request dates that occurred within the time frame
                Vector<Date> requests = reportTable.getRequestDate(rc.getConnection(), start, end);

                // Categorize the request dates by time interval (default: month)
                String interval = req.getParameter("interval");
                String label = "Month";
                int size = Calendar.MONTH;
                if (interval != null) {
                    if (interval.equalsIgnoreCase("week")) {
                        size = Calendar.WEEK_OF_MONTH;
                        label = "Week";
                    } else if (interval.equalsIgnoreCase("day")) {
                        size = Calendar.DAY_OF_MONTH;
                        label = "Day";
                    } else if (interval.equalsIgnoreCase("year")) {
                        size = Calendar.YEAR;
                        label = "Year";
                    }
                }
                Hashtable<CMnTimeInterval, Integer> intervals = groupBy(requests, size);

                if ((format != null) && format.equalsIgnoreCase(CMnChartForm.FORMAT_CSV)) {
                    streamTimeSpreadsheet(app, req, res, intervals);
                } else {
                    JFreeChart chart = CMnPatchCountChart.getPatchesByIntervalChart(intervals, label);

                    // Display the chart to the user
                    if (chart != null) {
                        // BUGFIX: the content type must be set before any bytes
                        // are written to the response body, or it may be ignored.
                        res.setContentType("image/png");
                        ChartUtilities.writeChartAsPNG(res.getOutputStream(), chart, chartWidth, chartHeight);
                    } else {
                        exApp = new ApplicationException(
                            ErrorMap.APPLICATION_DISPLAY_FAILURE,
                            "Failed to render chart");
                    }
                }
            } catch (ApplicationException aex) {
                exApp = aex;
            } catch (Exception ex) {
                exApp = new ApplicationException(
                    ErrorMap.APPLICATION_DISPLAY_FAILURE,
                    "Failed to process command.");
                exApp.setStackTrace(ex);
            } finally {
                app.releaseRepositoryConnection(rc);

                // Throw the exception once the connection has been cleaned up
                if (exApp != null) {
                    throw exApp;
                }
            } // try/catch
        } // if no error

        return result;
    }

    /**
     * Count the number of requests that occurred in each interval.
     * The interval size is specified using the Calendar fields of
     * DAY_OF_MONTH, WEEK_OF_MONTH, MONTH, or YEAR.
     *
     * @param dates List of patch request dates
     * @param size Interval size
     * @return Summary count of requests per interval of time
     */
    public Hashtable<CMnTimeInterval, Integer> groupBy(Vector<Date> dates, int size) {
        Hashtable<CMnTimeInterval, Integer> intervals = new Hashtable<CMnTimeInterval, Integer>();

        // Tally each date into its interval, creating intervals on demand
        Enumeration<Date> dateList = dates.elements();
        while (dateList.hasMoreElements()) {
            Date currentDate = dateList.nextElement();
            CMnTimeInterval interval = getInterval(intervals.keys(), currentDate);
            Integer count = null;
            if (interval != null) {
                count = intervals.get(interval) + 1;
            } else {
                interval = new CMnTimeInterval(currentDate, size);
                // BUGFIX: the date that creates a new interval is itself a
                // request in that interval, so the count starts at 1 (was 0,
                // which under-counted every interval by one).
                count = Integer.valueOf(1);
            }
            // Add or update the interval with the latest count
            intervals.put(interval, count);
        }

        return intervals;
    }

    /**
     * Iterate through the list of intervals to find the one
     * that corresponds to the specified date.
     *
     * @param intervals Candidate intervals
     * @param date Target date
     * @return Interval containing the specified date, or null if none found
     */
    private CMnTimeInterval getInterval(Enumeration<CMnTimeInterval> intervals, Date date) {
        while (intervals.hasMoreElements()) {
            CMnTimeInterval current = intervals.nextElement();
            if (current.contains(date)) {
                return current;
            }
        }
        return null;
    }

    /**
     * Export the results as a spreadsheet with one row per interval:
     * interval name, start date, end date, patch count.
     *
     * @param app Application servicing the request
     * @param req HttpServletRequest object
     * @param res HttpServletResponse object
     * @param intervals Count of requests per interval
     * @throws ApplicationException if the spreadsheet cannot be streamed
     */
    protected static void streamTimeSpreadsheet(
            WebApplication app,
            HttpServletRequest req,
            HttpServletResponse res,
            Hashtable<CMnTimeInterval, Integer> intervals)
        throws ApplicationException
    {
        // spreadsheet content
        List<List<String>> content = new ArrayList<List<String>>(intervals.size());

        // Represent each time interval as a row in the spreadsheet
        Enumeration<CMnTimeInterval> intervalList = intervals.keys();
        while (intervalList.hasMoreElements()) {
            CMnTimeInterval interval = intervalList.nextElement();
            if (interval != null) {
                ArrayList<String> row = new ArrayList<String>(4);

                // Column value: interval name
                row.add(interval.getName());

                // Column value: interval start
                row.add(interval.getStart().toString());

                // Column value: interval end
                row.add(interval.getEnd().toString());

                // Column value: service patch count (column omitted when missing,
                // preserving the original behavior)
                Integer count = intervals.get(interval);
                if (count != null) {
                    row.add(count.toString());
                }

                content.add(row);
            }
        }

        // Stream the spreadsheet content to the user
        try {
            app.streamAsSpreadsheet(req, res, content);
        } catch (Exception ex) {
            throw new ApplicationException(
                ErrorMap.APPLICATION_DISPLAY_FAILURE,
                "Failed to create spreadsheet content.");
        }
    }

}
/*
 * Copyright 2006-2016 Edward Smith
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package root.cache;

import java.util.NoSuchElementException;
import java.util.concurrent.locks.ReentrantLock;

import root.lang.ConcurrentItemizer;
import root.util.Root;

/**
 * A fixed-capacity least-recently-used (LRU) cache whose operations are all guarded by a single
 * {@link ReentrantLock}. Each entry lives both in a bucketed hash table (chained via
 * {@code mapNext}/{@code mapPrev}, for key lookup) and on a doubly-linked list ordered from least
 * recently used ({@code listHead}) to most recently used ({@code listTail}); when the cache is
 * full, the entry at {@code listHead} is evicted and its {@code CacheEntry} object recycled for
 * the new mapping.
 * <p>
 * TODO: How should a cache entry be invalidated, or marked dirty, when an update is performed?<br>
 * TODO: Think about a cache.update() method and what that should do<br>
 * TODO: Also think about an CacheLRUConcurrent<K, List<V>> implementation that caches lists of data (or Sets for that matter)<br>
 * TODO: What about cache synchronization across servers?<br>
 * TODO: Test all of the remove() methods for correctness (do this for every Cache class)
 * <p>
 *
 * @author Edward Smith
 * @version 0.5
 * @since 0.5
 *
 * @param <K>
 *            The key type of the cache entry
 * @param <V>
 *            The value type of the cache entry
 */
public final class CacheLRUConcurrent<K, V> implements RootCache<K, V> {

	// <><><><><><><><><><><><><>< Private Classes ><><><><><><><><><><><><><>

	/**
	 * Itemizer that walks the LRU list from least recently used ({@code listHead}) toward most
	 * recently used. Callers are expected to bracket iteration with {@link #lock()} /
	 * {@link #unlock()}: {@code lock()} acquires the cache lock and positions the cursor at the
	 * head of the list.
	 *
	 * @author Edward Smith
	 * @version 0.5
	 * @since 0.5
	 *
	 * @param <V>
	 *            The value type of the cache entry
	 */
	private final class Ascend implements ConcurrentItemizer<V> {

		// <><><><><><><><><><><><><><> Attributes <><><><><><><><><><><><><><>

		// Number of elements returned so far, minus one (-1 before the first next())
		private int index;
		// Next entry to return; null when exhausted (or before lock() is called)
		private CacheEntry<K, V> cursor;

		// <><><><><><><><><><><><><>< Constructors ><><><><><><><><><><><><><>

		private Ascend() {
			this.reset();
		}

		// <><><><><><><><><><><><><> Public Methods <><><><><><><><><><><><><>

		@Override
		public final int getIndex() {
			return this.index;
		}

		/** Returns the current number of entries in the enclosing cache. */
		@Override
		public final int getSize() {
			return CacheLRUConcurrent.this.size;
		}

		@Override
		public final boolean hasNext() {
			return this.cursor != null;
		}

		@Override
		public final ConcurrentItemizer<V> iterator() {
			return this;
		}

		/** Acquires the cache lock and positions the cursor at the LRU end of the list. */
		@Override
		public final void lock() {
			CacheLRUConcurrent.this.cacheLock.lock();
			this.cursor = CacheLRUConcurrent.this.listHead;
		}

		/**
		 * Returns the next value in least-recently-used order and advances the cursor.
		 *
		 * @throws NoSuchElementException
		 *             if the iteration is exhausted
		 */
		@Override
		public final V next() {
			if (this.cursor == null) {
				throw new NoSuchElementException();
			}
			final V v = this.cursor.value;
			this.cursor = this.cursor.listNext;
			this.index++;
			return v;
		}

		/**
		 * Removal during iteration is not supported.
		 *
		 * @throws UnsupportedOperationException
		 *             always
		 */
		@Override
		public final void remove() {
			throw new UnsupportedOperationException();
		}

		@Override
		public final void reset() {
			this.index = -1;
			this.cursor = null;
		}

		/** Releases the cache lock acquired by {@link #lock()}. */
		@Override
		public final void unlock() {
			CacheLRUConcurrent.this.cacheLock.unlock();
		}

	} // End Ascend

	// <><><><><><><><><><><><><><><> Attributes <><><><><><><><><><><><><><><>

	// Least recently used end of the doubly-linked LRU list
	private CacheEntry<K, V> listHead;
	// Most recently used end of the doubly-linked LRU list
	private CacheEntry<K, V> listTail;
	// Current number of cached entries
	private int size;
	// Maximum number of entries (as adjusted by Root.calculateHashTableCapacity)
	private final int capacity;
	// Hash table of entries, one bucket per slot, chained via mapNext/mapPrev
	private final CacheEntry<K, V>[] cache;
	// Single lock guarding all cache state (hash table, LRU list, size)
	private final ReentrantLock cacheLock;

	// <><><><><><><><><><><><><><>< Constructors ><><><><><><><><><><><><><><>

	/**
	 * Creates an empty cache.
	 *
	 * @param capacity
	 *            the requested capacity; the effective capacity is
	 *            {@code Root.calculateHashTableCapacity(capacity)}
	 */
	public CacheLRUConcurrent(final int capacity) {
		this.capacity = Root.calculateHashTableCapacity(capacity);
		this.cache = CacheEntry.newArray(this.capacity);
		this.cacheLock = new ReentrantLock();
	}

	// <><><><><><><><><><><><><><> Public Methods <><><><><><><><><><><><><><>

	/**
	 * Removes all entries from the cache, nulling out every entry's fields to aid
	 * garbage collection, and resets the LRU list and size.
	 *
	 * @see root.cache.RootCache#clear()
	 */
	@Override
	public final void clear() {
		this.cacheLock.lock();
		try {
			if (this.size != 0) {
				CacheEntry<K, V> e, next;
				// 1. Clear all entries from the cache
				for (int i = 0; i < this.cache.length; i++) {
					for (e = this.cache[i]; e != null; e = next) {
						next = e.mapNext;
						e.key = null;
						e.value = null;
						e.listNext = null;
						e.listPrev = null;
						e.mapNext = null;
						e.mapPrev = null;
					}
					this.cache[i] = null;
				}
				// 2. Reset the listHead and listTail
				this.listHead = null;
				this.listTail = null;
				// 3. Reset the size to zero
				this.size = 0;
			}
		} finally {
			this.cacheLock.unlock();
		}
	}

	/**
	 * Returns the value mapped to {@code key}, or {@code null} if absent. A successful
	 * lookup promotes the entry to the most-recently-used end of the LRU list.
	 *
	 * @param key
	 *            the key to look up
	 * @return the cached value, or {@code null} if there is no mapping for {@code key}
	 *
	 * @see root.cache.RootCache#get(K)
	 */
	@Override
	public final V get(final K key) {
		// NOTE(review): assumes Root.hashCode(key) is non-negative; a negative hash
		// would make this index out of bounds — TODO confirm Root.hashCode's contract
		final int i = Root.hashCode(key) % this.cache.length;
		this.cacheLock.lock();
		try {
			for (CacheEntry<K, V> e = this.cache[i]; e != null; e = e.mapNext) {
				if (Root.equals(e.key, key)) {
					// 1. Move the cached item to the tail of the list
					if (this.listTail != e) {
						if (this.listHead == e) {
							this.listHead = this.listHead.listNext;
							this.listHead.listPrev = null;
						} else {
							e.listPrev.listNext = e.listNext;
							e.listNext.listPrev = e.listPrev;
						}
						this.listTail.listNext = e;
						e.listPrev = this.listTail;
						e.listNext = null;
						this.listTail = e;
					}
					// 2. Return cached value
					return e.value;
				}
			}
		} finally {
			this.cacheLock.unlock();
		}
		return null;
	}

	/**
	 * Returns the effective capacity of this cache (after adjustment by
	 * {@code Root.calculateHashTableCapacity}).
	 *
	 * @return the maximum number of entries this cache can hold
	 *
	 * @see root.cache.RootCache#getCapacity()
	 */
	@Override
	public final int getCapacity() {
		return this.capacity;
	}

	/** Returns the current number of entries in the cache. */
	@Override
	public final int getSize() {
		return this.size;
	}

	/** Returns {@code true} if the cache contains no entries. */
	@Override
	public final boolean isEmpty() {
		return this.size == 0;
	}

	/**
	 * Returns an itemizer over the cached values in least-recently-used order.
	 * Callers must bracket iteration with {@code lock()}/{@code unlock()}.
	 *
	 * @see root.cache.RootCache#iterator()
	 */
	@Override
	public final ConcurrentItemizer<V> iterator() {
		return new Ascend();
	}

	/**
	 * Associates the specified value with the specified key in this cache.
	 * <p>
	 * NOTE(review): when an insert evicts the least-recently-used entry, this method
	 * returns the evicted entry's value rather than {@code null} (see step 2d below).
	 * Also, updating an existing key replaces its value without promoting the entry
	 * in the LRU list — confirm both behaviors are intended.
	 *
	 * @param key
	 *            the key with which the value will be associated
	 * @param value
	 *            the value associated with the specified key
	 * @return the previous value associated with key, or <code>null</code> if there was no mapping for key
	 *
	 * @see root.cache.RootCache#put(K, V)
	 */
	@Override
	public final V put(final K key, final V value) {
		final int i = Root.hashCode(key) % this.cache.length;
		this.cacheLock.lock();
		try {
			CacheEntry<K, V> e = this.cache[i];
			// 1. Check to see if the key is already mapped to the cache
			for (; e != null; e = e.mapNext) {
				if (Root.equals(e.key, key)) {
					final V oldValue = e.value;
					e.value = value;
					return oldValue;
				}
			}
			// 2. Recycle oldest cached item object if cache is full
			if (this.size == this.capacity) {
				// a) Remove the oldest CacheEntry from both the list and the cache
				final CacheEntry<K, V> oldestItem = this.listHead;
				this.listHead = this.listHead.listNext;
				// NOTE(review): if the effective capacity could ever be 1, listHead is
				// null here and the next line throws NPE; presumably
				// calculateHashTableCapacity enforces a larger minimum — TODO confirm
				this.listHead.listPrev = null;
				// TODO: Not sure if this logic is correct so test with JUnit (also test remove(key)
				// and removeNext()
				if (oldestItem == this.cache[oldestItem.index]) {
					this.cache[oldestItem.index] = oldestItem.mapNext;
					if (oldestItem.mapNext != null) {
						oldestItem.mapNext.mapPrev = null;
					}
				} else {
					oldestItem.mapPrev.mapNext = oldestItem.mapNext;
					if (oldestItem.mapNext != null) {
						oldestItem.mapNext.mapPrev = oldestItem.mapPrev;
					}
				}
				// b) Reuse the oldest CacheEntry for the new cache entry
				final V oldValue = oldestItem.value;
				this.cache[i] = oldestItem.recycle(key, value, i, this.cache[i]);
				// c) Attach recycled item to the tail of the list
				this.listTail.listNext = oldestItem;
				oldestItem.listPrev = this.listTail;
				oldestItem.listNext = null;
				this.listTail = oldestItem;
				// d) Return old cached value
				return oldValue;
			}
			// 3. Otherwise create new cache item and append it to the list tail
			e = this.cache[i] = new CacheEntry<K, V>(key, value, i, this.cache[i]);
			if (this.listTail == null) {
				this.listHead = e;
			} else {
				this.listTail.listNext = e;
				e.listPrev = this.listTail;
			}
			this.listTail = e;
			this.size++;
		} finally {
			this.cacheLock.unlock();
		}
		return null;
	}

	/**
	 * Removes the mapping for {@code key}, if present, unlinking the entry from both
	 * the hash table and the LRU list and nulling its fields for garbage collection.
	 *
	 * @param key
	 *            the key whose mapping should be removed
	 * @return the value previously associated with {@code key}, or {@code null} if none
	 *
	 * @see root.cache.RootCache#remove(Object)
	 */
	@Override
	public final V remove(final K key) {
		if (this.size != 0) {
			final int i = Root.hashCode(key) % this.cache.length;
			CacheEntry<K, V> foundItem;
			this.cacheLock.lock();
			try {
				// 1. Find the CacheEntry associated with the key
				for (foundItem = this.cache[i]; foundItem != null; foundItem = foundItem.mapNext) {
					if (Root.equals(foundItem.key, key)) {
						// 2. Remove the found item from the cache and list
						if (this.size == 1) {
							this.listHead = null;
							this.listTail = null;
							this.cache[i] = null;
						} else {
							// a) Remove the found item from the list
							if (this.listHead == foundItem) {
								this.listHead = this.listHead.listNext;
								this.listHead.listPrev = null;
							} else if (this.listTail == foundItem) {
								this.listTail = foundItem.listPrev;
								this.listTail.listNext = null;
							} else {
								foundItem.listPrev.listNext = foundItem.listNext;
								foundItem.listNext.listPrev = foundItem.listPrev;
							}
							// b) Remove the found item from the cache
							if (foundItem == this.cache[i]) {
								this.cache[i] = foundItem.mapNext;
								if (foundItem.mapNext != null) {
									foundItem.mapNext.mapPrev = null;
								}
							} else {
								foundItem.mapPrev.mapNext = foundItem.mapNext;
								if (foundItem.mapNext != null) {
									foundItem.mapNext.mapPrev = foundItem.mapPrev;
								}
							}
						}
						// 3. Clean up foundItem for garbage collection purposes
						final V oldValue = foundItem.value;
						foundItem.key = null;
						foundItem.value = null;
						foundItem.listNext = null;
						foundItem.listPrev = null;
						foundItem.mapNext = null;
						foundItem.mapPrev = null;
						// 4. Decrement the size by one
						this.size--;
						// 5. Return old cached value
						return oldValue;
					}
				}
			} finally {
				this.cacheLock.unlock();
			}
		}
		return null;
	}

	/**
	 * Removes and returns the least-recently-used entry's value (the entry at
	 * {@code listHead}), or {@code null} if the cache is empty.
	 *
	 * @return the evicted value, or {@code null} if the cache is empty
	 *
	 * @see root.cache.RootCache#removeNext()
	 */
	@Override
	public final V removeNext() {
		this.cacheLock.lock();
		try {
			if (this.size != 0) {
				// 1. Remove the oldest CacheEntry from both the list and the cache
				final CacheEntry<K, V> oldestItem = this.listHead;
				if (this.size == 1) {
					this.listHead = null;
					this.listTail = null;
					this.cache[oldestItem.index] = null;
				} else {
					this.listHead = this.listHead.listNext;
					this.listHead.listPrev = null;
					if (oldestItem == this.cache[oldestItem.index]) {
						this.cache[oldestItem.index] = oldestItem.mapNext;
						if (oldestItem.mapNext != null) {
							oldestItem.mapNext.mapPrev = null;
						}
					} else {
						oldestItem.mapPrev.mapNext = oldestItem.mapNext;
						if (oldestItem.mapNext != null) {
							oldestItem.mapNext.mapPrev = oldestItem.mapPrev;
						}
					}
				}
				// 2. Clean up oldestItem for garbage collection purposes
				final V oldValue = oldestItem.value;
				oldestItem.key = null;
				oldestItem.value = null;
				oldestItem.listNext = null;
				oldestItem.listPrev = null;
				oldestItem.mapNext = null;
				oldestItem.mapPrev = null;
				// 3. Decrement the size by one
				this.size--;
				// 4. Return old cached value
				return oldValue;
			}
		} finally {
			this.cacheLock.unlock();
		}
		return null;
	}

} // End CacheLRUConcurrent
/*
 * Copyright 2002-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.jdbc.core.namedparam;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.junit.Test;

import org.springframework.dao.InvalidDataAccessApiUsageException;

import static org.junit.Assert.*;

/**
 * Tests for {@link NamedParameterUtils}: parsing of named parameters, substitution
 * into JDBC '?' placeholders, and building of value/type arrays.
 *
 * <p>Note: primitive counts and lengths are checked with {@code assertEquals}, not
 * {@code assertSame} — {@code assertSame} autoboxes both arguments and compares
 * object identity, which only passes by accident for values in the Integer cache.
 *
 * @author Thomas Risberg
 * @author Juergen Hoeller
 * @author Rick Evans
 */
public class NamedParameterUtilsTests {

    @Test
    public void parseSql() {
        String sql = "xxx :a yyyy :b :c :a zzzzz";
        ParsedSql psql = NamedParameterUtils.parseSqlStatement(sql);
        assertEquals("xxx ? yyyy ? ? ? zzzzz", NamedParameterUtils.substituteNamedParameters(psql, null));
        assertEquals("a", psql.getParameterNames().get(0));
        assertEquals("c", psql.getParameterNames().get(2));
        assertEquals("a", psql.getParameterNames().get(3));
        assertEquals(4, psql.getTotalParameterCount());
        assertEquals(3, psql.getNamedParameterCount());

        String sql2 = "xxx &a yyyy ? zzzzz";
        ParsedSql psql2 = NamedParameterUtils.parseSqlStatement(sql2);
        assertEquals("xxx ? yyyy ? zzzzz", NamedParameterUtils.substituteNamedParameters(psql2, null));
        assertEquals("a", psql2.getParameterNames().get(0));
        assertEquals(2, psql2.getTotalParameterCount());
        assertEquals(1, psql2.getNamedParameterCount());

        String sql3 = "xxx &a+:b" + '\t' + ":c%10 yyyy ? zzzzz";
        ParsedSql psql3 = NamedParameterUtils.parseSqlStatement(sql3);
        assertEquals("a", psql3.getParameterNames().get(0));
        assertEquals("b", psql3.getParameterNames().get(1));
        assertEquals("c", psql3.getParameterNames().get(2));
    }

    @Test
    public void substituteNamedParameters() {
        MapSqlParameterSource namedParams = new MapSqlParameterSource();
        namedParams.addValue("a", "a").addValue("b", "b").addValue("c", "c");
        assertEquals("xxx ? ? ?", NamedParameterUtils.substituteNamedParameters("xxx :a :b :c", namedParams));
        assertEquals("xxx ? ? ? xx ? ?", NamedParameterUtils.substituteNamedParameters("xxx :a :b :c xx :a :a", namedParams));
    }

    @Test
    public void convertParamMapToArray() {
        Map<String, String> paramMap = new HashMap<String, String>();
        paramMap.put("a", "a");
        paramMap.put("b", "b");
        paramMap.put("c", "c");
        // assertEquals (not assertSame): autoboxed int identity comparison is unreliable
        assertEquals(3, NamedParameterUtils.buildValueArray("xxx :a :b :c", paramMap).length);
        assertEquals(5, NamedParameterUtils.buildValueArray("xxx :a :b :c xx :a :b", paramMap).length);
        assertEquals(5, NamedParameterUtils.buildValueArray("xxx :a :a :a xx :a :a", paramMap).length);
        assertEquals("b", NamedParameterUtils.buildValueArray("xxx :a :b :c xx :a :b", paramMap)[4]);
        try {
            NamedParameterUtils.buildValueArray("xxx :a :b ?", paramMap);
            fail("mixed named parameters and ? placeholders not detected");
        }
        catch (InvalidDataAccessApiUsageException expected) {
            // expected: mixing named parameters with '?' placeholders is rejected
        }
    }

    @Test
    public void convertTypeMapToArray() {
        MapSqlParameterSource namedParams = new MapSqlParameterSource();
        namedParams.addValue("a", "a", 1).addValue("b", "b", 2).addValue("c", "c", 3);
        assertEquals(3, NamedParameterUtils
                .buildSqlTypeArray(NamedParameterUtils.parseSqlStatement("xxx :a :b :c"), namedParams).length);
        assertEquals(5, NamedParameterUtils
                .buildSqlTypeArray(NamedParameterUtils.parseSqlStatement("xxx :a :b :c xx :a :b"), namedParams).length);
        assertEquals(5, NamedParameterUtils
                .buildSqlTypeArray(NamedParameterUtils.parseSqlStatement("xxx :a :a :a xx :a :a"), namedParams).length);
        assertEquals(2, NamedParameterUtils
                .buildSqlTypeArray(NamedParameterUtils.parseSqlStatement("xxx :a :b :c xx :a :b"), namedParams)[4]);
    }

    @Test
    public void convertTypeMapToSqlParameterList() {
        MapSqlParameterSource namedParams = new MapSqlParameterSource();
        namedParams.addValue("a", "a", 1).addValue("b", "b", 2).addValue("c", "c", 3, "SQL_TYPE");
        assertEquals(3, NamedParameterUtils
                .buildSqlParameterList(NamedParameterUtils.parseSqlStatement("xxx :a :b :c"), namedParams).size());
        assertEquals(5, NamedParameterUtils
                .buildSqlParameterList(NamedParameterUtils.parseSqlStatement("xxx :a :b :c xx :a :b"), namedParams).size());
        assertEquals(5, NamedParameterUtils
                .buildSqlParameterList(NamedParameterUtils.parseSqlStatement("xxx :a :a :a xx :a :a"), namedParams).size());
        assertEquals(2, NamedParameterUtils
                .buildSqlParameterList(NamedParameterUtils.parseSqlStatement("xxx :a :b :c xx :a :b"), namedParams).get(4).getSqlType());
        assertEquals("SQL_TYPE", NamedParameterUtils
                .buildSqlParameterList(NamedParameterUtils.parseSqlStatement("xxx :a :b :c"), namedParams).get(2).getTypeName());
    }

    @Test(expected = InvalidDataAccessApiUsageException.class)
    public void buildValueArrayWithMissingParameterValue() throws Exception {
        String sql = "select count(0) from foo where id = :id";
        NamedParameterUtils.buildValueArray(sql, Collections.<String, Object>emptyMap());
    }

    @Test
    public void substituteNamedParametersWithStringContainingQuotes() throws Exception {
        String expectedSql = "select 'first name' from artists where id = ? and quote = 'exsqueeze me?'";
        String sql = "select 'first name' from artists where id = :id and quote = 'exsqueeze me?'";
        String newSql = NamedParameterUtils.substituteNamedParameters(sql, new MapSqlParameterSource());
        assertEquals(expectedSql, newSql);
    }

    @Test
    public void testParseSqlStatementWithStringContainingQuotes() throws Exception {
        String expectedSql = "select 'first name' from artists where id = ? and quote = 'exsqueeze me?'";
        String sql = "select 'first name' from artists where id = :id and quote = 'exsqueeze me?'";
        ParsedSql parsedSql = NamedParameterUtils.parseSqlStatement(sql);
        assertEquals(expectedSql, NamedParameterUtils.substituteNamedParameters(parsedSql, null));
    }

    /*
     * SPR-4789
     */
    @Test
    public void parseSqlContainingComments() {
        String sql1 = "/*+ HINT */ xxx /* comment ? */ :a yyyy :b :c :a zzzzz -- :xx XX\n";
        ParsedSql psql1 = NamedParameterUtils.parseSqlStatement(sql1);
        assertEquals("/*+ HINT */ xxx /* comment ? */ ? yyyy ? ? ? zzzzz -- :xx XX\n",
                NamedParameterUtils.substituteNamedParameters(psql1, null));
        MapSqlParameterSource paramMap = new MapSqlParameterSource();
        paramMap.addValue("a", "a");
        paramMap.addValue("b", "b");
        paramMap.addValue("c", "c");
        Object[] params = NamedParameterUtils.buildValueArray(psql1, paramMap, null);
        assertEquals(4, params.length);
        assertEquals("a", params[0]);
        assertEquals("b", params[1]);
        assertEquals("c", params[2]);
        assertEquals("a", params[3]);

        String sql2 = "/*+ HINT */ xxx /* comment ? */ :a yyyy :b :c :a zzzzz -- :xx XX";
        ParsedSql psql2 = NamedParameterUtils.parseSqlStatement(sql2);
        assertEquals("/*+ HINT */ xxx /* comment ? */ ? yyyy ? ? ? zzzzz -- :xx XX",
                NamedParameterUtils.substituteNamedParameters(psql2, null));

        String sql3 = "/*+ HINT */ xxx /* comment ? */ :a yyyy :b :c :a zzzzz /* :xx XX*";
        ParsedSql psql3 = NamedParameterUtils.parseSqlStatement(sql3);
        assertEquals("/*+ HINT */ xxx /* comment ? */ ? yyyy ? ? ? zzzzz /* :xx XX*",
                NamedParameterUtils.substituteNamedParameters(psql3, null));

        String sql4 = "/*+ HINT */ xxx /* comment :a ? */ :a yyyy :b :c :a zzzzz /* :xx XX*";
        ParsedSql psql4 = NamedParameterUtils.parseSqlStatement(sql4);
        Map<String, String> parameters = Collections.singletonMap("a", "0");
        assertEquals("/*+ HINT */ xxx /* comment :a ? */ ? yyyy ? ? ? zzzzz /* :xx XX*",
                NamedParameterUtils.substituteNamedParameters(psql4, new MapSqlParameterSource(parameters)));
    }

    /*
     * SPR-4612
     */
    @Test
    public void parseSqlStatementWithPostgresCasting() throws Exception {
        String expectedSql = "select 'first name' from artists where id = ? and birth_date=?::timestamp";
        String sql = "select 'first name' from artists where id = :id and birth_date=:birthDate::timestamp";
        ParsedSql parsedSql = NamedParameterUtils.parseSqlStatement(sql);
        assertEquals(expectedSql, NamedParameterUtils.substituteNamedParameters(parsedSql, null));
    }

    /*
     * SPR-7476
     */
    @Test
    public void parseSqlStatementWithEscapedColon() throws Exception {
        String expectedSql = "select '0\\:0' as a, foo from bar where baz < DATE(? 23:59:59) and baz = ?";
        String sql = "select '0\\:0' as a, foo from bar where baz < DATE(:p1 23\\:59\\:59) and baz = :p2";
        ParsedSql parsedSql = NamedParameterUtils.parseSqlStatement(sql);
        assertEquals(2, parsedSql.getParameterNames().size());
        assertEquals("p1", parsedSql.getParameterNames().get(0));
        assertEquals("p2", parsedSql.getParameterNames().get(1));
        String finalSql = NamedParameterUtils.substituteNamedParameters(parsedSql, null);
        assertEquals(expectedSql, finalSql);
    }

    /*
     * SPR-7476
     */
    @Test
    public void parseSqlStatementWithBracketDelimitedParameterNames() throws Exception {
        String expectedSql = "select foo from bar where baz = b??z";
        String sql = "select foo from bar where baz = b:{p1}:{p2}z";
        ParsedSql parsedSql = NamedParameterUtils.parseSqlStatement(sql);
        assertEquals(2, parsedSql.getParameterNames().size());
        assertEquals("p1", parsedSql.getParameterNames().get(0));
        assertEquals("p2", parsedSql.getParameterNames().get(1));
        String finalSql = NamedParameterUtils.substituteNamedParameters(parsedSql, null);
        assertEquals(expectedSql, finalSql);
    }

    /*
     * SPR-7476
     */
    @Test
    public void parseSqlStatementWithEmptyBracketsOrBracketsInQuotes() throws Exception {
        String expectedSql = "select foo from bar where baz = b:{}z";
        String sql = "select foo from bar where baz = b:{}z";
        ParsedSql parsedSql = NamedParameterUtils.parseSqlStatement(sql);
        assertEquals(0, parsedSql.getParameterNames().size());
        String finalSql = NamedParameterUtils.substituteNamedParameters(parsedSql, null);
        assertEquals(expectedSql, finalSql);

        String expectedSql2 = "select foo from bar where baz = 'b:{p1}z'";
        String sql2 = "select foo from bar where baz = 'b:{p1}z'";
        ParsedSql parsedSql2 = NamedParameterUtils.parseSqlStatement(sql2);
        assertEquals(0, parsedSql2.getParameterNames().size());
        String finalSql2 = NamedParameterUtils.substituteNamedParameters(parsedSql2, null);
        assertEquals(expectedSql2, finalSql2);
    }

    /*
     * SPR-2544
     */
    @Test
    public void parseSqlStatementWithLogicalAnd() {
        String expectedSql = "xxx & yyyy";
        ParsedSql parsedSql = NamedParameterUtils.parseSqlStatement(expectedSql);
        assertEquals(expectedSql, NamedParameterUtils.substituteNamedParameters(parsedSql, null));
    }

    /*
     * SPR-2544
     */
    @Test
    public void substituteNamedParametersWithLogicalAnd() throws Exception {
        String expectedSql = "xxx & yyyy";
        String newSql = NamedParameterUtils.substituteNamedParameters(expectedSql, new MapSqlParameterSource());
        assertEquals(expectedSql, newSql);
    }

    /*
     * SPR-3173
     */
    @Test
    public void variableAssignmentOperator() throws Exception {
        String expectedSql = "x := 1";
        String newSql = NamedParameterUtils.substituteNamedParameters(expectedSql, new MapSqlParameterSource());
        assertEquals(expectedSql, newSql);
    }

    /*
     * SPR-8280
     */
    @Test
    public void parseSqlStatementWithQuotedSingleQuote() {
        String sql = "SELECT ':foo'':doo', :xxx FROM DUAL";
        ParsedSql psql = NamedParameterUtils.parseSqlStatement(sql);
        assertEquals(1, psql.getTotalParameterCount());
        assertEquals("xxx", psql.getParameterNames().get(0));
    }

    @Test
    public void parseSqlStatementWithQuotesAndCommentBefore() {
        String sql = "SELECT /*:doo*/':foo', :xxx FROM DUAL";
        ParsedSql psql = NamedParameterUtils.parseSqlStatement(sql);
        assertEquals(1, psql.getTotalParameterCount());
        assertEquals("xxx", psql.getParameterNames().get(0));
    }

    @Test
    public void parseSqlStatementWithQuotesAndCommentAfter() {
        String sql2 = "SELECT ':foo'/*:doo*/, :xxx FROM DUAL";
        ParsedSql psql2 = NamedParameterUtils.parseSqlStatement(sql2);
        assertEquals(1, psql2.getTotalParameterCount());
        assertEquals("xxx", psql2.getParameterNames().get(0));
    }

}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.diff.tools.util; import com.intellij.diff.tools.util.base.TextDiffViewerUtil; import com.intellij.diff.util.DiffDividerDrawUtil; import com.intellij.diff.util.DiffDrawUtil; import com.intellij.diff.util.DiffUtil; import com.intellij.diff.util.LineRange; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.FoldRegion; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.editor.event.DocumentListener; import com.intellij.openapi.editor.ex.DocumentEx; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.editor.ex.FoldingListener; import com.intellij.openapi.editor.ex.FoldingModelEx; import com.intellij.openapi.editor.markup.RangeHighlighter; import com.intellij.openapi.util.BooleanGetter; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.UserDataHolder; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.Function; import com.intellij.util.containers.ContainerUtil; import java.util.HashSet; import gnu.trove.TIntFunction; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.awt.*; import java.util.ArrayList; import 
java.util.Iterator; import java.util.List; import java.util.Set; import static com.intellij.diff.util.DiffUtil.getLineCount; import static com.intellij.util.ArrayUtil.toObjectArray; /** * This class allows to add custom foldings to hide unchanged regions in diff. * EditorSettings#isAutoCodeFoldingEnabled() should be true, to avoid collisions with language-specific foldings * (as it's impossible to create partially overlapped folding regions) * * @see DiffUtil#setFoldingModelSupport(EditorEx) */ public class FoldingModelSupport { public static final String PLACEHOLDER = " "; private static final Key<FoldingCache> CACHE_KEY = Key.create("Diff.FoldingUtil.Cache"); protected final int myCount; @NotNull protected final EditorEx[] myEditors; @NotNull protected final List<FoldedBlock[]> myFoldings = new ArrayList<>(); private boolean myDuringSynchronize; private final boolean[] myShouldUpdateLineNumbers; public FoldingModelSupport(@NotNull EditorEx[] editors, @NotNull Disposable disposable) { myEditors = editors; myCount = myEditors.length; myShouldUpdateLineNumbers = new boolean[myCount]; MyDocumentListener documentListener = new MyDocumentListener(); List<Document> documents = ContainerUtil.map(myEditors, EditorEx::getDocument); TextDiffViewerUtil.installDocumentListeners(documentListener, documents, disposable); for (int i = 0; i < myCount; i++) { if (myCount > 1) { myEditors[i].getFoldingModel().addListener(new MyFoldingListener(i), disposable); } } } public int getCount() { return myCount; } // // Init // /* * Iterator returns ranges of changed lines: start1, end1, start2, end2, ... 
*/ protected void install(@Nullable final Iterator<int[]> changedLines, @Nullable final UserDataHolder context, @NotNull final Settings settings) { ApplicationManager.getApplication().assertIsDispatchThread(); for (FoldedBlock folding : getFoldedBlocks()) { folding.destroyHighlighter(); } runBatchOperation(() -> { for (FoldedBlock folding : getFoldedBlocks()) { folding.destroyFolding(); } myFoldings.clear(); if (changedLines != null && settings.range != -1) { FoldingBuilder builder = new FoldingBuilder(context, settings); builder.build(changedLines); } }); updateLineNumbers(true); } private class FoldingBuilder { @NotNull private final Settings mySettings; @NotNull private final ExpandSuggester myExpandSuggester; @NotNull private final int[] myLineCount; public FoldingBuilder(@Nullable UserDataHolder context, @NotNull Settings settings) { FoldingCache cache = context != null ? context.getUserData(CACHE_KEY) : null; myExpandSuggester = new ExpandSuggester(cache, settings.defaultExpanded); mySettings = settings; myLineCount = new int[myCount]; for (int i = 0; i < myCount; i++) { myLineCount[i] = getLineCount(myEditors[i].getDocument()); } } private void build(@NotNull final Iterator<int[]> changedLines) { int[] starts = new int[myCount]; int[] ends = new int[myCount]; int[] last = new int[myCount]; for (int i = 0; i < myCount; i++) { last[i] = Integer.MIN_VALUE; } while (changedLines.hasNext()) { int[] offsets = changedLines.next(); for (int i = 0; i < myCount; i++) { starts[i] = last[i]; ends[i] = offsets[i * 2]; last[i] = offsets[i * 2 + 1]; } addRange(starts, ends); } for (int i = 0; i < myCount; i++) { starts[i] = last[i]; ends[i] = Integer.MAX_VALUE; } addRange(starts, ends); } private void addRange(int[] starts, int[] ends) { List<FoldedBlock> result = new ArrayList<>(3); int[] rangeStarts = new int[myCount]; int[] rangeEnds = new int[myCount]; for (int number = 0; ; number++) { int shift = getRangeShift(mySettings.range, number); if (shift == -1) break; for 
(int i = 0; i < myCount; i++) { rangeStarts[i] = DiffUtil.bound(starts[i] + shift, 0, myLineCount[i]); rangeEnds[i] = DiffUtil.bound(ends[i] - shift, 0, myLineCount[i]); } ContainerUtil.addAllNotNull(result, createRange(rangeStarts, rangeEnds, myExpandSuggester.isExpanded(rangeStarts, rangeEnds))); } if (result.size() > 0) { FoldedBlock[] block = toObjectArray(result, FoldedBlock.class); for (FoldedBlock folding : block) { folding.installHighlighter(block); } myFoldings.add(block); } } @Nullable private FoldedBlock createRange(int[] starts, int[] ends, boolean expanded) { boolean hasFolding = false; FoldRegion[] regions = new FoldRegion[myCount]; boolean hasExpanded = false; // do not desync on runBatchFoldingOperationDoNotCollapseCaret for (int i = 0; i < myCount; i++) { if (ends[i] - starts[i] < 2) continue; regions[i] = addFolding(myEditors[i], starts[i], ends[i], expanded); hasFolding |= regions[i] != null; hasExpanded |= regions[i] != null && regions[i].isExpanded(); } if (hasExpanded && !expanded) { for (FoldRegion region : regions) { if (region != null) region.setExpanded(true); } } return hasFolding ? 
new FoldedBlock(regions) : null; } } @Nullable public static FoldRegion addFolding(@NotNull EditorEx editor, int start, int end, boolean expanded) { DocumentEx document = editor.getDocument(); final int startOffset = document.getLineStartOffset(start); final int endOffset = document.getLineEndOffset(end - 1); FoldRegion value = editor.getFoldingModel().addFoldRegion(startOffset, endOffset, PLACEHOLDER); if (value != null) value.setExpanded(expanded); return value; } private void runBatchOperation(@NotNull Runnable runnable) { Runnable lastRunnable = runnable; for (EditorEx editor : myEditors) { final Runnable finalRunnable = lastRunnable; lastRunnable = () -> { if (DiffUtil.isFocusedComponent(editor.getComponent())) { editor.getFoldingModel().runBatchFoldingOperationDoNotCollapseCaret(finalRunnable); } else { editor.getFoldingModel().runBatchFoldingOperation(finalRunnable); } }; } myDuringSynchronize = true; try { lastRunnable.run(); } finally { myDuringSynchronize = false; } } public void destroy() { for (FoldedBlock folding : getFoldedBlocks()) { folding.destroyHighlighter(); } runBatchOperation(() -> { for (FoldedBlock folding : getFoldedBlocks()) { folding.destroyFolding(); } myFoldings.clear(); }); } // // Line numbers // private class MyDocumentListener implements DocumentListener { @Override public void documentChanged(DocumentEvent e) { if (StringUtil.indexOf(e.getOldFragment(), '\n') != -1 || StringUtil.indexOf(e.getNewFragment(), '\n') != -1) { for (int i = 0; i < myCount; i++) { if (myEditors[i].getDocument() == e.getDocument()) { myShouldUpdateLineNumbers[i] = true; } } } } } @NotNull public TIntFunction getLineConvertor(final int index) { return value -> { updateLineNumbers(false); for (FoldedBlock folding : getFoldedBlocks()) { // TODO: avoid full scan - it could slowdown painting int line = folding.getLine(index); if (line == -1) continue; if (line > value) break; FoldRegion region = folding.getRegion(index); if (line == value && region != null && 
// NOTE(review): this span is the interior/tail of an enclosing diff-folding support
// class whose header lies outside this view; the first lines below complete a
// line-number convertor lambda started earlier.
                                                !region.isExpanded()) return -1;
      }
      return value;
    };
  }

  // Recomputes the cached start line of every folded block, per editor side.
  // 'force' refreshes all sides even when no change was flagged for them.
  private void updateLineNumbers(boolean force) {
    for (int i = 0; i < myCount; i++) {
      if (!myShouldUpdateLineNumbers[i] && !force) continue;
      myShouldUpdateLineNumbers[i] = false;
      ApplicationManager.getApplication().assertReadAccessAllowed();
      for (FoldedBlock folding : getFoldedBlocks()) {
        folding.updateLineNumber(i);
      }
    }
  }

  //
  // Synchronized toggling of ranges
  //

  // Expands or collapses every managed fold region in all editors at once.
  // myDuringSynchronize suppresses MyFoldingListener echoes while we mutate.
  public void expandAll(final boolean expanded) {
    if (myDuringSynchronize) return;
    myDuringSynchronize = true;
    try {
      for (int i = 0; i < myCount; i++) {
        final int index = i;
        final FoldingModelEx model = myEditors[index].getFoldingModel();
        model.runBatchFoldingOperation(() -> {
          for (FoldedBlock folding : getFoldedBlocks()) {
            FoldRegion region = folding.getRegion(index);
            if (region != null) region.setExpanded(expanded);
          }
        });
      }
    }
    finally {
      myDuringSynchronize = false;
    }
  }

  // Mirrors user-driven fold state changes from one editor side to its peers.
  private class MyFoldingListener implements FoldingListener {
    private final int myIndex; // the editor side this listener observes
    @NotNull Set<FoldRegion> myModifiedRegions = new HashSet<>();

    public MyFoldingListener(int index) {
      myIndex = index;
    }

    @Override
    public void onFoldRegionStateChange(@NotNull FoldRegion region) {
      if (myDuringSynchronize) return; // ignore changes we caused ourselves
      myModifiedRegions.add(region);
    }

    @Override
    public void onFoldProcessingEnd() {
      if (myModifiedRegions.isEmpty()) return;
      myDuringSynchronize = true;
      try {
        // Propagate the collected state changes to every other editor side.
        for (int i = 0; i < myCount; i++) {
          if (i == myIndex) continue;
          final int pairedIndex = i;
          myEditors[pairedIndex].getFoldingModel().runBatchFoldingOperation(() -> {
            for (FoldedBlock folding : getFoldedBlocks()) {
              FoldRegion region = folding.getRegion(myIndex);
              if (region == null || !region.isValid()) continue;
              if (myModifiedRegions.contains(region)) {
                FoldRegion pairedRegion = folding.getRegion(pairedIndex);
                if (pairedRegion == null || !pairedRegion.isValid()) continue;
                pairedRegion.setExpanded(region.isExpanded());
              }
            }
          });
        }
        myModifiedRegions.clear();
      }
      finally {
        myDuringSynchronize = false;
      }
    }
  }

  //
  // Highlighting
  //

  // Paints separator lines on the divider for blocks collapsed on both sides.
  protected class MyPaintable implements DiffDividerDrawUtil.DividerSeparatorPaintable {
    private final int myLeft;
    private final int myRight;

    public MyPaintable(int left, int right) {
      myLeft = left;
      myRight = right;
    }

    @Override
    public void process(@NotNull Handler handler) {
      for (FoldedBlock[] block : myFoldings) {
        for (FoldedBlock folding : block) {
          FoldRegion region1 = folding.getRegion(myLeft);
          FoldRegion region2 = folding.getRegion(myRight);
          if (region1 == null || !region1.isValid() || region1.isExpanded()) continue;
          if (region2 == null || !region2.isValid() || region2.isExpanded()) continue;
          int line1 = myEditors[myLeft].getDocument().getLineNumber(region1.getStartOffset());
          int line2 = myEditors[myRight].getDocument().getLineNumber(region2.getStartOffset());
          if (!handler.process(line1, line2)) return;
          break; // only the outermost collapsed folding of each group is painted
        }
      }
    }

    public void paintOnDivider(@NotNull Graphics2D gg, @NotNull Component divider) {
      DiffDividerDrawUtil.paintSeparators(gg, divider.getWidth(), myEditors[myLeft], myEditors[myRight], this);
    }
  }

  //
  // Cache
  //

  /*
   * To Cache:
   * For each block of foldings (foldings for a single unchanged block in diff) we remember biggest expanded and biggest collapsed range.
   *
   * From Cache:
   * We look into cache while building ranges, trying to find corresponding range in cached state.
   * "Corresponding range" now is just smallest covering range.
   *
   * If document was modified since cache creation, this will lead to strange results. But this is a rare case, and we can't do anything with it.
   */
  private class ExpandSuggester {
    @Nullable private final FoldingCache myCache;
    private final int[] myIndex = new int[myCount]; // per-side cursor into the cached ranges
    private final boolean myDefault;

    public ExpandSuggester(@Nullable FoldingCache cache, boolean defaultValue) {
      myCache = cache;
      myDefault = defaultValue;
    }

    // Decides whether a new folding at [starts, ends) should start expanded,
    // based on the state cached from a previous diff of the same documents.
    public boolean isExpanded(int[] starts, int[] ends) {
      if (myCache == null || myCache.ranges.length != myCount) return myDefault;
      if (myDefault != myCache.expandByDefault) return myDefault;
      Boolean state = null;
      for (int index = 0; index < myCount; index++) {
        Boolean sideState = getCachedExpanded(starts[index], ends[index], index);
        if (sideState == null) continue;
        if (state == null) {
          state = sideState;
          continue;
        }
        if (state != sideState) return myDefault; // sides disagree - fall back to default
      }
      return state == null ? myDefault : state;
    }

    @Nullable
    private Boolean getCachedExpanded(int start, int end, int index) {
      if (start == end) return null;
      //noinspection ConstantConditions
      List<FoldedRangeState> ranges = myCache.ranges[index];
      // Advance the cursor past ranges that end before 'start'; queries are
      // expected in ascending order so the cursor never has to move back.
      for (; myIndex[index] < ranges.size(); myIndex[index]++) {
        FoldedRangeState range = ranges.get(myIndex[index]);
        LineRange lineRange = range.getLineRange();
        if (lineRange.end <= start) continue;
        if (lineRange.contains(start, end)) {
          if (range.collapsed != null && range.collapsed.contains(start, end)) return false;
          if (range.expanded != null && range.expanded.contains(start, end)) return true;
          assert false : "Invalid LineRange" + range.expanded + ", " + range.collapsed + ", " + new LineRange(start, end);
        }
        if (lineRange.start >= start) return null; // we could need current range for enclosing next-level foldings
      }
      return null;
    }
  }

  // Stores the current folding state into the diff context for reuse on reopen.
  public void updateContext(@NotNull UserDataHolder context, @NotNull final Settings settings) {
    if (myFoldings.isEmpty()) return; // do not rewrite cache by initial state
    context.putUserData(CACHE_KEY, getFoldingCache(settings));
  }

  @NotNull
  private FoldingCache getFoldingCache(@NotNull final Settings settings) {
    return ReadAction.compute(() -> {
      List<FoldedRangeState>[] result = new List[myCount];
      for (int i = 0; i < myCount; i++) {
        result[i] = getFoldedRanges(i, settings);
      }
      return new FoldingCache(result, settings.defaultExpanded);
    });
  }

  // Collects, for one editor side, the biggest expanded and first collapsed
  // range of every folding group (see the "To Cache" comment above).
  @NotNull
  private List<FoldedRangeState> getFoldedRanges(int index, @NotNull Settings settings) {
    ApplicationManager.getApplication().assertReadAccessAllowed();
    List<FoldedRangeState> ranges = new ArrayList<>();
    DocumentEx document = myEditors[index].getDocument();
    for (FoldedBlock[] blocks : myFoldings) {
      LineRange expanded = null;
      LineRange collapsed = null;
      for (FoldedBlock folding : blocks) {
        FoldRegion region = folding.getRegion(index);
        if (region == null || !region.isValid()) continue;
        if (region.isExpanded()) {
          if (expanded == null) {
            // first (outermost) expanded region is the biggest one
            int line1 = document.getLineNumber(region.getStartOffset());
            int line2 = document.getLineNumber(region.getEndOffset()) + 1;
            expanded = new LineRange(line1, line2);
          }
        }
        else {
          int line1 = document.getLineNumber(region.getStartOffset());
          int line2 = document.getLineNumber(region.getEndOffset()) + 1;
          collapsed = new LineRange(line1, line2);
          break; // nothing below a collapsed region is visible
        }
      }
      if (expanded != null || collapsed != null) {
        ranges.add(new FoldedRangeState(expanded, collapsed));
      }
    }
    return ranges;
  }

  // Immutable snapshot of fold states for all editor sides.
  private static class FoldingCache {
    public final boolean expandByDefault;
    @NotNull public final List<FoldedRangeState>[] ranges;

    public FoldingCache(@NotNull List<FoldedRangeState>[] ranges, boolean expandByDefault) {
      this.ranges = ranges;
      this.expandByDefault = expandByDefault;
    }
  }

  // Biggest expanded and biggest collapsed range remembered for one folding group.
  // At least one of the two is always non-null (asserted in the constructor).
  private static class FoldedRangeState {
    @Nullable public final LineRange expanded;
    @Nullable public final LineRange collapsed;

    public FoldedRangeState(@Nullable LineRange expanded, @Nullable LineRange collapsed) {
      assert expanded != null || collapsed != null;
      this.expanded = expanded;
      this.collapsed = collapsed;
    }

    @NotNull
    public LineRange getLineRange() {
      //noinspection ConstantConditions
      return expanded != null ? expanded : collapsed;
    }
  }

  //
  // Impl
  //

  // Flattens the grouped myFoldings structure into a single iteration order.
  @NotNull
  private Iterable<FoldedBlock> getFoldedBlocks() {
    return () -> new Iterator<FoldedBlock>() {
      private int myGroupIndex = 0;
      private int myBlockIndex = 0;

      @Override
      public boolean hasNext() {
        return myGroupIndex < myFoldings.size();
      }

      @Override
      public FoldedBlock next() {
        FoldedBlock[] group = myFoldings.get(myGroupIndex);
        FoldedBlock folding = group[myBlockIndex];
        if (group.length > myBlockIndex + 1) {
          myBlockIndex++;
        }
        else {
          myGroupIndex++;
          myBlockIndex = 0;
        }
        return folding;
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }

  // One logical folded range, represented by a FoldRegion in each editor
  // (a slot may be null when the corresponding side has no region).
  protected class FoldedBlock {
    @NotNull private final FoldRegion[] myRegions;
    @NotNull private final int[] myLines;
    @NotNull private final List<RangeHighlighter> myHighlighters = new ArrayList<>(myCount);

    public FoldedBlock(@NotNull FoldRegion[] regions) {
      assert regions.length == myCount;
      myRegions = regions;
      myLines = new int[myCount];
    }

    public void installHighlighter(@NotNull final FoldedBlock[] block) {
      assert myHighlighters.isEmpty();
      for (int i = 0; i < myCount; i++) {
        FoldRegion region = myRegions[i];
        if (region == null || !region.isValid()) continue;
        myHighlighters.addAll(DiffDrawUtil.createLineSeparatorHighlighter(myEditors[i], region.getStartOffset(), region.getEndOffset(), getHighlighterCondition(block, i)));
      }
    }

    public void destroyFolding() {
      for (int i = 0; i < myCount; i++) {
        FoldRegion region = myRegions[i];
        if (region != null) myEditors[i].getFoldingModel().removeFoldRegion(region);
      }
    }

    public void destroyHighlighter() {
      for (RangeHighlighter highlighter : myHighlighters) {
        highlighter.dispose();
      }
      myHighlighters.clear();
    }

    // Caches the current start line of this block's region on the given side;
    // -1 when the region is gone or invalid.
    public void updateLineNumber(int index) {
      FoldRegion region = myRegions[index];
      if (region == null || !region.isValid()) {
        myLines[index] = -1;
      }
      else {
        myLines[index] = myEditors[index].getDocument().getLineNumber(region.getStartOffset());
      }
    }

    @Nullable
    public FoldRegion getRegion(int index) {
      return myRegions[index];
    }

    public int getLine(int index) {
      return myLines[index];
    }

    @NotNull
    private BooleanGetter getHighlighterCondition(@NotNull final FoldedBlock[] block, final int index) {
      return () -> {
        if (!myEditors[index].getFoldingModel().isFoldingEnabled()) return false;
        for (FoldedBlock folding : block) {
          FoldRegion region = folding.getRegion(index);
          boolean visible = region != null && region.isValid() && !region.isExpanded();
          if (folding == this) return visible;
          if (visible) return false; // do not paint separator, if 'parent' folding is collapsed
        }
        return false;
      };
    }
  }

  //
  // Helpers
  //

  /*
   * number - depth of folding insertion (from zero)
   * return: number of context lines. ('-1' - end)
   */
  private static int getRangeShift(int range, int number) {
    switch (number) {
      case 0:
        return range;
      case 1:
        return range * 2;
      case 2:
        return range * 4;
      default:
        return -1;
    }
  }

  // Lazily maps 'list' through 'mapping'; returns null for a null list.
  @Nullable
  @Contract("null, _ -> null; !null, _ -> !null")
  protected static <T, V> Iterator<V> map(@Nullable final List<T> list, @NotNull final Function<T, V> mapping) {
    if (list == null) return null;
    final Iterator<T> it = list.iterator();
    return new Iterator<V>() {
      @Override
      public boolean hasNext() {
        return it.hasNext();
      }

      @Override
      public V next() {
        return mapping.fun(it.next());
      }

      @Override
      public void remove() {
        // intentionally a no-op: removal is not supported for the mapped view
      }
    };
  }

  // User-visible folding settings: context line count and default expansion state.
  public static class Settings {
    public final int range;
    public final boolean defaultExpanded;

    public Settings(int range, boolean defaultExpanded) {
      this.range = range;
      this.defaultExpanded = defaultExpanded;
    }
  }
}
/**
 * Copyright 2011-2019 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.runtime.directio.keepalive;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import org.junit.After;
import org.junit.Test;

import com.asakusafw.runtime.directio.Counter;
import com.asakusafw.runtime.directio.DataDefinition;
import com.asakusafw.runtime.directio.DirectDataSource;
import com.asakusafw.runtime.directio.DirectInputFragment;
import com.asakusafw.runtime.directio.OutputAttemptContext;
import com.asakusafw.runtime.directio.OutputTransactionContext;
import com.asakusafw.runtime.directio.ResourceInfo;
import com.asakusafw.runtime.directio.ResourcePattern;
import com.asakusafw.runtime.io.ModelInput;
import com.asakusafw.runtime.io.ModelOutput;

/**
 * Test for {@link KeepAliveDataSource}.
 *
 * NOTE(review): these tests are wall-clock sensitive - each asserts whether the
 * keep-alive heartbeat ticked the counter during a 200ms sleep. Slow CI machines
 * may make them flaky; keep the sleep/interval ratio generous.
 */
public class KeepAliveDataSourceTest {

    // Data source under test; 10 is the heartbeat interval
    // (presumably milliseconds - TODO confirm against KeepAliveDataSource).
    final KeepAliveDataSource ds = new KeepAliveDataSource(new WaitDataSource(), 10);

    // Counter spy: records every change via 'count' (see Mock below).
    final Mock counter = new Mock();

    /**
     * Cleans up the test.
     * @throws Exception if some errors were occurred
     */
    @After
    public void tearDown() throws Exception {
        try {
            // all heartbeat registrations must have been released by the test
            assertThat(ds.heartbeat.isEmpty(), is(true));
        } finally {
            ds.heartbeat.close();
        }
    }

    /**
     * Test method for openInput.
     * @throws Exception if failed
     */
    @Test
    public void testOpenInput() throws Exception {
        try (ModelInput<Object> input = ds.openInput(null, null, counter)) {
            assertKeepAlive(true); // heartbeat active while the input is open
        }
        assertKeepAlive(false); // and stops once it is closed
    }

    /**
     * Test method for openOutput.
     * @throws Exception if failed
     */
    @Test
    public void testOpenOutput() throws Exception {
        try (ModelOutput<Object> output = ds.openOutput(null, null, null, null, counter)) {
            assertKeepAlive(true);
        }
        assertKeepAlive(false);
    }

    /**
     * Test method for setupAttemptOutput.
     * @throws Exception if failed
     */
    @Test
    public void testSetupAttemptOutput() throws Exception {
        OutputAttemptContext context = context();
        long s1 = counter.count;
        // WaitDataSource sleeps 200ms inside, so the heartbeat must tick meanwhile
        ds.setupAttemptOutput(context);
        long s2 = counter.count;
        assertThat(s2, greaterThan(s1));
        assertKeepAlive(false);
    }

    /**
     * Test method for commitAttemptOutput.
     * @throws Exception if failed
     */
    @Test
    public void testCommitAttemptOutput() throws Exception {
        OutputAttemptContext context = context();
        long s1 = counter.count;
        ds.commitAttemptOutput(context);
        long s2 = counter.count;
        assertThat(s2, greaterThan(s1));
        assertKeepAlive(false);
    }

    /**
     * Test method for cleanupAttemptOutput.
     * @throws Exception if failed
     */
    @Test
    public void testCleanupAttemptOutput() throws Exception {
        OutputAttemptContext context = context();
        long s1 = counter.count;
        ds.cleanupAttemptOutput(context);
        long s2 = counter.count;
        assertThat(s2, greaterThan(s1));
        assertKeepAlive(false);
    }

    /**
     * Test method for setupTransactionOutput.
     * @throws Exception if failed
     */
    @Test
    public void testSetupTransactionOutput() throws Exception {
        OutputAttemptContext context = context();
        long s1 = counter.count;
        ds.setupTransactionOutput(context.getTransactionContext());
        long s2 = counter.count;
        assertThat(s2, greaterThan(s1));
        assertKeepAlive(false);
    }

    /**
     * Test method for commitTransactionOutput.
     * @throws Exception if failed
     */
    @Test
    public void testCommitTransactionOutput() throws Exception {
        OutputAttemptContext context = context();
        long s1 = counter.count;
        ds.commitTransactionOutput(context.getTransactionContext());
        long s2 = counter.count;
        assertThat(s2, greaterThan(s1));
        assertKeepAlive(false);
    }

    /**
     * Test method for cleanupTransactionOutput.
     * @throws Exception if failed
     */
    @Test
    public void testCleanupTransactionOutput() throws Exception {
        OutputAttemptContext context = context();
        long s1 = counter.count;
        ds.cleanupTransactionOutput(context.getTransactionContext());
        long s2 = counter.count;
        assertThat(s2, greaterThan(s1));
        assertKeepAlive(false);
    }

    // Builds a throw-away attempt context bound to the spy counter.
    private OutputAttemptContext context() {
        return new OutputAttemptContext("tx", "at", "o", counter);
    }

    // Asserts whether the heartbeat is (b == true) or is not (b == false)
    // ticking the counter: sleeps 200ms and compares counter snapshots.
    private void assertKeepAlive(boolean b) throws InterruptedException {
        long s1 = counter.count;
        Thread.sleep(200);
        long s2 = counter.count;
        assertThat(s2, b ? greaterThan(s1) : is(s1));
    }

    // Counter spy: exposes the number of onChanged notifications.
    private static class Mock extends Counter {

        volatile long count;

        Mock() {
            return;
        }

        @Override
        protected void onChanged() {
            count++;
        }
    }

    // Stub data source whose maintenance operations block for 200ms, giving the
    // keep-alive wrapper a window in which it must produce heartbeats.
    private static class WaitDataSource implements DirectDataSource {

        public WaitDataSource() {
            return;
        }

        @Override
        public String path(String basePath, ResourcePattern resourcePattern) {
            return String.format("%s/%s", basePath, resourcePattern);
        }

        @Override
        public String path(String basePath) {
            return basePath;
        }

        @Override
        public <T> List<DirectInputFragment> findInputFragments(
                DataDefinition<T> definition,
                String basePath,
                ResourcePattern resourcePattern) throws IOException, InterruptedException {
            return Collections.emptyList();
        }

        @Override
        public <T> ModelInput<T> openInput(DataDefinition<T> definition, DirectInputFragment fragment,
                Counter counter) throws IOException, InterruptedException {
            // empty input: readTo never yields a record
            return new ModelInput<T>() {
                @Override
                public boolean readTo(T model) throws IOException {
                    return false;
                }
                @Override
                public void close() throws IOException {
                    return;
                }
            };
        }

        @Override
        public <T> ModelOutput<T> openOutput(
                OutputAttemptContext context,
                DataDefinition<T> definition,
                String basePath,
                String resourcePath,
                Counter counter) throws IOException, InterruptedException {
            // sink output: writes are discarded
            return new ModelOutput<T>() {
                @Override
                public void write(T model) throws IOException {
                    return;
                }
                @Override
                public void close() throws IOException {
                    return;
                }
            };
        }

        @Override
        public List<ResourceInfo> list(
                String basePath,
                ResourcePattern resourcePattern,
                Counter counter) throws IOException, InterruptedException {
            return Collections.emptyList();
        }

        @Override
        public boolean delete(
                String basePath,
                ResourcePattern resourcePattern,
                boolean recursive,
                Counter counter) throws IOException, InterruptedException {
            return false;
        }

        @Override
        public void setupAttemptOutput(OutputAttemptContext context) throws IOException,
                InterruptedException {
            Thread.sleep(200);
        }

        @Override
        public void commitAttemptOutput(OutputAttemptContext context) throws IOException,
                InterruptedException {
            Thread.sleep(200);
        }

        @Override
        public void cleanupAttemptOutput(OutputAttemptContext context) throws IOException,
                InterruptedException {
            Thread.sleep(200);
        }

        @Override
        public void setupTransactionOutput(OutputTransactionContext context) throws IOException,
                InterruptedException {
            Thread.sleep(200);
        }

        @Override
        public void commitTransactionOutput(OutputTransactionContext context) throws IOException,
                InterruptedException {
            Thread.sleep(200);
        }

        @Override
        public void cleanupTransactionOutput(OutputTransactionContext context) throws IOException,
                InterruptedException {
            Thread.sleep(200);
        }
    }
}
package org.openfact.models.jpa.ubl.common;

import java.util.List;
import java.util.stream.Collectors;

import javax.persistence.EntityManager;

import org.jboss.logging.Logger;
import org.openfact.models.OpenfactSession;
import org.openfact.models.jpa.JpaModel;
import org.openfact.models.jpa.entities.ubl.common.HazardousGoodsTransitEntity;
import org.openfact.models.jpa.entities.ubl.common.HazardousItemEntity;
import org.openfact.models.jpa.entities.ubl.common.SecondaryHazardEntity;
import org.openfact.models.jpa.entities.ubl.common.TemperatureEntity;
import org.openfact.models.ubl.common.HazardousGoodsTransitModel;
import org.openfact.models.ubl.common.HazardousItemModel;
import org.openfact.models.ubl.common.MeasureModel;
import org.openfact.models.ubl.common.PartyModel;
import org.openfact.models.ubl.common.QuantityModel;
import org.openfact.models.ubl.common.SecondaryHazardModel;
import org.openfact.models.ubl.common.TemperatureModel;

/**
 * Adapter exposing a JPA {@link HazardousItemEntity} through the UBL
 * {@link HazardousItemModel} interface. All scalar accessors delegate
 * directly to the wrapped entity; composite accessors wrap the entity's
 * associations in their corresponding adapters on every call.
 *
 * NOTE(review): composite getters wrap the association value without a null
 * check; presumably callers only invoke them when the association is set -
 * verify against the entity mappings.
 */
public class HazardousItemAdapter implements HazardousItemModel, JpaModel<HazardousItemEntity> {

    protected static final Logger logger = Logger.getLogger(HazardousItemAdapter.class);

    protected HazardousItemEntity hazardousItem;
    protected EntityManager em;
    protected OpenfactSession session;

    public HazardousItemAdapter(OpenfactSession session, EntityManager em,
            HazardousItemEntity hazardousItem) {
        this.session = session;
        this.em = em;
        this.hazardousItem = hazardousItem;
    }

    // ---------------------------------------------------------------- scalars

    @Override
    public String getID() {
        return hazardousItem.getID();
    }

    @Override
    public void setID(String value) {
        hazardousItem.setID(value);
    }

    @Override
    public String getPlacardNotation() {
        return hazardousItem.getPlacardNotation();
    }

    @Override
    public void setPlacardNotation(String value) {
        hazardousItem.setPlacardNotation(value);
    }

    @Override
    public String getPlacardEndorsement() {
        return hazardousItem.getPlacardEndorsement();
    }

    @Override
    public void setPlacardEndorsement(String value) {
        hazardousItem.setPlacardEndorsement(value);
    }

    @Override
    public String getAdditionalInformation() {
        return hazardousItem.getAdditionalInformation();
    }

    @Override
    public void setAdditionalInformation(String value) {
        hazardousItem.setAdditionalInformation(value);
    }

    // The model spells the UN dangerous-goods code "UNDG"; the entity uses "Undg".
    @Override
    public String getUNDGCode() {
        return hazardousItem.getUndgCode();
    }

    @Override
    public void setUNDGCode(String value) {
        hazardousItem.setUndgCode(value);
    }

    @Override
    public String getEmergencyProceduresCode() {
        return hazardousItem.getEmergencyProceduresCode();
    }

    @Override
    public void setEmergencyProceduresCode(String value) {
        hazardousItem.setEmergencyProceduresCode(value);
    }

    @Override
    public String getMedicalFirstAidGuideCode() {
        return hazardousItem.getMedicalFirstAidGuideCode();
    }

    @Override
    public void setMedicalFirstAidGuideCode(String value) {
        hazardousItem.setMedicalFirstAidGuideCode(value);
    }

    @Override
    public String getTechnicalName() {
        return hazardousItem.getTechnicalName();
    }

    @Override
    public void setTechnicalName(String value) {
        hazardousItem.setTechnicalName(value);
    }

    @Override
    public String getCategoryName() {
        return hazardousItem.getCategoryName();
    }

    @Override
    public void setCategoryName(String value) {
        hazardousItem.setCategoryName(value);
    }

    @Override
    public String getHazardousCategoryCode() {
        return hazardousItem.getHazardousCategoryCode();
    }

    @Override
    public void setHazardousCategoryCode(String value) {
        hazardousItem.setHazardousCategoryCode(value);
    }

    @Override
    public String getUpperOrangeHazardPlacardID() {
        return hazardousItem.getUpperOrangeHazardPlacardID();
    }

    @Override
    public void setUpperOrangeHazardPlacardID(String value) {
        hazardousItem.setUpperOrangeHazardPlacardID(value);
    }

    @Override
    public String getLowerOrangeHazardPlacardID() {
        return hazardousItem.getLowerOrangeHazardPlacardID();
    }

    @Override
    public void setLowerOrangeHazardPlacardID(String value) {
        hazardousItem.setLowerOrangeHazardPlacardID(value);
    }

    @Override
    public String getMarkingID() {
        return hazardousItem.getMarkingID();
    }

    @Override
    public void setMarkingID(String value) {
        hazardousItem.setMarkingID(value);
    }

    @Override
    public String getHazardClassID() {
        return hazardousItem.getHazardClassID();
    }

    @Override
    public void setHazardClassID(String value) {
        hazardousItem.setHazardClassID(value);
    }

    // ----------------------------------------------------------- composites

    @Override
    public MeasureModel getNetWeightMeasure() {
        return new MeasureAdapter(session, em, hazardousItem.getNetWeightMeasure());
    }

    @Override
    public void setNetWeightMeasure(MeasureModel value) {
        hazardousItem.setNetWeightMeasure(MeasureAdapter.toEntity(value, em));
    }

    @Override
    public MeasureModel getNetVolumeMeasure() {
        return new MeasureAdapter(session, em, hazardousItem.getNetVolumeMeasure());
    }

    @Override
    public void setNetVolumeMeasure(MeasureModel value) {
        hazardousItem.setNetVolumeMeasure(MeasureAdapter.toEntity(value, em));
    }

    @Override
    public QuantityModel getQuantity() {
        return new QuantityAdapter(session, em, hazardousItem.getQuantity());
    }

    @Override
    public void setQuantity(QuantityModel value) {
        hazardousItem.setQuantity(QuantityAdapter.toEntity(value, em));
    }

    @Override
    public PartyModel getContactParty() {
        return new PartyAdapter(session, em, hazardousItem.getContactParty());
    }

    @Override
    public void setContactParty(PartyModel value) {
        hazardousItem.setContactParty(PartyAdapter.toEntity(value, em));
    }

    // --------------------------------------------------------- collections

    @Override
    public List<SecondaryHazardModel> getSecondaryHazard() {
        return hazardousItem.getSecondaryHazard().stream()
                .map(entity -> new SecondaryHazardAdapter(session, em, entity))
                .collect(Collectors.toList());
    }

    @Override
    public void setSecondaryHazard(List<SecondaryHazardModel> secondaryHazard) {
        List<SecondaryHazardEntity> entities = secondaryHazard.stream()
                .map(model -> SecondaryHazardAdapter.toEntity(model, em))
                .collect(Collectors.toList());
        hazardousItem.setSecondaryHazard(entities);
    }

    @Override
    public List<HazardousGoodsTransitModel> getHazardousGoodsTransit() {
        return hazardousItem.getHazardousGoodsTransit().stream()
                .map(entity -> new HazardousGoodsTransitAdapter(session, em, entity))
                .collect(Collectors.toList());
    }

    @Override
    public void setHazardousGoodsTransit(List<HazardousGoodsTransitModel> hazardousGoodsTransit) {
        List<HazardousGoodsTransitEntity> entities = hazardousGoodsTransit.stream()
                .map(model -> HazardousGoodsTransitAdapter.toEntity(model, em))
                .collect(Collectors.toList());
        hazardousItem.setHazardousGoodsTransit(entities);
    }

    @Override
    public TemperatureModel getEmergencyTemperature() {
        return new TemperatureAdapter(session, em, hazardousItem.getEmergencyTemperature());
    }

    @Override
    public void setEmergencyTemperature(TemperatureModel value) {
        hazardousItem.setEmergencyTemperature(TemperatureAdapter.toEntity(value, em));
    }

    @Override
    public TemperatureModel getFlashpointTemperature() {
        return new TemperatureAdapter(session, em, hazardousItem.getFlashpointTemperature());
    }

    @Override
    public void setFlashpointTemperature(TemperatureModel value) {
        hazardousItem.setFlashpointTemperature(TemperatureAdapter.toEntity(value, em));
    }

    @Override
    public List<TemperatureModel> getAdditionalTemperature() {
        return hazardousItem.getAdditionalTemperature().stream()
                .map(entity -> new TemperatureAdapter(session, em, entity))
                .collect(Collectors.toList());
    }

    @Override
    public void setAdditionalTemperature(List<TemperatureModel> additionalTemperature) {
        List<TemperatureEntity> entities = additionalTemperature.stream()
                .map(model -> TemperatureAdapter.toEntity(model, em))
                .collect(Collectors.toList());
        hazardousItem.setAdditionalTemperature(entities);
    }

    // --------------------------------------------------------------- infra

    @Override
    public String getId() {
        return hazardousItem.getId();
    }

    @Override
    public HazardousItemEntity getEntity() {
        return hazardousItem;
    }

    /**
     * Unwraps the backing entity of a model. An adapter yields its wrapped
     * entity directly; anything else is resolved lazily by primary key.
     */
    public static HazardousItemEntity toEntity(HazardousItemModel model, EntityManager em) {
        if (model instanceof HazardousItemAdapter) {
            return ((HazardousItemAdapter) model).getEntity();
        }
        return em.getReference(HazardousItemEntity.class, model.getId());
    }
}
package com.example.android.sunshine.app.data;

import android.annotation.TargetApi;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.support.annotation.Nullable;

import com.example.android.sunshine.app.data.WeatherContract.LocationEntry;
import com.example.android.sunshine.app.data.WeatherContract.WeatherEntry;

/**
 * Created by Francis Rodrigues on 2/26/17.
 * See more: {@link ContentProvider}
 *
 * Content provider over the weather and location tables, supporting queries by
 * location setting, optional start date, and exact date.
 */
public class WeatherProvider extends ContentProvider {

    // The URI Matcher used by this content provider.
    private static final UriMatcher sUriMatcher = buildUriMatcher();
    private WeatherDbHelper mOpenHelper;

    // Match codes produced by sUriMatcher for each supported URI shape.
    static final int WEATHER = 100;
    static final int WEATHER_WITH_LOCATION = 101;
    static final int WEATHER_WITH_LOCATION_AND_DATE = 102;
    static final int LOCATION = 300;

    private static final SQLiteQueryBuilder sWeatherByLocationSettingQueryBuilder;

    static {
        sWeatherByLocationSettingQueryBuilder = new SQLiteQueryBuilder();

        // This is an inner join which looks like:
        // weather INNER JOIN location ON weather.location_id = location._id
        sWeatherByLocationSettingQueryBuilder.setTables(
                WeatherEntry.TABLE_NAME + " INNER JOIN " +
                        LocationEntry.TABLE_NAME +
                        " ON " + WeatherEntry.TABLE_NAME +
                        "." + WeatherEntry.COLUMN_LOC_KEY +
                        " = " + LocationEntry.TABLE_NAME +
                        "." + LocationEntry._ID);
    }

    // location.location_setting = ?
    private static final String sLocationSettingSelection =
            LocationEntry.TABLE_NAME + "." + LocationEntry.COLUMN_LOCATION_SETTING + " = ?";

    // location.location_setting = ? AND date >= ?
    private static final String sLocationSettingWithStartDateSelection =
            LocationEntry.TABLE_NAME + "." + LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
                    WeatherEntry.COLUMN_DATE + " >= ?";

    // location.location_setting = ? AND date = ?
    private static final String sLocationSettingAndDaySelection =
            LocationEntry.TABLE_NAME + "." + LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
                    WeatherEntry.COLUMN_DATE + " = ?";

    // Queries weather rows for a location setting; a start date of 0 in the URI
    // means "no lower bound" and selects all dates.
    private Cursor getWeatherByLocationSetting(Uri uri, String[] projection, String sortOrder) {
        String locationSetting = WeatherEntry.getLocationSettingFromUri(uri);
        long startDate = WeatherEntry.getStartDateFromUri(uri);

        String[] selectionArgs;
        String selection;

        if (startDate == 0) {
            selection = sLocationSettingSelection;
            selectionArgs = new String[]{locationSetting};
        } else {
            selectionArgs = new String[]{locationSetting, Long.toString(startDate)};
            selection = sLocationSettingWithStartDateSelection;
        }

        return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
                projection,
                selection,
                selectionArgs,
                null,
                null,
                sortOrder);
    }

    // Queries the weather row for a location setting on one exact date.
    private Cursor getWeatherByLocationSettingAndDate(Uri uri, String[] projection, String sortOrder) {
        String locationSetting = WeatherEntry.getLocationSettingFromUri(uri);
        long date = WeatherEntry.getDateFromUri(uri);

        return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
                projection,
                sLocationSettingAndDaySelection,
                new String[]{locationSetting, Long.toString(date)},
                null,
                null,
                sortOrder);
    }

    // Maps each supported URI shape to one of the WEATHER*/LOCATION match codes.
    // NO_MATCH is the code returned for the root URI (and anything unrecognized).
    static UriMatcher buildUriMatcher() {
        final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
        final String authority = WeatherContract.CONTENT_AUTHORITY;

        // For each type of URI you want to add, create a corresponding code.
        matcher.addURI(authority, WeatherContract.PATH_WEATHER, WEATHER);
        matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*", WEATHER_WITH_LOCATION);
        matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*/#", WEATHER_WITH_LOCATION_AND_DATE);
        matcher.addURI(authority, WeatherContract.PATH_LOCATION, LOCATION);

        return matcher;
    }

    // Creates the database helper; the database itself is opened lazily.
    @Override
    public boolean onCreate() {
        mOpenHelper = new WeatherDbHelper(getContext());
        return true;
    }

    // Returns the MIME type for a URI: item type for a single-row URI,
    // directory type for multi-row URIs.
    @Nullable
    @Override
    public String getType(Uri uri) {
        // Use the Uri Matcher to determine what kind of URI this is.
        final int match = sUriMatcher.match(uri);

        switch (match) {
            case WEATHER_WITH_LOCATION_AND_DATE:
                return WeatherEntry.CONTENT_ITEM_TYPE;
            case WEATHER_WITH_LOCATION:
                return WeatherEntry.CONTENT_TYPE;
            case WEATHER:
                return WeatherEntry.CONTENT_TYPE;
            case LOCATION:
                return LocationEntry.CONTENT_TYPE;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
    }

    @Nullable
    @Override
    public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
                        String sortOrder) {
        // Here's the switch statement that, given a URI, will determine what kind of request it is,
        // and query the database accordingly.
        Cursor retCursor;
        switch (sUriMatcher.match(uri)) {
            // "weather/*/*"
            case WEATHER_WITH_LOCATION_AND_DATE:
                retCursor = getWeatherByLocationSettingAndDate(uri, projection, sortOrder);
                break;
            // "weather/*"
            case WEATHER_WITH_LOCATION:
                retCursor = getWeatherByLocationSetting(uri, projection, sortOrder);
                break;
            // "weather"
            case WEATHER:
                retCursor = mOpenHelper.getReadableDatabase().query(
                        WeatherEntry.TABLE_NAME,
                        projection,
                        selection,
                        selectionArgs,
                        null,
                        null,
                        sortOrder
                );
                break;
            // "location"
            case LOCATION:
                retCursor = mOpenHelper.getReadableDatabase().query(
                        LocationEntry.TABLE_NAME,
                        projection,
                        selection,
                        selectionArgs,
                        null,
                        null,
                        sortOrder
                );
                break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        // Observers of this URI are notified when the underlying data changes.
        retCursor.setNotificationUri(getContext().getContentResolver(), uri);
        return retCursor;
    }

    // Inserts a single weather or location row; throws on insertion failure.
    @Nullable
    @Override
    public Uri insert(Uri uri, ContentValues values) {
        final SQLiteDatabase database = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        Uri returnUri;

        switch (match) {
            case WEATHER: {
                normalizeDate(values);
                long _id = database.insert(WeatherEntry.TABLE_NAME, null, values);
                if (_id > 0)
                    returnUri = WeatherEntry.buildWeatherUri(_id);
                else
                    throw new android.database.SQLException("Failed to insert row into " + uri);
                break;
            }
            case LOCATION: {
                long _id = database.insert(LocationEntry.TABLE_NAME, null, values);
                if (_id > 0)
                    returnUri = LocationEntry.buildLocationUri(_id);
                else
                    throw new android.database.SQLException("Failed to insert row into " + uri);
                break;
            }
            default:
                throw new UnsupportedOperationException("Unknown uri " + uri);
        }
        getContext().getContentResolver().notifyChange(uri, null);
        return returnUri;
    }

    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        final SQLiteDatabase database = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        int rowsDeleted;

        // This makes delete-all-rows return the number of rows deleted
        // (SQLite returns 0 for a null selection, "1" forces a count).
        if (null == selection) selection = "1";

        switch (match) {
            case WEATHER:
                rowsDeleted = database.delete(WeatherEntry.TABLE_NAME, selection, selectionArgs);
                break;
            case LOCATION:
                rowsDeleted = database.delete(LocationEntry.TABLE_NAME, selection, selectionArgs);
                break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }

        // Only notify observers when something actually changed.
        if (rowsDeleted != 0) {
            getContext().getContentResolver().notifyChange(uri, null);
        }
        return rowsDeleted;
    }

    // Rewrites COLUMN_DATE in-place to the start of its Julian day, so equality
    // comparisons against stored dates work.
    private void normalizeDate(ContentValues values) {
        // Normalize the date value.
        if (values.containsKey(WeatherEntry.COLUMN_DATE)) {
            long dateValue = values.getAsLong(WeatherEntry.COLUMN_DATE);
            values.put(WeatherEntry.COLUMN_DATE, WeatherContract.normalizeDate(dateValue));
        }
    }

    /**
     * Return the number of rows impacted.
     *
     * @param uri           the content URI identifying the table to update
     * @param values        the new column values
     * @param selection     optional WHERE clause (null updates all rows)
     * @param selectionArgs arguments bound into the selection
     * @return the number of rows updated
     */
    @Override
    public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
        final SQLiteDatabase database = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        int rowsUpdated;

        switch (match) {
            case WEATHER:
                normalizeDate(values);
                rowsUpdated = database.update(WeatherEntry.TABLE_NAME, values, selection, selectionArgs);
                break;
            case LOCATION:
                rowsUpdated = database.update(LocationEntry.TABLE_NAME, values, selection, selectionArgs);
                break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }

        if (rowsUpdated != 0) {
            getContext().getContentResolver().notifyChange(uri, null);
        }
        return rowsUpdated;
    }

    // Bulk-inserts weather rows inside a single transaction; other URIs fall
    // back to the default row-by-row implementation.
    @Override
    public int bulkInsert(Uri uri, ContentValues[] values) {
        final SQLiteDatabase database = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);

        switch (match) {
            case WEATHER:
                database.beginTransaction();
                int returnCount = 0; // failed inserts (-1) are skipped, not counted
                try {
                    for (ContentValues value : values) {
                        normalizeDate(value);
                        long _id = database.insert(WeatherEntry.TABLE_NAME, null, value);
                        if (_id != -1) {
                            returnCount++;
                        }
                    }
                    database.setTransactionSuccessful();
                } finally {
                    database.endTransaction();
                }
                getContext().getContentResolver().notifyChange(uri, null);
                return returnCount;
            default:
                return super.bulkInsert(uri, values);
        }
    }

    /*
        You do not need to call this method. This is a method specifically to assist the testing
        framework in running smoothly. You can read more at:
        http://developer.android.com/reference/android/content/ContentProvider.html#shutdown()
     */
    @Override
    @TargetApi(11)
    public void shutdown() {
        mOpenHelper.close();
        super.shutdown();
    }
}
/*
 * Copyright (C) 2008 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.truth.Truth.assertThat;
import static java.util.Arrays.asList;

import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.ListTestSuiteBuilder;
import com.google.common.collect.testing.MinimalCollection;
import com.google.common.collect.testing.SetTestSuiteBuilder;
import com.google.common.collect.testing.TestStringListGenerator;
import com.google.common.collect.testing.TestStringSetGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.google.MultisetTestSuiteBuilder;
import com.google.common.collect.testing.google.TestStringMultisetGenerator;
import com.google.common.collect.testing.google.UnmodifiableCollectionTests;
import com.google.common.testing.CollectorTester;
import com.google.common.testing.EqualsTester;
import com.google.common.testing.NullPointerTester;
import com.google.common.testing.SerializableTester;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.function.BiPredicate;
import java.util.stream.Collector;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Tests for {@link ImmutableMultiset}.
 *
 * <p>Covers static factories ({@code of}, {@code copyOf}), the builder,
 * {@code toImmutableMultiset} collectors, serialization, equality, and the
 * insertion-ordered iteration contract.
 *
 * @author Jared Levy
 */
@GwtCompatible(emulated = true)
public class ImmutableMultisetTest extends TestCase {

  @GwtIncompatible // suite
  // TODO(cpovirk): add to collect/gwt/suites
  public static Test suite() {
    TestSuite suite = new TestSuite();
    suite.addTestSuite(ImmutableMultisetTest.class);

    // Generic conformance tests for the multiset itself.
    suite.addTest(MultisetTestSuiteBuilder.using(
        new TestStringMultisetGenerator() {
          @Override
          protected Multiset<String> create(String[] elements) {
            return ImmutableMultiset.copyOf(elements);
          }
        })
        .named("ImmutableMultiset")
        .withFeatures(CollectionSize.ANY,
            CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS,
            CollectionFeature.ALLOWS_NULL_QUERIES)
        .createTestSuite());

    // Conformance tests for the elementSet() view.
    suite.addTest(SetTestSuiteBuilder.using(new TestStringSetGenerator() {
          @Override
          protected Set<String> create(String[] elements) {
            return ImmutableMultiset.copyOf(elements).elementSet();
          }
        })
        .named("ImmutableMultiset, element set")
        .withFeatures(CollectionSize.ANY,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_QUERIES)
        .createTestSuite());

    // Conformance tests for the asList() view; duplicates are grouped
    // together at the position of the first occurrence, so the expected
    // iteration order has to be recomputed here.
    suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
          @Override
          protected List<String> create(String[] elements) {
            return ImmutableMultiset.copyOf(elements).asList();
          }

          @Override
          public List<String> order(List<String> insertionOrder) {
            List<String> order = new ArrayList<String>();
            for (String s : insertionOrder) {
              int index = order.indexOf(s);
              if (index == -1) {
                order.add(s);
              } else {
                order.add(index, s);
              }
            }
            return order;
          }
        })
        .named("ImmutableMultiset.asList")
        .withFeatures(CollectionSize.ANY,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_QUERIES)
        .createTestSuite());

    // Conformance tests for elementSet().asList(); each element is added
    // twice so the element set is exercised with counts > 1.
    suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
          @Override
          protected List<String> create(String[] elements) {
            Set<String> set = new HashSet<String>();
            ImmutableMultiset.Builder<String> builder = ImmutableMultiset.builder();
            for (String s : elements) {
              checkArgument(set.add(s));
              builder.addCopies(s, 2);
            }
            ImmutableSet<String> elementSet = (ImmutableSet<String>) builder.build().elementSet();
            return elementSet.asList();
          }
        })
        .named("ImmutableMultiset.elementSet.asList")
        .withFeatures(CollectionSize.ANY,
            CollectionFeature.REJECTS_DUPLICATES_AT_CREATION,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_QUERIES)
        .createTestSuite());

    return suite;
  }

  // ---- of(...) factory overloads, 0 through 7 elements ----

  public void testCreation_noArgs() {
    Multiset<String> multiset = ImmutableMultiset.of();
    assertTrue(multiset.isEmpty());
  }

  public void testCreation_oneElement() {
    Multiset<String> multiset = ImmutableMultiset.of("a");
    assertEquals(HashMultiset.create(asList("a")), multiset);
  }

  public void testCreation_twoElements() {
    Multiset<String> multiset = ImmutableMultiset.of("a", "b");
    assertEquals(HashMultiset.create(asList("a", "b")), multiset);
  }

  public void testCreation_threeElements() {
    Multiset<String> multiset = ImmutableMultiset.of("a", "b", "c");
    assertEquals(HashMultiset.create(asList("a", "b", "c")), multiset);
  }

  public void testCreation_fourElements() {
    Multiset<String> multiset = ImmutableMultiset.of("a", "b", "c", "d");
    assertEquals(HashMultiset.create(asList("a", "b", "c", "d")), multiset);
  }

  public void testCreation_fiveElements() {
    Multiset<String> multiset = ImmutableMultiset.of("a", "b", "c", "d", "e");
    assertEquals(HashMultiset.create(asList("a", "b", "c", "d", "e")), multiset);
  }

  public void testCreation_sixElements() {
    Multiset<String> multiset = ImmutableMultiset.of(
        "a", "b", "c", "d", "e", "f");
    assertEquals(HashMultiset.create(asList("a", "b", "c", "d", "e", "f")),
        multiset);
  }

  public void testCreation_sevenElements() {
    Multiset<String> multiset = ImmutableMultiset.of(
        "a", "b", "c", "d", "e", "f", "g");
    assertEquals(
        HashMultiset.create(asList("a", "b", "c", "d", "e", "f", "g")),
        multiset);
  }

  // ---- copyOf(array) ----

  public void testCreation_emptyArray() {
    String[] array = new String[0];
    Multiset<String> multiset = ImmutableMultiset.copyOf(array);
    assertTrue(multiset.isEmpty());
  }

  public void testCreation_arrayOfOneElement() {
    String[] array = new String[] { "a" };
    Multiset<String> multiset = ImmutableMultiset.copyOf(array);
    assertEquals(HashMultiset.create(asList("a")), multiset);
  }

  public void testCreation_arrayOfArray() {
    // of(array) treats the array as a single element, not as varargs.
    String[] array = new String[] { "a" };
    Multiset<String[]> multiset = ImmutableMultiset.<String[]>of(array);
    Multiset<String[]> expected = HashMultiset.create();
    expected.add(array);
    assertEquals(expected, multiset);
  }

  public void testCreation_arrayContainingOnlyNull() {
    String[] array = new String[] { null };
    try {
      ImmutableMultiset.copyOf(array);
      fail();
    } catch (NullPointerException expected) {}
  }

  // ---- copyOf(Collection) ----

  public void testCopyOf_collection_empty() {
    // "<String>" is required to work around a javac 1.5 bug.
    Collection<String> c = MinimalCollection.<String>of();
    Multiset<String> multiset = ImmutableMultiset.copyOf(c);
    assertTrue(multiset.isEmpty());
  }

  public void testCopyOf_collection_oneElement() {
    Collection<String> c = MinimalCollection.of("a");
    Multiset<String> multiset = ImmutableMultiset.copyOf(c);
    assertEquals(HashMultiset.create(asList("a")), multiset);
  }

  public void testCopyOf_collection_general() {
    Collection<String> c = MinimalCollection.of("a", "b", "a");
    Multiset<String> multiset = ImmutableMultiset.copyOf(c);
    assertEquals(HashMultiset.create(asList("a", "b", "a")), multiset);
  }

  public void testCopyOf_collectionContainingNull() {
    Collection<String> c = MinimalCollection.of("a", null, "b");
    try {
      ImmutableMultiset.copyOf(c);
      fail();
    } catch (NullPointerException expected) {}
  }

  // ---- copyOf(Multiset) ----

  public void testCopyOf_multiset_empty() {
    Multiset<String> c = HashMultiset.create();
    Multiset<String> multiset = ImmutableMultiset.copyOf(c);
    assertTrue(multiset.isEmpty());
  }

  public void testCopyOf_multiset_oneElement() {
    Multiset<String> c = HashMultiset.create(asList("a"));
    Multiset<String> multiset = ImmutableMultiset.copyOf(c);
    assertEquals(HashMultiset.create(asList("a")), multiset);
  }

  public void testCopyOf_multiset_general() {
    Multiset<String> c = HashMultiset.create(asList("a", "b", "a"));
    Multiset<String> multiset = ImmutableMultiset.copyOf(c);
    assertEquals(HashMultiset.create(asList("a", "b", "a")), multiset);
  }

  public void testCopyOf_multisetContainingNull() {
    Multiset<String> c = HashMultiset.create(asList("a", null, "b"));
    try {
      ImmutableMultiset.copyOf(c);
      fail();
    } catch (NullPointerException expected) {}
  }

  // ---- copyOf(Iterator) ----

  public void testCopyOf_iterator_empty() {
    Iterator<String> iterator = Iterators.emptyIterator();
    Multiset<String> multiset = ImmutableMultiset.copyOf(iterator);
    assertTrue(multiset.isEmpty());
  }

  public void testCopyOf_iterator_oneElement() {
    Iterator<String> iterator = Iterators.singletonIterator("a");
    Multiset<String> multiset = ImmutableMultiset.copyOf(iterator);
    assertEquals(HashMultiset.create(asList("a")), multiset);
  }

  public void testCopyOf_iterator_general() {
    Iterator<String> iterator = asList("a", "b", "a").iterator();
    Multiset<String> multiset = ImmutableMultiset.copyOf(iterator);
    assertEquals(HashMultiset.create(asList("a", "b", "a")), multiset);
  }

  public void testCopyOf_iteratorContainingNull() {
    Iterator<String> iterator = asList("a", null, "b").iterator();
    try {
      ImmutableMultiset.copyOf(iterator);
      fail();
    } catch (NullPointerException expected) {}
  }

  // ---- toImmutableMultiset() collectors ----

  public void testToImmutableMultiset() {
    // Equivalence also checks entry order, since the multiset preserves
    // first-encounter order.
    BiPredicate<ImmutableMultiset<String>, ImmutableMultiset<String>> equivalence =
        (ms1, ms2) ->
            ms1.equals(ms2) && ms1.entrySet().asList().equals(ms2.entrySet().asList());
    CollectorTester.of(ImmutableMultiset.<String>toImmutableMultiset(), equivalence)
        .expectCollects(ImmutableMultiset.of())
        .expectCollects(
            ImmutableMultiset.of("a", "a", "b", "c", "c", "c"), "a", "a", "b", "c", "c", "c");
  }

  public void testToImmutableMultisetCountFunction() {
    BiPredicate<ImmutableMultiset<String>, ImmutableMultiset<String>> equivalence =
        (ms1, ms2) ->
            ms1.equals(ms2) && ms1.entrySet().asList().equals(ms2.entrySet().asList());
    CollectorTester.of(
            ImmutableMultiset.<Multiset.Entry<String>, String>toImmutableMultiset(
                Multiset.Entry::getElement, Multiset.Entry::getCount),
            equivalence)
        .expectCollects(ImmutableMultiset.of())
        .expectCollects(
            ImmutableMultiset.of("a", "a", "b", "c", "c", "c"),
            Multisets.immutableEntry("a", 1),
            Multisets.immutableEntry("b", 1),
            Multisets.immutableEntry("a", 1),
            Multisets.immutableEntry("c", 3));
  }

  public void testToImmutableMultiset_duplicates() {
    // equals() considers only 'a', so b1 and b2 are "duplicates"; the
    // collector must keep the first-seen instance (b1).
    class TypeWithDuplicates {
      final int a;
      final int b;

      TypeWithDuplicates(int a, int b) {
        this.a = a;
        this.b = b;
      }

      @Override
      public int hashCode() {
        return a;
      }

      @Override
      public boolean equals(Object obj) {
        return obj instanceof TypeWithDuplicates && ((TypeWithDuplicates) obj).a == a;
      }

      // Full field comparison, used to detect which instance survived.
      public boolean fullEquals(TypeWithDuplicates other) {
        return other != null && a == other.a && b == other.b;
      }
    }

    Collector<TypeWithDuplicates, ?, ImmutableMultiset<TypeWithDuplicates>> collector =
        ImmutableMultiset.toImmutableMultiset();
    BiPredicate<ImmutableMultiset<TypeWithDuplicates>, ImmutableMultiset<TypeWithDuplicates>>
        equivalence =
            (ms1, ms2) -> {
              if (!ms1.equals(ms2)) {
                return false;
              }
              List<TypeWithDuplicates> elements1 = ImmutableList.copyOf(ms1.elementSet());
              List<TypeWithDuplicates> elements2 = ImmutableList.copyOf(ms2.elementSet());
              for (int i = 0; i < ms1.elementSet().size(); i++) {
                if (!elements1.get(i).fullEquals(elements2.get(i))) {
                  return false;
                }
              }
              return true;
            };
    TypeWithDuplicates a = new TypeWithDuplicates(1, 1);
    TypeWithDuplicates b1 = new TypeWithDuplicates(2, 1);
    TypeWithDuplicates b2 = new TypeWithDuplicates(2, 2);
    TypeWithDuplicates c = new TypeWithDuplicates(3, 1);
    CollectorTester.of(collector, equivalence)
        .expectCollects(
            ImmutableMultiset.<TypeWithDuplicates>builder().add(a).addCopies(b1, 2).add(c).build(),
            a, b1, c, b2);
    collector = ImmutableMultiset.toImmutableMultiset(e -> e, e -> 1);
    CollectorTester.of(collector, equivalence)
        .expectCollects(
            ImmutableMultiset.<TypeWithDuplicates>builder().add(a).addCopies(b1, 2).add(c).build(),
            a, b1, c, b2);
  }

  /** Iterable that counts how many times iterator() is called. */
  private static class CountingIterable implements Iterable<String> {
    int count = 0;

    @Override
    public Iterator<String> iterator() {
      count++;
      return asList("a", "b", "a").iterator();
    }
  }

  public void testCopyOf_plainIterable() {
    CountingIterable iterable = new CountingIterable();
    Multiset<String> multiset = ImmutableMultiset.copyOf(iterable);
    assertEquals(HashMultiset.create(asList("a", "b", "a")), multiset);
    // copyOf must traverse the iterable exactly once.
    assertEquals(1, iterable.count);
  }

  // ---- copyOf() returns the same instance for already-immutable input ----

  public void testCopyOf_shortcut_empty() {
    Collection<String> c = ImmutableMultiset.of();
    assertSame(c, ImmutableMultiset.copyOf(c));
  }

  public void testCopyOf_shortcut_singleton() {
    Collection<String> c = ImmutableMultiset.of("a");
    assertSame(c, ImmutableMultiset.copyOf(c));
  }

  public void testCopyOf_shortcut_immutableMultiset() {
    Collection<String> c = ImmutableMultiset.of("a", "b", "c");
    assertSame(c, ImmutableMultiset.copyOf(c));
  }

  // ---- Builder ----

  public void testBuilderAdd() {
    ImmutableMultiset<String> multiset = new ImmutableMultiset.Builder<String>()
        .add("a")
        .add("b")
        .add("a")
        .add("c")
        .build();
    assertEquals(HashMultiset.create(asList("a", "b", "a", "c")), multiset);
  }

  public void testBuilderAddAll() {
    List<String> a = asList("a", "b");
    List<String> b = asList("c", "d");
    ImmutableMultiset<String> multiset = new ImmutableMultiset.Builder<String>()
        .addAll(a)
        .addAll(b)
        .build();
    assertEquals(HashMultiset.create(asList("a", "b", "c", "d")), multiset);
  }

  public void testBuilderAddAllMultiset() {
    Multiset<String> a = HashMultiset.create(asList("a", "b", "b"));
    Multiset<String> b = HashMultiset.create(asList("c", "b"));
    ImmutableMultiset<String> multiset = new ImmutableMultiset.Builder<String>()
        .addAll(a)
        .addAll(b)
        .build();
    assertEquals(
        HashMultiset.create(asList("a", "b", "b", "b", "c")), multiset);
  }

  public void testBuilderAddAllIterator() {
    Iterator<String> iterator = asList("a", "b", "a", "c").iterator();
    ImmutableMultiset<String> multiset = new ImmutableMultiset.Builder<String>()
        .addAll(iterator)
        .build();
    assertEquals(HashMultiset.create(asList("a", "b", "a", "c")), multiset);
  }

  public void testBuilderAddCopies() {
    // addCopies with count 0 is a no-op, so "c" is absent.
    ImmutableMultiset<String> multiset = new ImmutableMultiset.Builder<String>()
        .addCopies("a", 2)
        .addCopies("b", 3)
        .addCopies("c", 0)
        .build();
    assertEquals(
        HashMultiset.create(asList("a", "a", "b", "b", "b")), multiset);
  }

  public void testBuilderSetCount() {
    // setCount overwrites the prior count rather than adding to it.
    ImmutableMultiset<String> multiset = new ImmutableMultiset.Builder<String>()
        .add("a")
        .setCount("a", 2)
        .setCount("b", 3)
        .build();
    assertEquals(
        HashMultiset.create(asList("a", "a", "b", "b", "b")), multiset);
  }

  // ---- Builder rejects nulls and negative counts ----

  public void testBuilderAddHandlesNullsCorrectly() {
    ImmutableMultiset.Builder<String> builder = ImmutableMultiset.builder();
    try {
      builder.add((String) null);
      fail("expected NullPointerException");
    } catch (NullPointerException expected) {}
  }

  public void testBuilderAddAllHandlesNullsCorrectly() {
    ImmutableMultiset.Builder<String> builder = ImmutableMultiset.builder();
    try {
      builder.addAll((Collection<String>) null);
      fail("expected NullPointerException");
    } catch (NullPointerException expected) {}

    builder = ImmutableMultiset.builder();
    List<String> listWithNulls = asList("a", null, "b");
    try {
      builder.addAll(listWithNulls);
      fail("expected NullPointerException");
    } catch (NullPointerException expected) {}

    builder = ImmutableMultiset.builder();
    Multiset<String> multisetWithNull = LinkedHashMultiset.create(asList("a", null, "b"));
    try {
      builder.addAll(multisetWithNull);
      fail("expected NullPointerException");
    } catch (NullPointerException expected) {}
  }

  public void testBuilderAddCopiesHandlesNullsCorrectly() {
    ImmutableMultiset.Builder<String> builder = ImmutableMultiset.builder();
    try {
      builder.addCopies(null, 2);
      fail("expected NullPointerException");
    } catch (NullPointerException expected) {}
  }

  public void testBuilderAddCopiesIllegal() {
    ImmutableMultiset.Builder<String> builder = ImmutableMultiset.builder();
    try {
      builder.addCopies("a", -2);
      fail("expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {}
  }

  public void testBuilderSetCountHandlesNullsCorrectly() {
    ImmutableMultiset.Builder<String> builder = ImmutableMultiset.builder();
    try {
      builder.setCount(null, 2);
      fail("expected NullPointerException");
    } catch (NullPointerException expected) {}
  }

  public void testBuilderSetCountIllegal() {
    ImmutableMultiset.Builder<String> builder = ImmutableMultiset.builder();
    try {
      builder.setCount("a", -2);
      fail("expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {}
  }

  @GwtIncompatible // NullPointerTester
  public void testNullPointers() {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicStaticMethods(ImmutableMultiset.class);
  }

  // ---- Serialization ----

  @GwtIncompatible // SerializableTester
  public void testSerialization_empty() {
    Collection<String> c = ImmutableMultiset.of();
    // The empty multiset is a singleton and must reserialize to itself.
    assertSame(c, SerializableTester.reserialize(c));
  }

  @GwtIncompatible // SerializableTester
  public void testSerialization_multiple() {
    Collection<String> c = ImmutableMultiset.of("a", "b", "a");
    Collection<String> copy = SerializableTester.reserializeAndAssert(c);
    assertThat(copy).containsExactly("a", "a", "b").inOrder();
  }

  @GwtIncompatible // SerializableTester
  public void testSerialization_elementSet() {
    Multiset<String> c = ImmutableMultiset.of("a", "b", "a");
    Collection<String> copy =
        LenientSerializableTester.reserializeAndAssertLenient(c.elementSet());
    assertThat(copy).containsExactly("a", "b").inOrder();
  }

  @GwtIncompatible // SerializableTester
  public void testSerialization_entrySet() {
    Multiset<String> c = ImmutableMultiset.of("a", "b", "c");
    SerializableTester.reserializeAndAssert(c.entrySet());
  }

  // ---- Equality and iteration order ----

  public void testEquals_immutableMultiset() {
    Collection<String> c = ImmutableMultiset.of("a", "b", "a");
    assertEquals(c, ImmutableMultiset.of("a", "b", "a"));
    assertEquals(c, ImmutableMultiset.of("a", "a", "b"));
    assertThat(c).isNotEqualTo(ImmutableMultiset.of("a", "b"));
    assertThat(c).isNotEqualTo(ImmutableMultiset.of("a", "b", "c", "d"));
  }

  public void testIterationOrder() {
    Collection<String> c = ImmutableMultiset.of("a", "b", "a");
    assertThat(c).containsExactly("a", "a", "b").inOrder();
    assertThat(ImmutableMultiset.of("c", "b", "a", "c").elementSet())
        .containsExactly("c", "b", "a")
        .inOrder();
  }

  public void testMultisetWrites() {
    Multiset<String> multiset = ImmutableMultiset.of("a", "b", "a");
    UnmodifiableCollectionTests.assertMultisetIsUnmodifiable(multiset, "test");
  }

  public void testAsList() {
    ImmutableMultiset<String> multiset = ImmutableMultiset.of("a", "a", "b", "b", "b");
    ImmutableList<String> list = multiset.asList();
    assertEquals(ImmutableList.of("a", "a", "b", "b", "b"), list);
    assertEquals(2, list.indexOf("b"));
    assertEquals(4, list.lastIndexOf("b"));
  }

  @GwtIncompatible // SerializableTester
  public void testSerialization_asList() {
    ImmutableMultiset<String> multiset = ImmutableMultiset.of("a", "a", "b", "b", "b");
    SerializableTester.reserializeAndAssert(multiset.asList());
  }

  public void testEquals() {
    new EqualsTester()
        .addEqualityGroup(ImmutableMultiset.of(), ImmutableMultiset.of())
        .addEqualityGroup(ImmutableMultiset.of(1), ImmutableMultiset.of(1))
        .addEqualityGroup(ImmutableMultiset.of(1, 1), ImmutableMultiset.of(1, 1))
        .addEqualityGroup(ImmutableMultiset.of(1, 2, 1), ImmutableMultiset.of(2, 1, 1))
        .testEquals();
  }

  public void testIterationOrderThroughBuilderRemovals() {
    // Setting a count to 0 removes the element; re-adding it later puts it
    // at the *end* of the iteration order, and an already-built multiset is
    // unaffected by further builder mutation.
    ImmutableMultiset.Builder<String> builder = ImmutableMultiset.builder();
    builder.addCopies("a", 2);
    builder.add("b");
    builder.add("c");
    builder.setCount("b", 0);
    ImmutableMultiset<String> multiset = builder.build();
    assertThat(multiset.elementSet()).containsExactly("a", "c").inOrder();
    builder.add("b");
    assertThat(builder.build().elementSet()).containsExactly("a", "c", "b").inOrder();
    assertThat(multiset.elementSet()).containsExactly("a", "c").inOrder();
  }
}
/*
 * This is the source code of Telegram for Android v. 1.3.2.
 * It is licensed under GNU GPL v. 2 or later.
 * You should have received a copy of the license in this archive (see LICENSE).
 *
 * Copyright Nikolai Kudashov, 2013.
 */

package com.jmv.frre.moduloestudiante.messenger;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;

/**
 * In-memory TL-style binary serializer/deserializer.
 *
 * <p>Integers are written little-endian, one byte at a time. Byte arrays and
 * strings use the TL length prefix: lengths up to 253 take one byte; longer
 * ones take a 0xFE marker plus a 3-byte little-endian length, and the payload
 * is zero-padded so (length prefix + data) is a multiple of 4 bytes.
 *
 * <p>A single instance is either a writer (outbuf/out), a reader (inbuf/in),
 * or a pure length calculator (justCalc == true, which only accumulates
 * {@code len} without writing anything). Errors are logged via FileLog and
 * swallowed; readers then return 0/null.
 */
public class SerializedData extends AbsSerializedData {
    // true while this instance is in write mode; set(…)/byte[]-ctor flip it to read mode.
    protected boolean isOut = true;
    private ByteArrayOutputStream outbuf;
    private DataOutputStream out;
    private ByteArrayInputStream inbuf;
    private DataInputStream in;
    // When true, no bytes are produced; write* methods only add to 'len'.
    private boolean justCalc = false;
    // Accumulated serialized length in calculate-only mode.
    private int len;

    /** Creates a writer with a default-sized backing buffer. */
    public SerializedData() {
        outbuf = new ByteArrayOutputStream();
        out = new DataOutputStream(outbuf);
    }

    /**
     * @param calculate if true, this instance only computes serialized length
     *                  (no buffer is allocated, nothing is written).
     */
    public SerializedData(boolean calculate) {
        if (!calculate) {
            outbuf = new ByteArrayOutputStream();
            out = new DataOutputStream(outbuf);
        }
        justCalc = calculate;
        len = 0;
    }

    /** Creates a writer whose backing buffer is pre-sized to {@code size} bytes. */
    public SerializedData(int size) {
        outbuf = new ByteArrayOutputStream(size);
        out = new DataOutputStream(outbuf);
    }

    /** Creates a reader over the given byte array. */
    public SerializedData(byte[] data) {
        isOut = false;
        inbuf = new ByteArrayInputStream(data);
        in = new DataInputStream(inbuf);
    }

    /** Creates a reader over the full contents of {@code file}. */
    public SerializedData(File file) throws Exception {
        FileInputStream is = new FileInputStream(file);
        byte[] data = new byte[(int)file.length()];
        new DataInputStream(is).readFully(data);
        is.close();
        isOut = false;
        inbuf = new ByteArrayInputStream(data);
        in = new DataInputStream(inbuf);
    }

    /** Writes a 32-bit int (little-endian), or adds 4 to len in calc mode. */
    public void writeInt32(int x) {
        if (!justCalc) {
            writeInt32(x, out);
        } else {
            len += 4;
        }
    }

    // Little-endian: least-significant byte first.
    private void writeInt32(int x, DataOutputStream out) {
        try {
            for(int i = 0; i < 4; i++) {
                out.write(x >> (i * 8));
            }
        } catch(Exception e) {
            FileLog.e("tmessages", "write int32 error");
        }
    }

    /** Writes a 64-bit long (little-endian), or adds 8 to len in calc mode. */
    public void writeInt64(long i) {
        if (!justCalc) {
            writeInt64(i, out);
        } else {
            len += 8;
        }
    }

    private void writeInt64(long x, DataOutputStream out) {
        try {
            for(int i = 0; i < 8; i++) {
                out.write((int)(x >> (i * 8)));
            }
        } catch(Exception e) {
            FileLog.e("tmessages", "write int64 error");
        }
    }

    /**
     * Reads a TL boolean: constructor 0x997275b5 = true, 0xbc799737 = false.
     * Any other value is logged and treated as false.
     */
    // NOTE(review): "consructor" below looks like a typo for "constructor" — rename if touched.
    public boolean readBool() {
        int consructor = readInt32();
        if (consructor == 0x997275b5) {
            return true;
        } else if (consructor == 0xbc799737) {
            return false;
        }
        FileLog.e("tmessages", "Not bool value!");
        return false;
    }

    /** Writes a TL boolean as one of the two 32-bit constructor constants. */
    public void writeBool(boolean value) {
        if (!justCalc) {
            if (value) {
                writeInt32(0x997275b5);
            } else {
                writeInt32(0xbc799737);
            }
        } else {
            len += 4;
        }
    }

    /**
     * Length-calculation support for byte buffers: prefix (1 or 4 bytes) +
     * payload + zero padding to a 4-byte boundary. Actual writing is not
     * implemented for this type.
     */
    public void writeByteBuffer(ByteBufferDesc buffer) {
        if (!justCalc) {
            //TODO ?
        } else {
            int l = buffer.limit();
            if (l <= 253) {
                len += 1;
            } else {
                len += 4;
            }
            len += l;
            int i = l <= 253 ? 1 : 4;
            while((l + i) % 4 != 0) {
                len += 1;
                i++;
            }
        }
    }

    /** Reads a little-endian 32-bit int; returns 0 on error. */
    public int readInt32() {
        return readInt32(null);
    }

    /**
     * Reads a little-endian 32-bit int.
     *
     * @param error optional 1-element out-param; [0] is set true on failure.
     */
    public int readInt32(boolean[] error) {
        try {
            int i = 0;
            for(int j = 0; j < 4; j++) {
                i |= (in.read() << (j * 8));
            }
            if (error != null) {
                error[0] = false;
            }
            return i;
        } catch(Exception x) {
            if (error != null) {
                error[0] = true;
            }
            FileLog.e("tmessages", "read int32 error");
        }
        return 0;
    }

    /** Reads a little-endian 64-bit long; returns 0 on error. */
    public long readInt64() {
        return readInt64(null);
    }

    /**
     * Reads a little-endian 64-bit long.
     *
     * @param error optional 1-element out-param; [0] is set true on failure.
     */
    public long readInt64(boolean[] error) {
        try {
            long i = 0;
            for(int j = 0; j < 8; j++) {
                i |= ((long)in.read() << (j * 8));
            }
            if (error != null) {
                error[0] = false;
            }
            return i;
        } catch (Exception x) {
            if (error != null) {
                error[0] = true;
            }
            FileLog.e("tmessages", "read int64 error");
        }
        return 0;
    }

    /** Writes raw bytes with no length prefix or padding. */
    public void writeRaw(byte[] b) {
        try {
            if (!justCalc) {
                out.write(b);
            } else {
                len += b.length;
            }
        } catch (Exception x) {
            FileLog.e("tmessages", "write raw error");
        }
    }

    /** Writes {@code count} raw bytes from {@code b} starting at {@code offset}. */
    public void writeRaw(byte[] b, int offset, int count) {
        try {
            if (!justCalc) {
                out.write(b, offset, count);
            } else {
                len += count;
            }
        } catch (Exception x) {
            FileLog.e("tmessages", "write raw error");
        }
    }

    /** Writes a single byte (low 8 bits of {@code i}). */
    public void writeByte(int i) {
        try {
            if (!justCalc) {
                out.writeByte((byte)i);
            } else {
                len += 1;
            }
        } catch (Exception e) {
            FileLog.e("tmessages", "write byte error");
        }
    }

    /** Writes a single byte. */
    public void writeByte(byte b) {
        try {
            if (!justCalc) {
                out.writeByte(b);
            } else {
                len += 1;
            }
        } catch (Exception e) {
            FileLog.e("tmessages", "write byte error");
        }
    }

    /** Fills {@code b} with bytes from the input stream. */
    // NOTE(review): in.read(b) is not guaranteed to fill the array on a general
    // InputStream (readFully would be); here 'in' always wraps a
    // ByteArrayInputStream, which reads all available bytes in one call — confirm
    // before reusing with other stream types.
    public void readRaw(byte[] b) {
        try {
            in.read(b);
        } catch (Exception x) {
            FileLog.e("tmessages", "read raw error");
        }
    }

    /** Reads exactly {@code count} raw bytes into a new array. */
    public byte[] readData(int count) {
        byte[] arr = new byte[count];
        readRaw(arr);
        return arr;
    }

    /**
     * Reads a TL-prefixed, padded byte sequence and decodes it as UTF-8.
     * Returns null on error.
     */
    public String readString() {
        try {
            int sl = 1;
            // Length prefix: one byte, or 0xFE marker + 3-byte little-endian length.
            int l = in.read();
            if(l >= 254) {
                l = in.read() | (in.read() << 8) | (in.read() << 16);
                sl = 4;
            }
            byte[] b = new byte[l];
            in.read(b);
            // Skip the zero padding up to the next 4-byte boundary.
            int i=sl;
            while((l + i) % 4 != 0) {
                in.read();
                i++;
            }
            return new String(b, "UTF-8");
        } catch (Exception x) {
            FileLog.e("tmessages", "read string error");
        }
        return null;
    }

    /**
     * Reads a TL-prefixed, padded byte array. Returns null on error.
     */
    public byte[] readByteArray() {
        try {
            int sl = 1;
            int l = in.read();
            if (l >= 254) {
                l = in.read() | (in.read() << 8) | (in.read() << 16);
                sl = 4;
            }
            byte[] b = new byte[l];
            in.read(b);
            // Skip the zero padding up to the next 4-byte boundary.
            int i = sl;
            while((l + i) % 4 != 0) {
                in.read();
                i++;
            }
            return b;
        } catch (Exception x) {
            FileLog.e("tmessages", "read byte array error");
        }
        return null;
    }

    /** Unsupported for this implementation. */
    public ByteBufferDesc readByteBuffer() {
        throw new RuntimeException("SerializedData don't support readByteBuffer");
    }

    /**
     * Writes a byte array with the TL length prefix and zero padding to a
     * 4-byte boundary (see class comment for the format).
     */
    public void writeByteArray(byte[] b) {
        try {
            if (b.length <= 253) {
                if (!justCalc) {
                    out.write(b.length);
                } else {
                    len += 1;
                }
            } else {
                // Long form: 0xFE marker + 3-byte little-endian length.
                if (!justCalc) {
                    out.write(254);
                    out.write(b.length);
                    out.write(b.length >> 8);
                    out.write(b.length >> 16);
                } else {
                    len += 4;
                }
            }
            if (!justCalc) {
                out.write(b);
            } else {
                len += b.length;
            }
            // Zero-pad so prefix + payload lands on a 4-byte boundary.
            int i = b.length <= 253 ? 1 : 4;
            while((b.length + i) % 4 != 0) {
                if (!justCalc) {
                    out.write(0);
                } else {
                    len += 1;
                }
                i++;
            }
        } catch (Exception x) {
            FileLog.e("tmessages", "write byte array error");
        }
    }

    /** Writes a string as its UTF-8 bytes in TL byte-array format. */
    public void writeString(String s) {
        try {
            writeByteArray(s.getBytes("UTF-8"));
        } catch(Exception x) {
            FileLog.e("tmessages", "write string error");
        }
    }

    /** Writes a sub-range of {@code b} in TL byte-array format. */
    public void writeByteArray(byte[] b, int offset, int count) {
        try {
            if(count <= 253) {
                if (!justCalc) {
                    out.write(count);
                } else {
                    len += 1;
                }
            } else {
                if (!justCalc) {
                    out.write(254);
                    out.write(count);
                    out.write(count >> 8);
                    out.write(count >> 16);
                } else {
                    len += 4;
                }
            }
            if (!justCalc) {
                out.write(b, offset, count);
            } else {
                len += count;
            }
            int i = count <= 253 ? 1 : 4;
            while ((count + i) % 4 != 0) {
                if (!justCalc) {
                    out.write(0);
                } else {
                    len += 1;
                }
                i++;
            }
        } catch (Exception x) {
            FileLog.e("tmessages", "write byte array error");
        }
    }

    /** Reads a double stored as the raw bits of a little-endian 64-bit long. */
    public double readDouble() {
        try {
            return Double.longBitsToDouble(readInt64());
        } catch(Exception x) {
            FileLog.e("tmessages", "read double error");
        }
        return 0;
    }

    /** Writes a double as the raw bits of a 64-bit long. */
    public void writeDouble(double d) {
        try {
            writeInt64(Double.doubleToRawLongBits(d));
        } catch(Exception x) {
            FileLog.e("tmessages", "write double error");
        }
    }

    /**
     * @return bytes written so far (writer), bytes remaining (reader), or the
     *         accumulated length in calculate-only mode.
     */
    public int length() {
        if (!justCalc) {
            return isOut ? outbuf.size() : inbuf.available();
        }
        return len;
    }

    /** Switches this instance to read mode over {@code newData}. */
    protected void set(byte[] newData) {
        isOut = false;
        inbuf = new ByteArrayInputStream(newData);
        in = new DataInputStream(inbuf);
    }

    /** @return a copy of everything written so far (writer mode only). */
    public byte[] toByteArray() {
        return outbuf.toByteArray();
    }
}
/*
 *
 * Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
// Copyright (c) 1995-96 by Cisco Systems, Inc.

package com.sun.jmx.snmp.daemon;

// JAVA imports
//
import java.net.InetAddress;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import java.io.IOException;
import java.util.logging.Level;

// SNMP Runtime imports
//
import static com.sun.jmx.defaults.JmxProperties.SNMP_ADAPTOR_LOGGER;

/**
 * This class creates an SNMP Datagram Socket. This class has methods helpful
 * to send SNMP inform request packets to an arbitrary port of a specified device.
 * It also runs a thread that is devoted to receiving SNMP inform response on the socket.
 * <BR>
 * A socket imposes an upper limit on size of inform response packet. Any
 * packet which exceeds this limit is truncated. By default, this
 * limit is {@link SnmpAdaptorServer#bufferSize}.
 */
final class SnmpSocket implements java.lang.Runnable {

    // VARIABLES
    //----------
    private DatagramSocket _socket = null;        // null once close() has run
    private SnmpResponseHandler _dgramHdlr = null; // callback for each received datagram
    private Thread _sockThread = null;             // receiver thread running run()
    private byte[] _buffer = null;                 // reusable receive buffer (responseBufSize bytes)
    private transient boolean isClosing = false;   // set by close(); tells run() to exit
    int _socketPort = 0;
    int responseBufSize = 1024;

    // CONSTRUCTORS
    //-------------

    /**
     * Creates a new <CODE>SnmpSocket</CODE> object.
     * @param rspHdlr A Datagram handler.
     * @param bufferSize The SNMP adaptor buffer size.
     * @exception SocketException A socket could not be created.
     */
    public SnmpSocket(SnmpResponseHandler rspHdlr, InetAddress addr, int bufferSize) throws SocketException {
        super();

        if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINER)) {
            SNMP_ADAPTOR_LOGGER.logp(Level.FINER, SnmpSocket.class.getName(),
                "constructor", "Creating new SNMP datagram socket");
        }
        // TIME BOMB HERE
        // Port 0 lets the OS pick an ephemeral local port.
        _socket = new DatagramSocket(0, addr);
        _socketPort = _socket.getLocalPort();
        responseBufSize = bufferSize;
        _buffer = new byte[responseBufSize];
        _dgramHdlr = rspHdlr;
        // Start the dedicated receiver thread immediately.
        _sockThread = new Thread(this, "SnmpSocket");
        _sockThread.start();
    }

    // PUBLIC METHODS
    //---------------

    /**
     * Sends a datagram packet to a specified device at specified port.
     * @param buff The packet data.
     * @param length The packet length.
     * @param addr The destination address.
     * @param port The destination port number.
     * @exception IOException Signals that an I/O exception of some sort has occurred.
     */
    public synchronized void sendPacket(byte[] buff, int length, InetAddress addr, int port) throws IOException {
        DatagramPacket dgrmpkt;
        dgrmpkt = new DatagramPacket(buff, length, addr, port);
        sendPacket(dgrmpkt);
    }

    /**
     * Sends a datagram packet to a specified device at specified port.
     * @param dgrmpkt The datagram packet.
     * @exception IOException Signals that an I/O exception of some sort has occurred.
     */
    public synchronized void sendPacket(DatagramPacket dgrmpkt) throws IOException {
        try {
            if (isValid()) {
                if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINER)) {
                    SNMP_ADAPTOR_LOGGER.logp(Level.FINER, SnmpSocket.class.getName(),
                        "sendPacket", "Sending DatagramPacket. Length = " + dgrmpkt.getLength() +
                         " through socket = " + _socket.toString());
                }
                _socket.send(dgrmpkt);
            } else
                throw new IOException("Invalid state of SNMP datagram socket.");
        } catch (IOException e) {
            if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINEST)) {
                SNMP_ADAPTOR_LOGGER.logp(Level.FINEST, SnmpSocket.class.getName(),
                    "sendPacket", "I/O error while sending", e);
            }
            throw e;
        }
    }

    /**
     * Checks if the socket is initialised correctly and if it is still active.
     * @return <CODE>true</CODE> if the socket is initialised correctly and if it is still active,
     * <CODE>false</CODE> otherwise.
     */
    public synchronized boolean isValid() {
        return _socket != null && _sockThread != null && _sockThread.isAlive();
    }

    /**
     * Closes the socket and its associated resources.
     */
    public synchronized void close() {
        isClosing = true;

        if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINER)) {
            SNMP_ADAPTOR_LOGGER.logp(Level.FINER, SnmpSocket.class.getName(),
                "close", "Closing and destroying the SNMP datagram socket -> " + toString());
        }

        try {
            // We send an empty datagram packet to fix bug 4293791 (it's a jdk 1.1 bug)
            // (unblocks the receiver thread sitting in _socket.receive)
            //
            DatagramSocket sn = new java.net.DatagramSocket(0);
            byte[] ob = new byte[1];
            DatagramPacket pk = new DatagramPacket(ob , 1, java.net.InetAddress.getLocalHost(), _socketPort);
            sn.send(pk);
            sn.close();
        } catch (Exception e) {}

        // First close the datagram socket.
        // This may generates an IO exception at the run method (_socket.receive).
        //
        if (_socket != null) {
            _socket.close() ;
            _socket = null ;
        }

        // Then stop the thread socket.
        //
        if (_sockThread != null && _sockThread.isAlive()) {
            _sockThread.interrupt();
            try {
                // Wait until the thread die.
                //
                _sockThread.join();
            } catch (InterruptedException e) {
                // Ignore...
            }
            _sockThread = null ;
        }
    }

    /**
     * Dispatcher method for this socket thread. This is the dispatcher method
     * which goes in an endless-loop and waits for receiving datagram packets on the socket.
     */
    @Override
    public void run() {
        Thread.currentThread().setPriority(8);

        while (true) {
            try {
                DatagramPacket dgrmpkt = new DatagramPacket (_buffer, _buffer.length);

                if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINER)) {
                    SNMP_ADAPTOR_LOGGER.logp(Level.FINER, SnmpSocket.class.getName(),
                        "run", "[" + Thread.currentThread().toString() + "]:" + "Blocking for receiving packet");
                }
                _socket.receive(dgrmpkt);

                // If the corresponding session is being destroyed, stop handling received responses.
                //
                if (isClosing)
                    break;

                if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINER)) {
                    SNMP_ADAPTOR_LOGGER.logp(Level.FINER, SnmpSocket.class.getName(),
                        "run", "[" + Thread.currentThread().toString() + "]:" + "Received a packet");
                }

                if (dgrmpkt.getLength() <= 0)
                    continue;

                if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINER)) {
                    SNMP_ADAPTOR_LOGGER.logp(Level.FINER, SnmpSocket.class.getName(),
                        "run", "[" + Thread.currentThread().toString() + "]:" +
                          "Received a packet from : " + dgrmpkt.getAddress().toString() + ", Length = " +
                          dgrmpkt.getLength());
                }

                handleDatagram(dgrmpkt);

                // We are closing the snmp socket while handling the datagram.
                //
                if (isClosing)
                    break;

            } catch (IOException io) {
                // If the IO exception has been generated because of closing this SNMP socket,
                // (call to _socket.close while _socket is blocked for receiving packet) simply terminate closing properly.
                //
                if (isClosing) {
                    break;
                }
                if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINEST)) {
                    SNMP_ADAPTOR_LOGGER.logp(Level.FINEST, SnmpSocket.class.getName(),
                        "run", "IOEXception while receiving datagram", io);
                }
            } catch (Exception e) {
                // If the exception (NullPointerException) has been generated because of closing this SNMP socket,
                // (call to _socket = null while _socket is blocked for receiving packet) simply terminate closing properly.
                //
                if (isClosing) {
                    break;
                }
                if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINEST)) {
                    SNMP_ADAPTOR_LOGGER.logp(Level.FINEST, SnmpSocket.class.getName(),
                        "run", "Exception in socket thread...", e);
                }
            } catch (ThreadDeath d) {
                if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINEST)) {
                    SNMP_ADAPTOR_LOGGER.logp(Level.FINEST, SnmpSocket.class.getName(),
                        "run", "Socket Thread DEAD..." + toString(), d);
                }
                close();
                throw d;  // rethrow dead thread.
            } catch (Error err) {
                if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINEST)) {
                    SNMP_ADAPTOR_LOGGER.logp(Level.FINEST, SnmpSocket.class.getName(),
                        "run", "Got unexpected error", err);
                }
                handleJavaError(err);
            }
        }
    }

    /**
     * Finalizer of the <CODE>SnmpSocket</CODE> objects.
     * This method is called by the garbage collector on an object
     * when garbage collection determines that there are no more references to the object.
     * <P>Closes the datagram socket and stops the socket thread associated to this SNMP socket.
     */
    @Override
    protected synchronized void finalize() {
        close();
    }

    // PRIVATE METHODS
    //----------------

    /*
     * Keep this locked so that send can't happen.
     */
    private synchronized void handleJavaError(Throwable thr) {
        if (thr instanceof OutOfMemoryError) {
            if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINEST)) {
                SNMP_ADAPTOR_LOGGER.logp(Level.FINEST, SnmpSocket.class.getName(),
                    "handleJavaError", "OutOfMemory error", thr);
            }
            Thread.yield();
            return ;
        }
        // Any other Error invalidates the socket; isValid() then returns false.
        if (_socket != null) {
            _socket.close();
            _socket = null;
        }

        if (SNMP_ADAPTOR_LOGGER.isLoggable(Level.FINEST)) {
            SNMP_ADAPTOR_LOGGER.logp(Level.FINEST, SnmpSocket.class.getName(),
                "handleJavaError", "Global Internal error");
        }
        Thread.yield();
    }

    // Synchronized so datagram handling is serialized with send/close.
    private synchronized void handleDatagram(DatagramPacket dgrmpkt) {
        _dgramHdlr.processDatagram(dgrmpkt);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.end2end.salted; import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.util.Properties; import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT; import org.junit.Test; public class SaltedTableUpsertSelectIT extends BaseHBaseManagedTimeIT { @Test public void testUpsertIntoSaltedTableFromNormalTable() throws Exception { Properties props = new Properties(TEST_PROPERTIES); Connection conn = DriverManager.getConnection(getUrl(), props); conn.setAutoCommit(false); try { String ddl = "CREATE TABLE IF NOT EXISTS source" + " (pk VARCHAR NOT NULL PRIMARY KEY, col INTEGER)"; createTestTable(getUrl(), ddl); ddl = "CREATE TABLE IF NOT EXISTS target" + " (pk VARCHAR NOT NULL PRIMARY KEY, col INTEGER) SALT_BUCKETS=4"; createTestTable(getUrl(), ddl); String query = "UPSERT INTO source(pk, col) VALUES(?,?)"; PreparedStatement stmt = conn.prepareStatement(query); stmt.setString(1, "1"); stmt.setInt(2, 1); 
stmt.execute(); conn.commit(); query = "UPSERT INTO target(pk, col) SELECT pk, col from source"; stmt = conn.prepareStatement(query); stmt.execute(); conn.commit(); query = "SELECT * FROM target"; stmt = conn.prepareStatement(query); ResultSet rs = stmt.executeQuery(); assertTrue(rs.next()); assertEquals("1", rs.getString(1)); assertEquals(1, rs.getInt(2)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testUpsertIntoNormalTableFromSaltedTable() throws Exception { Properties props = new Properties(TEST_PROPERTIES); Connection conn = DriverManager.getConnection(getUrl(), props); conn.setAutoCommit(false); try { String ddl = "CREATE TABLE IF NOT EXISTS source" + " (pk VARCHAR NOT NULL PRIMARY KEY, col INTEGER) SALT_BUCKETS=4"; createTestTable(getUrl(), ddl); ddl = "CREATE TABLE IF NOT EXISTS target" + " (pk VARCHAR NOT NULL PRIMARY KEY, col INTEGER)"; createTestTable(getUrl(), ddl); String query = "UPSERT INTO source(pk, col) VALUES(?,?)"; PreparedStatement stmt = conn.prepareStatement(query); stmt.setString(1, "1"); stmt.setInt(2, 1); stmt.execute(); conn.commit(); query = "UPSERT INTO target(pk, col) SELECT pk, col from source"; stmt = conn.prepareStatement(query); stmt.execute(); conn.commit(); query = "SELECT * FROM target"; stmt = conn.prepareStatement(query); ResultSet rs = stmt.executeQuery(); assertTrue(rs.next()); assertEquals("1", rs.getString(1)); assertEquals(1, rs.getInt(2)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testUpsertSaltedTableIntoSaltedTable() throws Exception { Properties props = new Properties(TEST_PROPERTIES); Connection conn = DriverManager.getConnection(getUrl(), props); conn.setAutoCommit(false); try { String ddl = "CREATE TABLE IF NOT EXISTS source" + " (pk VARCHAR NOT NULL PRIMARY KEY, col INTEGER) SALT_BUCKETS=4"; createTestTable(getUrl(), ddl); ddl = "CREATE TABLE IF NOT EXISTS target" + " (pk VARCHAR NOT NULL PRIMARY KEY, col INTEGER) SALT_BUCKETS=4"; createTestTable(getUrl(), 
ddl); String query = "UPSERT INTO source(pk, col) VALUES(?,?)"; PreparedStatement stmt = conn.prepareStatement(query); stmt.setString(1, "1"); stmt.setInt(2, 1); stmt.execute(); conn.commit(); query = "UPSERT INTO target(pk, col) SELECT pk, col from source"; stmt = conn.prepareStatement(query); stmt.execute(); conn.commit(); query = "SELECT * FROM target"; stmt = conn.prepareStatement(query); ResultSet rs = stmt.executeQuery(); assertTrue(rs.next()); assertEquals("1", rs.getString(1)); assertEquals(1, rs.getInt(2)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testUpsertSelectOnSameSaltedTable() throws Exception { Properties props = new Properties(TEST_PROPERTIES); Connection conn = DriverManager.getConnection(getUrl(), props); conn.setAutoCommit(false); try { String ddl = "CREATE TABLE IF NOT EXISTS source" + " (pk VARCHAR NOT NULL PRIMARY KEY, col1 INTEGER, col2 INTEGER) SALT_BUCKETS=4"; createTestTable(getUrl(), ddl); String query = "UPSERT INTO source(pk, col1) VALUES(?,?)"; PreparedStatement stmt = conn.prepareStatement(query); stmt.setString(1, "1"); stmt.setInt(2, 1); stmt.execute(); conn.commit(); query = "UPSERT INTO source(pk, col2) SELECT pk, col1 from source"; stmt = conn.prepareStatement(query); stmt.execute(); conn.commit(); query = "SELECT col2 FROM source"; stmt = conn.prepareStatement(query); ResultSet rs = stmt.executeQuery(); assertTrue(rs.next()); assertEquals(1, rs.getInt(1)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testUpsertSelectOnSameSaltedTableWithEmptyPKColumn() throws Exception { Properties props = new Properties(TEST_PROPERTIES); Connection conn = DriverManager.getConnection(getUrl(), props); conn.setAutoCommit(false); try { String ddl = "CREATE TABLE IF NOT EXISTS source" + " (pk1 varchar NULL, pk2 varchar NULL, pk3 integer NOT NULL, col1 INTEGER" + " CONSTRAINT pk PRIMARY KEY (pk1, pk2, pk3)) SALT_BUCKETS=4"; createTestTable(getUrl(), ddl); String query = "UPSERT INTO 
source(pk1, pk2, pk3, col1) VALUES(?,?,?,?)"; PreparedStatement stmt = conn.prepareStatement(query); stmt.setString(1, "1"); stmt.setString(2, "2"); stmt.setInt(3, 1); stmt.setInt(4, 1); stmt.execute(); conn.commit(); conn.setAutoCommit(true); query = "UPSERT INTO source(pk3, col1, pk1) SELECT pk3+1, col1+1, pk2 from source"; stmt = conn.prepareStatement(query); stmt.execute(); conn.commit(); query = "SELECT col1 FROM source"; stmt = conn.prepareStatement(query); ResultSet rs = stmt.executeQuery(); assertTrue(rs.next()); assertEquals(1, rs.getInt(1)); assertTrue(rs.next()); assertEquals(2, rs.getInt(1)); assertFalse(rs.next()); } finally { conn.close(); } } }
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;

import static com.google.devtools.build.lib.packages.BuildType.LABEL;
import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST;
import static com.google.devtools.build.lib.packages.ImplicitOutputsFunction.fromTemplates;

import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Streams;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.LanguageDependentFragment.LibraryLanguage;
import com.google.devtools.build.lib.analysis.OutputGroupInfo;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.Substitution.ComputedSubstitution;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.Attribute.LabelLateBoundDefault;
import com.google.devtools.build.lib.packages.Attribute.LabelListLateBoundDefault;
import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SafeImplicitOutputsFunction;
import com.google.devtools.build.lib.rules.java.DeployArchiveBuilder.Compression;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgsProvider.ClasspathType;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaOptimizationMode;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.OneVersionEnforcementLevel;
import com.google.devtools.build.lib.rules.java.proto.GeneratedExtensionRegistryProvider;
import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.util.FileType;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.File;
import java.io.Serializable;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Nullable;

/** Pluggable Java compilation semantics. */
public interface JavaSemantics {

  LibraryLanguage LANGUAGE = new LibraryLanguage("Java");

  // Implicit output templates: names are derived from the rule's %{name}.
  SafeImplicitOutputsFunction JAVA_LIBRARY_CLASS_JAR = fromTemplates("lib%{name}.jar");
  SafeImplicitOutputsFunction JAVA_LIBRARY_SOURCE_JAR = fromTemplates("lib%{name}-src.jar");
  SafeImplicitOutputsFunction JAVA_BINARY_CLASS_JAR = fromTemplates("%{name}.jar");
  SafeImplicitOutputsFunction JAVA_BINARY_SOURCE_JAR = fromTemplates("%{name}-src.jar");
  SafeImplicitOutputsFunction JAVA_BINARY_DEPLOY_JAR = fromTemplates("%{name}_deploy.jar");
  SafeImplicitOutputsFunction JAVA_BINARY_MERGED_JAR = fromTemplates("%{name}_merged.jar");
  SafeImplicitOutputsFunction JAVA_UNSTRIPPED_BINARY_DEPLOY_JAR =
      fromTemplates("%{name}_deploy.jar.unstripped");
  SafeImplicitOutputsFunction JAVA_BINARY_PROGUARD_MAP = fromTemplates("%{name}_proguard.map");
  SafeImplicitOutputsFunction JAVA_BINARY_PROGUARD_PROTO_MAP =
      fromTemplates("%{name}_proguard.pbmap");
  SafeImplicitOutputsFunction JAVA_BINARY_PROGUARD_SEEDS = fromTemplates("%{name}_proguard.seeds");
  SafeImplicitOutputsFunction JAVA_BINARY_PROGUARD_USAGE = fromTemplates("%{name}_proguard.usage");
  SafeImplicitOutputsFunction JAVA_BINARY_PROGUARD_CONFIG =
      fromTemplates("%{name}_proguard.config");
  SafeImplicitOutputsFunction JAVA_ONE_VERSION_ARTIFACT = fromTemplates("%{name}-one-version.txt");
  SafeImplicitOutputsFunction JAVA_COVERAGE_RUNTIME_CLASS_PATH_TXT =
      fromTemplates("%{name}-runtime-classpath.txt");
  SafeImplicitOutputsFunction JAVA_BINARY_DEPLOY_SOURCE_JAR =
      fromTemplates("%{name}_deploy-src.jar");
  SafeImplicitOutputsFunction JAVA_TEST_CLASSPATHS_FILE = fromTemplates("%{name}_classpaths_file");

  // File types recognized by the Java rules.
  FileType JAVA_SOURCE = FileType.of(".java");
  FileType JAR = FileType.of(".jar");
  FileType PROPERTIES = FileType.of(".properties");
  FileType SOURCE_JAR = FileType.of(".srcjar");
  // TODO(bazel-team): Rename this metadata extension to something meaningful.
  FileType COVERAGE_METADATA = FileType.of(".em");

  /**
   * Label to the Java Toolchain rule. It is resolved from a label given in the java options.
   */
  String JAVA_TOOLCHAIN_LABEL = "//tools/jdk:toolchain";

  /** The java_toolchain.compatible_javacopts key for Java 7 javacopts */
  public static final String JAVA7_JAVACOPTS_KEY = "java7";
  /** The java_toolchain.compatible_javacopts key for Android javacopts */
  public static final String ANDROID_JAVACOPTS_KEY = "android";
  /** The java_toolchain.compatible_javacopts key for proto compilations. */
  public static final String PROTO_JAVACOPTS_KEY = "proto";
  /** The java_toolchain.compatible_javacopts key for testonly compilations. */
  public static final String TESTONLY_JAVACOPTS_KEY = "testonly";

  // Late-bound default for the Java toolchain attribute; the intersection cast
  // (Resolver & Serializable) keeps the lambda serializable for Skyframe.
  static LabelLateBoundDefault<JavaConfiguration> javaToolchainAttribute(
      RuleDefinitionEnvironment environment) {
    return LabelLateBoundDefault.fromTargetConfiguration(
        JavaConfiguration.class,
        environment.getToolsLabel(JAVA_TOOLCHAIN_LABEL),
        (Attribute.LateBoundDefault.Resolver<JavaConfiguration, Label> & Serializable)
            (rule, attributes, javaConfig) -> javaConfig.getToolchainLabel());
  }

  /**
   * Name of the output group used for source jars.
   */
  String SOURCE_JARS_OUTPUT_GROUP =
      OutputGroupInfo.HIDDEN_OUTPUT_GROUP_PREFIX + "source_jars";

  /** Implementation for the :jvm attribute. */
  static LabelLateBoundDefault<JavaConfiguration> jvmAttribute(RuleDefinitionEnvironment env) {
    return LabelLateBoundDefault.fromTargetConfiguration(
        JavaConfiguration.class,
        env.getToolsLabel(JavaImplicitAttributes.JDK_LABEL),
        (Attribute.LateBoundDefault.Resolver<JavaConfiguration, Label> & Serializable)
            (rule, attributes, configuration) -> configuration.getRuntimeLabel());
  }

  /** Implementation for the :host_jdk attribute. */
  static LabelLateBoundDefault<JavaConfiguration> hostJdkAttribute(RuleDefinitionEnvironment env) {
    return LabelLateBoundDefault.fromHostConfiguration(
        JavaConfiguration.class,
        env.getToolsLabel(JavaImplicitAttributes.HOST_JDK_LABEL),
        (Attribute.LateBoundDefault.Resolver<JavaConfiguration, Label> & Serializable)
            (rule, attributes, configuration) -> configuration.getRuntimeLabel());
  }

  /**
   * Implementation for the :java_launcher attribute. Note that the Java launcher is disabled by
   * default, so it returns null for the configuration-independent default value.
   */
  @AutoCodec
  LabelLateBoundDefault<JavaConfiguration> JAVA_LAUNCHER =
      LabelLateBoundDefault.fromTargetConfiguration(
          JavaConfiguration.class,
          null,
          (rule, attributes, javaConfig) -> {
            // This nullness check is purely for the sake of a test that doesn't bother to include
            // an
            // attribute map when calling this method.
            if (attributes != null) {
              // Don't depend on the launcher if we don't create an executable anyway
              if (attributes.has("create_executable")
                  && !attributes.get("create_executable", Type.BOOLEAN)) {
                return null;
              }
              // don't read --java_launcher if this target overrides via a launcher attribute
              if (attributes.isAttributeValueExplicitlySpecified("launcher")) {
                return attributes.get("launcher", LABEL);
              }
            }
            return javaConfig.getJavaLauncherLabel();
          });

  @AutoCodec
  LabelListLateBoundDefault<JavaConfiguration> JAVA_PLUGINS =
      LabelListLateBoundDefault.fromTargetConfiguration(
          JavaConfiguration.class,
          (rule, attributes, javaConfig) -> ImmutableList.copyOf(javaConfig.getPlugins()));

  /** Implementation for the :proguard attribute. */
  @AutoCodec
  LabelLateBoundDefault<JavaConfiguration> PROGUARD =
      LabelLateBoundDefault.fromTargetConfiguration(
          JavaConfiguration.class,
          null,
          (rule, attributes, javaConfig) -> javaConfig.getProguardBinary());

  @AutoCodec
  LabelListLateBoundDefault<JavaConfiguration> EXTRA_PROGUARD_SPECS =
      LabelListLateBoundDefault.fromTargetConfiguration(
          JavaConfiguration.class,
          (rule, attributes, javaConfig) ->
              ImmutableList.copyOf(javaConfig.getExtraProguardSpecs()));

  @AutoCodec
  LabelListLateBoundDefault<JavaConfiguration> BYTECODE_OPTIMIZERS =
      LabelListLateBoundDefault.fromTargetConfiguration(
          JavaConfiguration.class,
          (rule, attributes, javaConfig) -> {
            // Use a modicum of smarts to avoid implicit dependencies where we don't need them.
            JavaOptimizationMode optMode = javaConfig.getJavaOptimizationMode();
            boolean hasProguardSpecs =
                attributes.has("proguard_specs")
                    && !attributes.get("proguard_specs", LABEL_LIST).isEmpty();
            if (optMode == JavaOptimizationMode.NOOP
                || (optMode == JavaOptimizationMode.LEGACY && !hasProguardSpecs)) {
              return ImmutableList.<Label>of();
            }
            return ImmutableList.copyOf(
                Optional.presentInstances(javaConfig.getBytecodeOptimizers().values()));
          });

  // Placeholders replaced in the generated stub script.
  String JACOCO_METADATA_PLACEHOLDER = "%set_jacoco_metadata%";
  String JACOCO_MAIN_CLASS_PLACEHOLDER = "%set_jacoco_main_class%";
  String JACOCO_JAVA_RUNFILES_ROOT_PLACEHOLDER = "%set_jacoco_java_runfiles_root%";

  /**
   * Substitution for exporting the jars needed for jacoco coverage.
   */
  class ComputedJacocoSubstitution extends ComputedSubstitution {
    private final NestedSet<Artifact> jars;
    private final String pathPrefix;

    public ComputedJacocoSubstitution(NestedSet<Artifact> jars, String workspacePrefix) {
      super(JACOCO_METADATA_PLACEHOLDER);
      this.jars = jars;
      this.pathPrefix = "${JAVA_RUNFILES}/" + workspacePrefix;
    }

    /**
     * Concatenating the root relative paths of the artifacts. Each relative path entry is prepended
     * with "${JAVA_RUNFILES}" and the workspace prefix.
     */
    @Override
    public String getValue() {
      return Streams.stream(jars)
          .map(artifact -> pathPrefix + "/" + artifact.getRootRelativePathString())
          .collect(Collectors.joining(File.pathSeparator, "export JACOCO_METADATA_JARS=", ""));
    }
  }

  /**
   * Verifies if the rule contains any errors.
   *
   * <p>Errors should be signaled through {@link RuleContext}.
   */
  void checkRule(RuleContext ruleContext, JavaCommon javaCommon);

  /**
   * Verifies there are no conflicts in protos.
   *
   * <p>Errors should be signaled through {@link RuleContext}.
   */
  void checkForProtoLibraryAndJavaProtoLibraryOnSameProto(
      RuleContext ruleContext, JavaCommon javaCommon);

  /**
   * Returns the main class of a Java binary.
   */
  String getMainClass(RuleContext ruleContext, ImmutableList<Artifact> srcsArtifacts);

  /**
   * Returns the primary class for a Java binary - either the main class, or, in case of a test,
   * the test class (not the test runner main class).
   */
  String getPrimaryClass(RuleContext ruleContext, ImmutableList<Artifact> srcsArtifacts);

  /**
   * Returns the resources contributed by a Java rule (usually the contents of the
   * {@code resources} attribute)
   */
  ImmutableList<Artifact> collectResources(RuleContext ruleContext);

  /**
   * Constructs the command line to call SingleJar to join all artifacts from {@code classpath}
   * (java code) and {@code resources} into {@code output}.
   */
  CustomCommandLine buildSingleJarCommandLine(
      String toolchainIdentifier,
      Artifact output,
      String mainClass,
      ImmutableList<String> manifestLines,
      Iterable<Artifact> buildInfoFiles,
      ImmutableList<Artifact> resources,
      NestedSet<Artifact> classpath,
      boolean includeBuildData,
      Compression compression,
      Artifact launcher,
      boolean usingNativeSinglejar,
      OneVersionEnforcementLevel oneVersionEnforcementLevel,
      Artifact oneVersionWhitelistArtifact);

  /**
   * Creates the action that writes the Java executable stub script.
   *
   * <p>Returns the launcher script artifact. This may or may not be the same as {@code executable},
   * depending on the implementation of this method. If they are the same, then this Artifact should
   * be used when creating both the {@code RunfilesProvider} and the {@code RunfilesSupport}. If
   * they are different, the new value should be used when creating the {@code RunfilesProvider} (so
   * it will be the stub script executed by "bazel run" for example), and the old value should be
   * used when creating the {@code RunfilesSupport} (so the runfiles directory will be named after
   * it).
   *
   * <p>For example on Windows we use a double dispatch approach: the launcher is a batch file (and
   * is created and returned by this method) which shells out to a shell script (the {@code
   * executable} argument).
   *
   * <p>In Blaze, this method considers {@code javaExecutable} as a substitution that can be
   * directly used to replace %javabin% in stub script, but in Bazel this method considers {@code
   * javaExecutable} as a file path for the JVM binary (java).
   */
  Artifact createStubAction(
      RuleContext ruleContext,
      JavaCommon javaCommon,
      List<String> jvmFlags,
      Artifact executable,
      String javaStartClass,
      String javaExecutable);

  /**
   * Same as {@link #createStubAction(RuleContext, JavaCommon, List, Artifact, String, String)}.
   *
   * <p> In *experimental* coverage mode creates a txt file containing the runtime jars names.
   * {@code JacocoCoverageRunner} will use it to retrieve the name of the jars considered for
   * collecting coverage. {@code JacocoCoverageRunner} will *not* collect coverage implicitly
   * for all the runtime jars, only for those that pack a file ending in "-paths-for-coverage.txt".
   */
  public Artifact createStubAction(
      RuleContext ruleContext,
      JavaCommon javaCommon,
      List<String> jvmFlags,
      Artifact executable,
      String javaStartClass,
      String coverageStartClass,
      NestedSetBuilder<Artifact> filesBuilder,
      String javaExecutable);

  /**
   * Returns true if {@code createStubAction} considers {@code javaExecutable} as a substitution.
   * Returns false if {@code createStubAction} considers {@code javaExecutable} as a file path.
   */
  boolean isJavaExecutableSubstitution();

  /**
   * Optionally creates a file containing the relative classpaths within the runfiles tree. If
   * {@link Optional#isPresent()}, then the caller should ensure the file appears in the runfiles.
   */
  Optional<Artifact> createClasspathsFile(RuleContext ruleContext, JavaCommon javaCommon)
      throws InterruptedException;

  /**
   * Adds extra runfiles for a {@code java_binary} rule.
   */
  void addRunfilesForBinary(RuleContext ruleContext, Artifact launcher,
      Runfiles.Builder runfilesBuilder);

  /**
   * Adds extra runfiles for a {@code java_library} rule.
   */
  void addRunfilesForLibrary(RuleContext ruleContext, Runfiles.Builder runfilesBuilder);

  /**
   * Returns the command line options to be used when compiling Java code for {@code java_*} rules.
   *
   * <p>These will come after the default options specified by the toolchain, and before the ones in
   * the {@code javacopts} attribute.
   */
  ImmutableList<String> getCompatibleJavacOptions(
      RuleContext ruleContext, JavaToolchainProvider toolchain);

  /**
   * Add additional targets to be treated as direct dependencies.
   */
  void collectTargetsTreatedAsDeps(
      RuleContext ruleContext,
      ImmutableList.Builder<TransitiveInfoCollection> builder,
      ClasspathType type);

  /**
   * Enables coverage support for the java target - adds instrumented jar to the classpath and
   * modifies main class.
   *
   * @return new main class
   */
  String addCoverageSupport(
      JavaCompilationHelper helper,
      JavaTargetAttributes.Builder attributes,
      Artifact executable,
      Artifact instrumentationMetadata,
      JavaCompilationArtifacts.Builder javaArtifactsBuilder,
      String mainClass)
      throws InterruptedException;

  /**
   * Same as {@link #addCoverageSupport(JavaCompilationHelper, JavaTargetAttributes.Builder,
   * Artifact, Artifact, JavaCompilationArtifacts.Builder, String)}.
   *
   * <p> In *experimental* coverage mode omits dealing with instrumentation metadata and does not
   * create the instrumented jar.
   */
  String addCoverageSupport(
      JavaCompilationHelper helper,
      JavaTargetAttributes.Builder attributes,
      Artifact executable,
      Artifact instrumentationMetadata,
      JavaCompilationArtifacts.Builder javaArtifactsBuilder,
      String mainClass,
      boolean isExperimentalCoverage)
      throws InterruptedException;

  /**
   * Return the JVM flags to be used in a Java binary.
   */
  Iterable<String> getJvmFlags(
      RuleContext ruleContext, ImmutableList<Artifact> srcsArtifacts, List<String> userJvmFlags);

  /**
   * Adds extra providers to a Java target.
   * @throws InterruptedException
   */
  void addProviders(
      RuleContext ruleContext,
      JavaCommon javaCommon,
      Artifact gensrcJar,
      RuleConfiguredTargetBuilder ruleBuilder)
      throws InterruptedException;

  /**
   * Translates XMB messages to translations artifact suitable for Java targets.
   */
  ImmutableList<Artifact> translate(RuleContext ruleContext, JavaConfiguration javaConfig,
      List<Artifact> messages);

  /**
   * Get the launcher artifact for a java binary, creating the necessary actions for it.
   *
   * @param ruleContext The rule context
   * @param common The common helper class.
   * @param deployArchiveBuilder the builder to construct the deploy archive action (mutable).
   * @param unstrippedDeployArchiveBuilder the builder to construct the unstripped deploy archive
   *     action (mutable).
   * @param runfilesBuilder the builder to construct the list of runfiles (mutable).
   * @param jvmFlags the list of flags to pass to the JVM when running the Java binary (mutable).
   * @param attributesBuilder the builder to construct the list of attributes of this target
   *     (mutable).
   * @return the launcher and unstripped launcher as an artifact pair. If shouldStrip is false, then
   *     they will be the same.
   * @throws InterruptedException
   */
  Pair<Artifact, Artifact> getLauncher(
      final RuleContext ruleContext,
      final JavaCommon common,
      DeployArchiveBuilder deployArchiveBuilder,
      DeployArchiveBuilder unstrippedDeployArchiveBuilder,
      Runfiles.Builder runfilesBuilder,
      List<String> jvmFlags,
      JavaTargetAttributes.Builder attributesBuilder,
      boolean shouldStrip)
      throws InterruptedException;

  /**
   * Add a source artifact to a {@link JavaTargetAttributes.Builder}. It is called when a source
   * artifact is processed but is not matched by default patterns in the
   * {@link JavaTargetAttributes.Builder#addSourceArtifacts(Iterable)} method. The semantics can
   * then detect its custom artifact types and add it to the builder.
   */
  void addArtifactToJavaTargetAttribute(JavaTargetAttributes.Builder builder,
      Artifact srcArtifact);

  /**
   * Takes the path of a Java resource and tries to determine the Java
   * root relative path of the resource.
   *
   * <p>This is only used if the Java rule doesn't have a {@code resource_strip_prefix} attribute.
   *
   * @param path the root relative path of the resource.
   * @return the Java root relative path of the resource of the root
   *     relative path of the resource if no Java root relative path can be
   *     determined.
   */
  PathFragment getDefaultJavaResourcePath(PathFragment path);

  /**
   * @return a list of extra arguments to appends to the runfiles support.
   */
  List<String> getExtraArguments(RuleContext ruleContext, ImmutableList<Artifact> sources);

  /**
   * @return main class (entry point) for the Java compiler.
   */
  String getJavaBuilderMainClass();

  /**
   * @return An artifact representing the protobuf-format version of the
   *     proguard mapping, or null if the proguard version doesn't support this.
   */
  Artifact getProtoMapping(RuleContext ruleContext) throws InterruptedException;

  /**
   * Produces the proto generated extension registry artifacts, or <tt>null</tt>
   * if no registry needs to be generated for the provided <tt>ruleContext</tt>.
   */
  @Nullable
  GeneratedExtensionRegistryProvider createGeneratedExtensionRegistry(
      RuleContext ruleContext,
      JavaCommon common,
      NestedSetBuilder<Artifact> filesBuilder,
      JavaCompilationArtifacts.Builder javaCompilationArtifactsBuilder,
      JavaRuleOutputJarsProvider.Builder javaRuleOutputJarsProviderBuilder,
      JavaSourceJarsProvider.Builder javaSourceJarsProviderBuilder)
      throws InterruptedException;

  Artifact getObfuscatedConstantStringMap(RuleContext ruleContext) throws InterruptedException;

  /**
   * Checks if dependency errors coming from java_proto_library rules should be treated as errors
   * even if the java_proto_library rule sets strict_deps = 0.
   */
  boolean isJavaProtoLibraryStrictDeps(RuleContext ruleContext);
}
package com.dianping.cat.report.page.heartbeat.config;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;

import org.unidal.dal.jdbc.DalNotFoundException;
import org.unidal.lookup.annotation.Inject;
import org.unidal.lookup.extension.Initializable;
import org.unidal.lookup.extension.InitializationException;

import com.dianping.cat.Cat;
import com.dianping.cat.config.content.ContentFetcher;
import com.dianping.cat.core.config.Config;
import com.dianping.cat.core.config.ConfigDao;
import com.dianping.cat.core.config.ConfigEntity;
import com.dianping.cat.home.heartbeat.entity.HeartbeatDisplayPolicy;
import com.dianping.cat.home.heartbeat.entity.Group;
import com.dianping.cat.home.heartbeat.entity.Metric;
import com.dianping.cat.home.heartbeat.transform.DefaultSaxParser;

/**
 * Loads and persists the "heartbeat-display-policy" configuration and answers
 * display-related queries (unit divisors, group/metric ordering, delta and
 * alert flags) for heartbeat groups and metrics.
 */
public class HeartbeatDisplayPolicyManager implements Initializable {
	@Inject
	private ConfigDao m_configDao;

	@Inject
	private ContentFetcher m_fetcher;

	// base multiplier for the K/M/G unit suffixes (1 K = 1024)
	private static final int K = 1024;

	private static final String CONFIG_NAME = "heartbeat-display-policy";

	// database primary key of the persisted config row, reused by storeConfig()
	private int m_configId;

	private HeartbeatDisplayPolicy m_config;

	public HeartbeatDisplayPolicy getHeartbeatDisplayPolicy() {
		return m_config;
	}

	@Override
	public void initialize() throws InitializationException {
		try {
			Config config = m_configDao.findByName(CONFIG_NAME, ConfigEntity.READSET_FULL);
			String content = config.getContent();

			m_configId = config.getId();
			m_config = DefaultSaxParser.parse(content);
		} catch (DalNotFoundException e) {
			// first run: seed the database row from the bundled default content
			try {
				String content = m_fetcher.getConfigContent(CONFIG_NAME);
				Config config = m_configDao.createLocal();

				config.setName(CONFIG_NAME);
				config.setContent(content);
				m_configDao.insert(config);
				m_configId = config.getId();
				m_config = DefaultSaxParser.parse(content);
			} catch (Exception ex) {
				Cat.logError(ex);
			}
		} catch (Exception e) {
			Cat.logError(e);
		}

		// fall back to an empty policy so callers never observe a null config
		if (m_config == null) {
			m_config = new HeartbeatDisplayPolicy();
		}
	}

	/**
	 * Replaces the in-memory policy with the given XML and persists it.
	 *
	 * @param xml the new policy document
	 * @return true if both parsing and storing succeeded
	 */
	public boolean insert(String xml) {
		try {
			m_config = DefaultSaxParser.parse(xml);

			return storeConfig();
		} catch (Exception e) {
			Cat.logError(e);
			return false;
		}
	}

	/**
	 * @return true if the metric is configured to be displayed as a delta;
	 *         false when the group or metric is unknown
	 */
	public boolean isDelta(String groupName, String metricName) {
		Group group = m_config.findGroup(groupName);

		if (group != null) {
			Metric metric = group.findMetric(metricName);

			if (metric != null) {
				return metric.isDelta();
			}
		}
		return false;
	}

	/**
	 * @return the metric configuration, or null when the group or metric is unknown
	 */
	public Metric queryMetric(String groupName, String metricName) {
		Group group = m_config.findGroup(groupName);

		if (group != null) {
			Metric metric = group.findMetric(metricName);

			if (metric != null) {
				return metric;
			}
		}
		return null;
	}

	/**
	 * @return every metric flagged for alerting, encoded as "groupId:metricId"
	 */
	public List<String> queryAlertMetrics() {
		List<String> metrics = new ArrayList<String>();

		for (Group group : m_config.getGroups().values()) {
			String groupId = group.getId();

			for (Metric metric : group.getMetrics().values()) {
				if (metric.isAlert()) {
					metrics.add(groupId + ":" + metric.getId());
				}
			}
		}
		return metrics;
	}

	/**
	 * Resolves the display unit of a metric to a numeric divisor.
	 *
	 * <p>"K", "M" and "G" map to powers of 1024; any other non-empty unit string
	 * is parsed as a plain integer. Returns 1 when the group or metric is
	 * unknown, or when the unit is missing or not a valid number. (The previous
	 * implementation called Integer.parseInt unconditionally and threw an
	 * uncaught NumberFormatException — or NullPointerException for a missing
	 * unit — into display code.)
	 */
	public int queryUnit(String groupName, String metricName) {
		Group group = m_config.findGroup(groupName);

		if (group != null) {
			Metric metric = group.findMetric(metricName);

			if (metric != null) {
				String metricUnit = metric.getUnit();

				if ("K".equals(metricUnit)) {
					return K;
				} else if ("M".equals(metricUnit)) {
					return K * K;
				} else if ("G".equals(metricUnit)) {
					return K * K * K;
				} else if (metricUnit != null && metricUnit.length() > 0) {
					try {
						return Integer.parseInt(metricUnit);
					} catch (NumberFormatException e) {
						// a malformed unit in the config must not crash rendering
						Cat.logError(e);
					}
				}
			}
		}
		return 1;
	}

	/**
	 * Sorts the given group names by their configured order; names missing from
	 * the configuration are appended at the end in their original order.
	 */
	public List<String> sortGroupNames(List<String> originGroupNames) {
		List<Group> groups = new ArrayList<Group>();

		for (Entry<String, Group> entry : m_config.getGroups().entrySet()) {
			if (originGroupNames.contains(entry.getKey())) {
				groups.add(entry.getValue());
			}
		}

		Collections.sort(groups, new Comparator<Group>() {
			@Override
			public int compare(Group g1, Group g2) {
				return g1.getOrder() - g2.getOrder();
			}
		});

		List<String> result = new ArrayList<String>();

		for (Group group : groups) {
			result.add(group.getId());
		}

		for (String originGroupName : originGroupNames) {
			if (!result.contains(originGroupName)) {
				result.add(originGroupName);
			}
		}
		return result;
	}

	public List<String> sortGroupNames(Set<String> originGroupNameSet) {
		return sortGroupNames(new ArrayList<String>(originGroupNameSet));
	}

	/**
	 * Sorts the given metric names of one group by their configured order; names
	 * missing from the configuration (or an unknown group) are appended at the
	 * end in their original order.
	 */
	public List<String> sortMetricNames(String groupName, List<String> originMetricNames) {
		Group group = m_config.findGroup(groupName);
		List<String> result = new ArrayList<String>();

		if (group != null) {
			List<Metric> list = new ArrayList<Metric>();

			for (Entry<String, Metric> entry : group.getMetrics().entrySet()) {
				if (originMetricNames.contains(entry.getKey())) {
					list.add(entry.getValue());
				}
			}

			Collections.sort(list, new Comparator<Metric>() {
				@Override
				public int compare(Metric m1, Metric m2) {
					return m1.getOrder() - m2.getOrder();
				}
			});

			for (Metric metric : list) {
				result.add(metric.getId());
			}
		}

		for (String originMetricName : originMetricNames) {
			if (!result.contains(originMetricName)) {
				result.add(originMetricName);
			}
		}
		return result;
	}

	public List<String> sortMetricNames(String groupName, Set<String> originMetricNames) {
		return sortMetricNames(groupName, new ArrayList<String>(originMetricNames));
	}

	// Persists the current in-memory policy back to its config-table row.
	private boolean storeConfig() {
		synchronized (this) {
			try {
				Config config = m_configDao.createLocal();

				config.setId(m_configId);
				config.setKeyId(m_configId);
				config.setName(CONFIG_NAME);
				config.setContent(m_config.toString());
				m_configDao.updateByPK(config, ConfigEntity.UPDATESET_FULL);
			} catch (Exception e) {
				Cat.logError(e);
				return false;
			}
		}
		return true;
	}
}
/*******************************************************************************
 * Copyright Duke Comprehensive Cancer Center and SemanticBits
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/c3pr/LICENSE.txt for details.
 ******************************************************************************/
package edu.duke.cabig.c3pr.esb.impl;

import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.rmi.RemoteException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;

import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.acegisecurity.Authentication;
import org.acegisecurity.context.SecurityContext;
import org.acegisecurity.context.SecurityContextHolder;
import org.acegisecurity.context.SecurityContextImpl;
import org.apache.axis.message.MessageElement;
import org.apache.axis.message.addressing.EndpointReferenceType;
import org.apache.axis.types.URI;
import org.apache.axis.types.URI.MalformedURIException;
import org.apache.log4j.Logger;
import org.globus.gsi.GlobusCredential;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import edu.duke.cabig.c3pr.esb.BroadcastException;
import edu.duke.cabig.c3pr.esb.CCTSApplicationNames;
import edu.duke.cabig.c3pr.esb.CCTSMessageBroadcaster;
import edu.duke.cabig.c3pr.esb.CaXchangeMessageHelper;
import edu.duke.cabig.c3pr.esb.CaXchangeMessageResponseHandler;
import edu.duke.cabig.c3pr.esb.CaXchangeMessageResponseHandlerSet;
import edu.duke.cabig.c3pr.esb.CaXchangeMessageResponseNotifier;
import edu.duke.cabig.c3pr.esb.DelegatedCredential;
import edu.duke.cabig.c3pr.esb.DelegatedCredentialProvider;
import edu.duke.cabig.c3pr.esb.ESBCommunicationException;
import edu.duke.cabig.c3pr.esb.MessageWorkflowCallback;
import edu.duke.cabig.c3pr.esb.OperationNameEnum;
import edu.duke.cabig.c3pr.esb.ResponseErrors;
import gov.nih.nci.cagrid.caxchange.client.CaXchangeRequestProcessorClient;
import gov.nih.nci.cagrid.caxchange.context.client.CaXchangeResponseServiceClient;
import gov.nih.nci.cagrid.caxchange.context.stubs.types.CaXchangeResponseServiceReference;
import gov.nih.nci.cagrid.common.Utils;
import gov.nih.nci.caxchange.Credentials;
import gov.nih.nci.caxchange.Message;
import gov.nih.nci.caxchange.MessagePayload;
import gov.nih.nci.caxchange.Metadata;
import gov.nih.nci.caxchange.Request;
import gov.nih.nci.caxchange.ResponseMessage;
import gov.nih.nci.caxchange.Statuses;
import gov.nih.nci.caxchange.TargetResponseMessage;

/**
 * Sends messages to caXchange. Also, will notify of the message status
 * by impelementing MessageWorkflowNotifier
 * <p/>
 * <p/>
 * Created by IntelliJ IDEA.
 * User: kherm, Vinay Gangoli
 * Date: Nov 13, 2007
 * Time: 3:40:56 PM
 * To change this template use File | Settings | File Templates.
 */
public class CaXchangeMessageBroadcasterImpl implements CCTSMessageBroadcaster, CaXchangeMessageResponseNotifier {

    private String caXchangeURL;

    //default value. Should not change
    private Map messageTypesMapping;

    private CaXchangeMessageResponseHandlerSet messageResponseHandlers = new CaXchangeMessageResponseHandlerSet();

    private DelegatedCredentialProvider delegatedCredentialProvider;

    private Logger log = Logger.getLogger(CaXchangeMessageBroadcasterImpl.class);

    private MessageWorkflowCallback messageWorkflowCallback;

    // maximum number of seconds to poll caXchange for an asynchronous response
    private int timeout;

    public static final String namespaceURI = "http://caXchange.nci.nih.gov/messaging";

    public static final String localPart = "caXchangeResponseMessage";

    public static final String FAILURE = "FAILURE";

    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    /**
     * Will just use a dummy id to broadcast message
     *
     * @param message
     * @throws BroadcastException
     */
    public void broadcast(String message) throws BroadcastException {
        broadcast(message, getLocalMetadataWithDummyId());
    }

    private edu.duke.cabig.c3pr.esb.Metadata getLocalMetadataWithDummyId() {
        return new edu.duke.cabig.c3pr.esb.Metadata(OperationNameEnum.NA.name(), "DUMMY_ID");
    }

    /**
     * Broadcasts the domain object to caXchange
     *
     * @param cctsDomainObjectXML xml message
     * @param localMetadata includes attributes like externalId (business id of the message;
     *            you can track messages by this id) and operationType (e.g. NA, PERSON,
     *            ORGANIZATION etc)
     * @throws BroadcastException
     */
    public void broadcast(String cctsDomainObjectXML, edu.duke.cabig.c3pr.esb.Metadata localMetadata) throws BroadcastException {
        GlobusCredential proxy = getProxy();
        Credentials credentials = getCredentials();
        String externalId = localMetadata.getExternalIdentifier();
        //marshall the bean
        Document messageDOM = marshallBean(cctsDomainObjectXML);
        CaXchangeRequestProcessorClient caXchangeClient = null;
        CaXchangeResponseServiceReference responseRef = null;
        try {
            caXchangeClient = new CaXchangeRequestProcessorClient(caXchangeURL, proxy);
            Message xchangeMessage = CaXchangeMessageHelper.createXchangeMessage(messageDOM);
            Metadata mData = buildMetadata(localMetadata, messageDOM, credentials);
            xchangeMessage.setMetadata(mData);
            log.debug("Sending message to caXchange");
            responseRef = caXchangeClient.processRequestAsynchronously(xchangeMessage);
            if (messageWorkflowCallback != null) {
                messageWorkflowCallback.messageSendSuccessful(externalId);
            }
        } catch (RemoteException e) {
            if (messageWorkflowCallback != null) {
                messageWorkflowCallback.messageSendFailed(externalId);
            }
            throw new ESBCommunicationException(e);
        } catch (MalformedURIException e) {
            if (messageWorkflowCallback != null) {
                messageWorkflowCallback.messageSendFailed(externalId);
            }
            throw new ESBCommunicationException(e);
        }
        //logging epr info
        logEPR(responseRef.getEndpointReference());
        //check on the response asynchronously only if someone is interested
        checkResponseAsynchronously(responseRef, externalId, proxy);
    }

    /**
     * Broadcasts the COPPA content to caXchange
     *
     * @param cctsDomainObjectXML xml message
     * @param localMetadata includes attributes like externalId (business id of the message;
     *            you can track messages by this id) and operationType (e.g. NA, PERSON,
     *            ORGANIZATION etc)
     * @throws BroadcastException
     * @return responseXML as string
     */
    public String broadcastCoppaMessage(String cctsDomainObjectXML, edu.duke.cabig.c3pr.esb.Metadata localMetadata) throws BroadcastException {
        String serviceResponsePayload = null;
        Credentials credentials = getCredentials();
        //marshall the bean
        Document messageDOM = marshallBean(cctsDomainObjectXML);
        try {
            Message xchangeMessage = new Message();
            Metadata mData = buildMetadataForCoppa(localMetadata, messageDOM, credentials);
            xchangeMessage.setMetadata(mData);
            MessageElement messageElement = new MessageElement(messageDOM.getDocumentElement());
            MessagePayload messagePayload = new MessagePayload();
            messagePayload.setXmlSchemaDefinition(new URI("http://po.coppa.nci.nih.gov"));
            messagePayload.set_any(new MessageElement[]{messageElement});
            Request request = new Request();
            xchangeMessage.setRequest(request);
            xchangeMessage.getRequest().setBusinessMessagePayload(messagePayload);
            serviceResponsePayload = broadcastCoppaMessage(xchangeMessage);
        } catch (MalformedURIException e) {
            log.error("Could not instantiate CaXchangeRequestProcessorClient");
            log.error(e.getMessage());
        }
        return serviceResponsePayload;
    }

    /**
     * Broadcasts the COPPA content to caXchange. This is used by the PA searches which return
     * more than one result and needs an offset to be specified.
     *
     * @param cctsDomainObjectXMLList list of xml messages
     * @param localMetadata includes attributes like externalId (business id of the message;
     *            you can track messages by this id) and operationType (e.g. NA, PERSON,
     *            ORGANIZATION etc)
     * @throws BroadcastException
     * @return responseXML as string
     */
    public String broadcastCoppaMessage(List<String> cctsDomainObjectXMLList, edu.duke.cabig.c3pr.esb.Metadata localMetadata) throws BroadcastException {
        String serviceResponsePayload = null;
        Credentials credentials = getCredentials();
        MessageElement[] messageElements = new MessageElement[cctsDomainObjectXMLList.size()];
        Document messageDOM = null;
        MessageElement messageElement = null;
        for (int i = 0; i < cctsDomainObjectXMLList.size(); i++) {
            //marshall the bean
            messageDOM = marshallBean(cctsDomainObjectXMLList.get(i));
            messageElement = new MessageElement(messageDOM.getDocumentElement());
            messageElements[i] = messageElement;
        }
        try {
            Message xchangeMessage = new Message();
            // NOTE(review): metadata is built from the LAST document in the list, as before
            Metadata mData = buildMetadataForCoppa(localMetadata, messageDOM, credentials);
            xchangeMessage.setMetadata(mData);
            MessagePayload messagePayload = new MessagePayload();
            messagePayload.setXmlSchemaDefinition(new URI("http://pa.services.coppa.nci.nih.gov"));
            messagePayload.set_any(messageElements);
            Request request = new Request();
            xchangeMessage.setRequest(request);
            xchangeMessage.getRequest().setBusinessMessagePayload(messagePayload);
            serviceResponsePayload = broadcastCoppaMessage(xchangeMessage);
        } catch (MalformedURIException e) {
            log.error("Could not instantiate CaXchangeRequestProcessorClient");
            log.error(e.getMessage());
        }
        return serviceResponsePayload;
    }

    /**
     * private method used by the broadcastCoppa overloaded methods
     *
     * @param xchangeMessage
     * @return the serialized ResponseMessage XML, or null if serialization failed
     * @throws BroadcastException
     */
    private String broadcastCoppaMessage(Message xchangeMessage) throws BroadcastException {
        String serviceResponsePayload = null;
        CaXchangeRequestProcessorClient caXchangeClient = null;
        GlobusCredential proxy = getProxy();
        ResponseMessage responseMessage = null;
        try {
            caXchangeClient = new CaXchangeRequestProcessorClient(caXchangeURL, proxy);
            log.debug("Sending message to caXchange ");
            responseMessage = caXchangeClient.processRequestSynchronously(xchangeMessage);
            if (responseMessage.getResponse().getCaXchangeError() != null) {
                log.error("Int hub returned error: " + responseMessage.getResponse().getCaXchangeError().getErrorDescription());
            }
            if (responseMessage.getResponse().getResponseStatus().getValue().equals(FAILURE)) {
                log.error("Int hub returned response error: ");
                if (responseMessage.getResponse().getTargetResponse(0) != null
                        && responseMessage.getResponse().getTargetResponse(0).getTargetError() != null) {
                    log.error(responseMessage.getResponse().getTargetResponse(0).getTargetError().getErrorDescription());
                }
            }
            InputStream serializeStream = CaXchangeRequestProcessorClient.class.getResourceAsStream("client-config.wsdd");
            StringWriter writer = new StringWriter();
            Utils.serializeObject(responseMessage, new QName(namespaceURI, localPart), writer, serializeStream);
            serviceResponsePayload = writer.getBuffer().toString();
        } catch (RemoteException e) {
            log.error("caXchange could not process request", e);
            throw new BroadcastException("caXchange could not process message", e);
        } catch (Exception e) {
            // best-effort: serialization failure returns null rather than failing the broadcast;
            // log the full exception so the stack trace is not lost
            log.error("Could not serialize ", e);
        }
        return serviceResponsePayload;
    }

    /**
     * Gets the proxy credential from the registered delegated-credential provider, if any.
     *
     * @return the globus proxy credential, or null when no provider/credential is available
     * @throws BroadcastException if the provider fails
     */
    private GlobusCredential getProxy() throws BroadcastException {
        GlobusCredential proxy = null;
        try {
            if (delegatedCredentialProvider != null) {
                log.debug("Using delegated crential provider to set credentials");
                DelegatedCredential cred = delegatedCredentialProvider.provideDelegatedCredentials();
                if (cred != null) {
                    proxy = cred.getCredential();
                    log.debug("Found valid proxy. Using it for esb communication");
                }
            }
        } catch (Exception e) {
            throw new BroadcastException("caXchange could not initialize caExchange client. Using URL " + caXchangeURL, e);
        }
        return proxy;
    }

    /**
     * Gets the credentials.
     *
     * @return the credentials
     * @throws BroadcastException the broadcast exception
     */
    private Credentials getCredentials() throws BroadcastException {
        Credentials creds = new Credentials();
        // userName/password intentionally not set - they are optional elements for ihub
        try {
            // if a provider is registered then use it to get credentials
            if (delegatedCredentialProvider != null) {
                log.debug("Using delegated crential provider to set credentials");
                DelegatedCredential cred = delegatedCredentialProvider.provideDelegatedCredentials();
                if (cred != null) {
                    //set the delegated epr.
                    creds.setDelegatedCredentialReference(cred.getDelegatedEPR());
                }
            }
        } catch (Exception e) {
            throw new BroadcastException("caXchange could not initialize caExchange client. Using URL " + caXchangeURL, e);
        }
        return creds;
    }

    /**
     * Marshall bean.
     *
     * @param cctsDomainObjectXML the ccts domain object xml
     * @return the document
     * @throws BroadcastException the broadcast exception
     */
    private Document marshallBean(String cctsDomainObjectXML) throws BroadcastException {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true);
        Document messageDOM;
        try {
            DocumentBuilder db = dbf.newDocumentBuilder();
            messageDOM = db.parse(new InputSource(new StringReader(cctsDomainObjectXML)));
        } catch (SAXException e) {
            throw new BroadcastException("caXchange could not serialize domain object", e);
        } catch (IOException e) {
            throw new BroadcastException("caXchange could not serialize domain object", e);
        } catch (ParserConfigurationException e) {
            throw new BroadcastException("caXchange could not serialize domain object", e);
        }
        return messageDOM;
    }

    /**
     * Builds the metadata for COPPa scenarios.
     *
     * @param localMetadata the local metadata
     * @param messageDOM the message dom
     * @param creds the creds
     * @return the metadata
     */
    private Metadata buildMetadataForCoppa(edu.duke.cabig.c3pr.esb.Metadata localMetadata, Document messageDOM, Credentials creds) {
        Metadata mData = new Metadata();
        mData.setOperationName(localMetadata.getOperationName());
        mData.setExternalIdentifier(localMetadata.getExternalIdentifier());
        mData.setServiceType(localMetadata.getServiceType());
        //will be removed. temp
        mData.setCredentials(creds);
        return mData;
    }

    /**
     * Builds the metadata For interoperability.
     *
     * @param localMetadata the local metadata
     * @param messageDOM the message dom
     * @param creds the creds
     * @return the metadata
     */
    private Metadata buildMetadata(edu.duke.cabig.c3pr.esb.Metadata localMetadata, Document messageDOM, Credentials creds) {
        Metadata mData = new Metadata();
        // service type is looked up from the root element name of the message
        mData.setServiceType((String) messageTypesMapping.get(messageDOM.getDocumentElement().getNodeName()));
        mData.setExternalIdentifier(localMetadata.getExternalIdentifier());
        //will be removed. temp
        mData.setCredentials(creds);
        return mData;
    }

    /**
     * Check response asynchronously.
     *
     * @param responseRef the response ref
     * @param externalId the external id
     * @param proxy the proxy
     */
    private void checkResponseAsynchronously(CaXchangeResponseServiceReference responseRef, String externalId, GlobusCredential proxy) {
        if (messageWorkflowCallback != null || messageResponseHandlers.size() > 0) {
            log.debug("Will track response from caXchange");
            try {
                FutureTask asyncTask = new AsynchronousResponseRetreiver(
                        new SynchronousResponseProcessor(responseRef, messageWorkflowCallback, externalId, proxy, timeout),
                        SecurityContextHolder.getContext().getAuthentication());
                //ToDo make this like a global service not single thread executor
                ExecutorService es = Executors.newSingleThreadExecutor();
                es.submit(asyncTask);
                es.shutdown();
                //these exceptions do not mean a message send failure
            } catch (URI.MalformedURIException e) {
                log.error(e);
            } catch (RemoteException e) {
                log.error(e);
            }
        }
    }

    public void setDelegatedCredentialProvider(DelegatedCredentialProvider delegatedCredentialProvider) {
        this.delegatedCredentialProvider = delegatedCredentialProvider;
    }

    public void addResponseHandler(CaXchangeMessageResponseHandler handler) {
        messageResponseHandlers.add(handler);
    }

    public CaXchangeMessageResponseHandlerSet getMessageResponseHandlers() {
        return messageResponseHandlers;
    }

    public void setMessageResponseHandlers(CaXchangeMessageResponseHandlerSet messageResponseHandlers) {
        this.messageResponseHandlers = messageResponseHandlers;
    }

    public String getCaXchangeURL() {
        return caXchangeURL;
    }

    public void setCaXchangeURL(String caXchangeURL) {
        this.caXchangeURL = caXchangeURL;
    }

    public void setNotificationHandler(MessageWorkflowCallback handler) {
        this.messageWorkflowCallback = handler;
    }

    public Map getMessageTypesMapping() {
        return messageTypesMapping;
    }

    public void setMessageTypesMapping(Map messageTypesMapping) {
        this.messageTypesMapping = messageTypesMapping;
    }

    /**
     * FutureTask that, once the polling Callable completes, inspects the caXchange
     * response and notifies the workflow callback and response handlers under the
     * captured security context.
     */
    class AsynchronousResponseRetreiver extends FutureTask {

        Authentication auth;

        public AsynchronousResponseRetreiver(Callable callable, Authentication auth) {
            super(callable);
            this.auth = auth;
        }

        protected void done() {
            try {
                ResponseMessage response = (ResponseMessage) get();
                if (response != null) {
                    log.debug("Received response from caXchange");
                    log.debug("caXchange Response follows.....\n");
                    log.debug(response.getResponse().toString());
                    String objectId = response.getResponseMetadata().getExternalIdentifier();
                    log.debug("Received response from caXchange for externalId" + objectId);
                    log.debug("Setting authentication object in SecurityContext.");
                    SecurityContext ctx = new SecurityContextImpl();
                    ctx.setAuthentication(this.auth);
                    SecurityContextHolder.setContext(ctx);
                    if (response.getResponse().getResponseStatus().equals(Statuses.SUCCESS)) {
                        log.debug("Received delivery confirmation from caXchange");
                        messageWorkflowCallback.messageSendConfirmed(objectId);
                        // notify response handlers
                        log.debug("Notifying " + messageResponseHandlers.size() + " handlers");
                        messageResponseHandlers.notifyAll(objectId, response.getResponse());
                    }
                    if (response.getResponse().getResponseStatus().equals(Statuses.FAILURE)) {
                        log.debug("Received failure from caXchange");
                        messageWorkflowCallback.messageSendFailed(objectId);
                        ResponseErrors<CCTSApplicationNames> errors = new ResponseErrors<CCTSApplicationNames>();
                        log.debug("looking at caXchange error..");
                        if (response.getResponse().getCaXchangeError() != null
                                && response.getResponse().getCaXchangeError().getErrorDescription() != null) {
                            log.error("Found caXchange error : " + response.getResponse().getCaXchangeError().getErrorDescription());
                            errors.addError(CCTSApplicationNames.CAXCHANGE, response.getResponse().getCaXchangeError().getErrorDescription());
                        } else {
                            log.debug("caXchange Error is null....");
                        }
                        log.debug("looking at aplication level error..");
                        for (TargetResponseMessage tResponse : response.getResponse().getTargetResponse()) {
                            log.debug("looking at aplication " + tResponse.getTargetServiceIdentifier() + "..");
                            // FIX: declared per-iteration so an unrecognized service no longer
                            // inherits the application name matched in a previous iteration
                            CCTSApplicationNames cApplicationName = null;
                            if (tResponse.getTargetServiceIdentifier().indexOf("C3D") > -1) {
                                log.debug("Found c3d response. Processing...");
                                cApplicationName = CCTSApplicationNames.C3D;
                            } else if (tResponse.getTargetServiceIdentifier().indexOf("caAERS") > -1) {
                                log.debug("Found caAERS response. Processing...");
                                cApplicationName = CCTSApplicationNames.CAAERS;
                            } else if (tResponse.getTargetServiceIdentifier().indexOf("LabViewer") > -1) {
                                log.debug("Found CTODS response. Processing...");
                                cApplicationName = CCTSApplicationNames.CTODS;
                            } else if (tResponse.getTargetServiceIdentifier().indexOf("psc") > -1) {
                                log.debug("Found PSC response. Processing...");
                                cApplicationName = CCTSApplicationNames.PSC;
                            }
                            log.debug("App:" + cApplicationName);
                            if (tResponse.getTargetError() != null && tResponse.getTargetError().getErrorDescription() != null) {
                                log.debug("Error: " + tResponse.getTargetError().getErrorDescription());
                                if (cApplicationName != null) {
                                    String errorString = tResponse.getTargetError().getErrorDescription();
                                    String errorCode = tResponse.getTargetError().getErrorCode();
                                    // FIX: was `errorCode != null || !errorCode.equalsIgnoreCase("")`,
                                    // which NPEs when errorCode is null and prefixes an empty code
                                    // otherwise; prefix only when a non-empty code is present
                                    errorString = errorCode != null && !errorCode.equalsIgnoreCase("")
                                            ? errorCode + " : " + errorString : errorString;
                                    log.debug("Found error in response : " + errorString);
                                    errors.addError(cApplicationName, errorString);
                                }
                            } else {
                                log.debug("Error is null");
                            }
                        }
                        messageWorkflowCallback.recordError(objectId, errors);
                    }
                }
            } catch (InterruptedException e) {
                log.warn(e);
                // restore the interrupt flag for the executor thread
                Thread.currentThread().interrupt();
            } catch (ExecutionException e) {
                log.warn(e);
            }
        }
    }

    /**
     * Polls the caXchange response service until a response arrives or the configured
     * timeout (in seconds) elapses, in which case it records an acknowledgment failure
     * and returns null.
     */
    class SynchronousResponseProcessor implements Callable {

        CaXchangeResponseServiceClient responseService;

        String objectId;

        private MessageWorkflowCallback messageWorkflowCallback;

        private long startTime;

        private int timeout;

        public SynchronousResponseProcessor(CaXchangeResponseServiceReference responseRef,
                MessageWorkflowCallback messageWorkflowCallback, String objectId, GlobusCredential proxy,
                int timeout) throws org.apache.axis.types.URI.MalformedURIException, RemoteException {
            responseService = new CaXchangeResponseServiceClient(responseRef.getEndpointReference(), proxy);
            this.messageWorkflowCallback = messageWorkflowCallback;
            this.objectId = objectId;
            this.timeout = timeout;
        }

        public ResponseMessage call() throws Exception {
            if (startTime == 0L) {
                startTime = System.currentTimeMillis();
            }
            // FIX: the original retried by recursive self-calls (one stack frame per
            // 3-second poll) and logged a hard-coded "60 seconds"; poll in a loop instead
            while (true) {
                long elapsedTime = (System.currentTimeMillis() - startTime) / 1000;
                log.debug("Elapsed time : " + elapsedTime + " seconds");
                if (elapsedTime > timeout) {
                    log.debug("Giving up. caXchange never returned a response for more than " + timeout
                            + " seconds. Recording Error.");
                    messageWorkflowCallback.messageAcknowledgmentFailed(objectId);
                    return null;
                }
                try {
                    log.debug("Checking caXchange for response");
                    return responseService.getResponse();
                } catch (RemoteException e) {
                    //sleep for 3 seconds and check again
                    log.info("Response not yet ready. Waiting...");
                    Thread.sleep(3000);
                }
            }
        }

        protected void finalize() throws Throwable {
            log.debug("Killing listening thread");
            super.finalize();
        }
    }

    // Dumps every component of the response EPR address at debug level.
    private void logEPR(EndpointReferenceType endpointReference) {
        //logging epr info
        log.debug("logging EPR info..");
        log.debug(endpointReference.getAddress());
        log.debug(endpointReference.getAddress().toString());
        log.debug(endpointReference.getAddress().getHost());
        log.debug(endpointReference.getAddress().getPath());
        log.debug(endpointReference.getAddress().getFragment());
        log.debug(endpointReference.getAddress().getQueryString());
        log.debug(endpointReference.getAddress().getRegBasedAuthority());
        log.debug(endpointReference.getAddress().getScheme());
        log.debug(endpointReference.getAddress().getSchemeSpecificPart());
        log.debug(endpointReference.getAddress().getUserinfo());
        log.debug(endpointReference.getAddress().getPort());
    }

    public DelegatedCredentialProvider getDelegatedCredentialProvider() {
        return delegatedCredentialProvider;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.builder.component.dsl;

import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.nagios.NagiosComponent;

/**
 * Send passive checks to Nagios using JSendNSCA.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface NagiosComponentBuilderFactory {

    /**
     * Nagios (camel-nagios)
     * Send passive checks to Nagios using JSendNSCA.
     *
     * Category: monitoring
     * Since: 2.3
     * Maven coordinates: org.apache.camel:camel-nagios
     *
     * @return the dsl builder
     */
    @Deprecated
    static NagiosComponentBuilder nagios() {
        return new NagiosComponentBuilderImpl();
    }

    /**
     * Builder for the Nagios component.
     */
    interface NagiosComponentBuilder extends ComponentBuilder<NagiosComponent> {
        /**
         * Connection timeout in millis.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Default: 5000
         * Group: producer
         *
         * @param connectionTimeout the value to set
         * @return the dsl builder
         */
        default NagiosComponentBuilder connectionTimeout(int connectionTimeout) {
            doSetProperty("connectionTimeout", connectionTimeout);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default NagiosComponentBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Sending timeout in millis.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Default: 5000
         * Group: producer
         *
         * @param timeout the value to set
         * @return the dsl builder
         */
        default NagiosComponentBuilder timeout(int timeout) {
            doSetProperty("timeout", timeout);
            return this;
        }
        /**
         * Whether autowiring is enabled. This is used for automatic autowiring
         * options (the option must be marked as autowired) by looking up in the
         * registry to find if there is a single instance of matching type,
         * which then gets configured on the component. This can be used for
         * automatic configuring JDBC data sources, JMS connection factories,
         * AWS Clients, etc.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: true
         * Group: advanced
         *
         * @param autowiredEnabled the value to set
         * @return the dsl builder
         */
        default NagiosComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
            doSetProperty("autowiredEnabled", autowiredEnabled);
            return this;
        }
        /**
         * To use a shared NagiosConfiguration.
         *
         * The option is a:
         * &lt;code&gt;org.apache.camel.component.nagios.NagiosConfiguration&lt;/code&gt; type.
         *
         * Group: advanced
         *
         * @param configuration the value to set
         * @return the dsl builder
         */
        default NagiosComponentBuilder configuration(
                org.apache.camel.component.nagios.NagiosConfiguration configuration) {
            doSetProperty("configuration", configuration);
            return this;
        }
        /**
         * To specify an encryption method.
         *
         * The option is a:
         * &lt;code&gt;com.googlecode.jsendnsca.encryption.Encryption&lt;/code&gt; type.
         *
         * Group: security
         *
         * @param encryption the value to set
         * @return the dsl builder
         */
        default NagiosComponentBuilder encryption(
                com.googlecode.jsendnsca.encryption.Encryption encryption) {
            doSetProperty("encryption", encryption);
            return this;
        }
        /**
         * Password to be authenticated when sending checks to Nagios.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: security
         *
         * @param password the value to set
         * @return the dsl builder
         */
        default NagiosComponentBuilder password(java.lang.String password) {
            doSetProperty("password", password);
            return this;
        }
    }

    // Concrete builder: instantiates the component and routes each property name
    // either to the component itself or to its (lazily created) NagiosConfiguration.
    class NagiosComponentBuilderImpl
            extends
                AbstractComponentBuilder<NagiosComponent>
            implements
                NagiosComponentBuilder {
        @Override
        protected NagiosComponent buildConcreteComponent() {
            return new NagiosComponent();
        }
        // Lazily creates the shared NagiosConfiguration on first configuration-level
        // property set, so option order does not matter.
        private org.apache.camel.component.nagios.NagiosConfiguration getOrCreateConfiguration(
                org.apache.camel.component.nagios.NagiosComponent component) {
            if (component.getConfiguration() == null) {
                component.setConfiguration(new org.apache.camel.component.nagios.NagiosConfiguration());
            }
            return component.getConfiguration();
        }
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            switch (name) {
            // connectionTimeout/timeout/encryption/password live on the configuration;
            // lazyStartProducer/autowiredEnabled/configuration live on the component
            case "connectionTimeout": getOrCreateConfiguration((NagiosComponent) component).setConnectionTimeout((int) value); return true;
            case "lazyStartProducer": ((NagiosComponent) component).setLazyStartProducer((boolean) value); return true;
            case "timeout": getOrCreateConfiguration((NagiosComponent) component).setTimeout((int) value); return true;
            case "autowiredEnabled": ((NagiosComponent) component).setAutowiredEnabled((boolean) value); return true;
            case "configuration": ((NagiosComponent) component).setConfiguration((org.apache.camel.component.nagios.NagiosConfiguration) value); return true;
            case "encryption": getOrCreateConfiguration((NagiosComponent) component).setEncryption((com.googlecode.jsendnsca.encryption.Encryption) value); return true;
            case "password": getOrCreateConfiguration((NagiosComponent) component).setPassword((java.lang.String) value); return true;
            default: return false;
            }
        }
    }
}
/*
 * Copyright (c) 1995, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*-
 *      news stream opener
 */

package sun.net.www;

import java.io.*;
import java.util.Collections;
import java.util.*;

/** An RFC 844 or MIME message header.  Includes methods
    for parsing headers from incoming streams, fetching
    values, setting values, and printing headers.
    Key values of null are legal: they indicate lines in
    the header that don't have a valid key, but do have
    a value (this isn't legal according to the standard,
    but lines like this are everywhere).

    Internally the header is a pair of parallel arrays (keys/values) of
    length >= nkeys; only the first nkeys slots are meaningful. Duplicate
    keys are allowed and order is preserved. Key lookups are
    case-insensitive throughout. Most accessors/mutators synchronize on
    this instance.
*/
public
class MessageHeader {
    // Parallel arrays: keys[i] pairs with values[i]; slots >= nkeys are unused.
    private String keys[];
    private String values[];
    private int nkeys;

    public MessageHeader () {
        grow();
    }

    /** Builds a header by parsing the given stream (see parseHeader). */
    public MessageHeader (InputStream is) throws java.io.IOException {
        parseHeader(is);
    }

    /**
     * Returns list of header names in a comma separated list
     */
    public synchronized String getHeaderNamesInList() {
        StringJoiner joiner = new StringJoiner(",");
        for (int i=0; i<nkeys; i++) {
            joiner.add(keys[i]);
        }
        return joiner.toString();
    }

    /**
     * Reset a message header (all key/values removed)
     */
    public synchronized void reset() {
        keys = null;
        values = null;
        nkeys = 0;
        grow();   // re-allocate the initial arrays so add() can proceed
    }

    /**
     * Find the value that corresponds to this key.
     * It finds only the first occurrence of the key
     * (scanning from the END of the list, so "first" here means
     * the last-added matching entry).
     * @param k the key to find. A null key matches entries whose key is null.
     * @return null if not found.
     */
    public synchronized String findValue(String k) {
        if (k == null) {
            for (int i = nkeys; --i >= 0;)
                if (keys[i] == null)
                    return values[i];
        } else
            for (int i = nkeys; --i >= 0;) {
                if (k.equalsIgnoreCase(keys[i]))
                    return values[i];
            }
        return null;
    }

    // return the location (index) of the key, or -1 if absent.
    // Scans from the end; reference equality handles the k == null case.
    public synchronized int getKey(String k) {
        for (int i = nkeys; --i >= 0;)
            if ((keys[i] == k) ||
                (k != null && k.equalsIgnoreCase(keys[i])))
                return i;
        return -1;
    }

    /** Returns the key at index n, or null if n is out of range. */
    public synchronized String getKey(int n) {
        if (n < 0 || n >= nkeys) return null;
        return keys[n];
    }

    /** Returns the value at index n, or null if n is out of range. */
    public synchronized String getValue(int n) {
        if (n < 0 || n >= nkeys) return null;
        return values[n];
    }

    /** Deprecated: Use multiValueIterator() instead.
     *
     *  Find the next value that corresponds to this key.
     *  It finds the first value that follows v. To iterate
     *  over all the values of a key use:
     *  <pre>
     *          for(String v=h.findValue(k); v!=null; v=h.findNextValue(k, v)) {
     *              ...
     *          }
     *  </pre>
     *  Note: v is matched by reference (==), not equals(), so the caller
     *  must pass back the exact String object previously returned.
     */
    public synchronized String findNextValue(String k, String v) {
        boolean foundV = false;
        if (k == null) {
            for (int i = nkeys; --i >= 0;)
                if (keys[i] == null)
                    if (foundV)
                        return values[i];
                    else if (values[i] == v)
                        foundV = true;
        } else
            for (int i = nkeys; --i >= 0;)
                if (k.equalsIgnoreCase(keys[i]))
                    if (foundV)
                        return values[i];
                    else if (values[i] == v)
                        foundV = true;
        return null;
    }

    /**
     * Removes bare Negotiate and Kerberos headers when an "NTLM ..."
     * appears. All Performed on headers with key being k.
     * @return true if there is a change
     */
    public boolean filterNTLMResponses(String k) {
        boolean found = false;
        // Pass 1: is there any "NTLM <token>" value under key k?
        for (int i=0; i<nkeys; i++) {
            if (k.equalsIgnoreCase(keys[i])
                    && values[i] != null && values[i].length() > 5
                    && values[i].substring(0, 5).equalsIgnoreCase("NTLM ")) {
                found = true;
                break;
            }
        }
        if (found) {
            // Pass 2: compact the arrays in place, dropping bare
            // "Negotiate"/"Kerberos" entries under key k.
            int j = 0;
            for (int i=0; i<nkeys; i++) {
                if (k.equalsIgnoreCase(keys[i]) && (
                        "Negotiate".equalsIgnoreCase(values[i]) ||
                        "Kerberos".equalsIgnoreCase(values[i]))) {
                    continue;
                }
                if (i != j) {
                    keys[j] = keys[i];
                    values[j] = values[i];
                }
                j++;
            }
            if (j != nkeys) {
                nkeys = j;
                return true;
            }
        }
        return false;
    }

    /** Iterator over all values stored under one key, in order.
     *  Synchronizes each step on the supplied lock (the enclosing
     *  MessageHeader), but the iteration as a whole is not atomic:
     *  concurrent mutation between calls can still change what is seen. */
    class HeaderIterator implements Iterator<String> {
        int index = 0;
        int next = -1;          // index of the value hasNext() found
        String key;
        boolean haveNext = false;
        Object lock;

        public HeaderIterator (String k, Object lock) {
            key = k;
            this.lock = lock;
        }
        public boolean hasNext () {
            synchronized (lock) {
                if (haveNext) {
                    return true;
                }
                while (index < nkeys) {
                    if (key.equalsIgnoreCase (keys[index])) {
                        haveNext = true;
                        next = index++;
                        return true;
                    }
                    index ++;
                }
                return false;
            }
        }
        public String next() {
            synchronized (lock) {
                if (haveNext) {
                    haveNext = false;
                    return values [next];
                }
                if (hasNext()) {
                    return next();
                } else {
                    throw new NoSuchElementException ("No more elements");
                }
            }
        }
        public void remove () {
            throw new UnsupportedOperationException ("remove not allowed");
        }
    }

    /**
     * return an Iterator that returns all values of a particular
     * key in sequence
     */
    public Iterator<String> multiValueIterator (String k) {
        return new HeaderIterator (k, this);
    }

    public synchronized Map<String, List<String>> getHeaders() {
        return getHeaders(null);
    }

    public synchronized Map<String, List<String>> getHeaders(String[] excludeList) {
        return filterAndAddHeaders(excludeList, null);
    }

    /**
     * Builds an unmodifiable Map view of the header.
     * @param excludeList keys to omit (case-insensitive); may be null
     * @param include extra key/values to merge in after filtering; may be null
     * @return unmodifiable map of key to unmodifiable list of values.
     *         Note: iteration below runs from the end, so per-key value
     *         lists come out in reverse insertion order.
     */
    public synchronized Map<String, List<String>> filterAndAddHeaders(
            String[] excludeList, Map<String, List<String>> include) {
        boolean skipIt = false;
        Map<String, List<String>> m = new HashMap<>();
        for (int i = nkeys; --i >= 0;) {
            if (excludeList != null) {
                // check if the key is in the excludeList.
                // if so, don't include it in the Map.
                for (int j = 0; j < excludeList.length; j++) {
                    if ((excludeList[j] != null) &&
                        (excludeList[j].equalsIgnoreCase(keys[i]))) {
                        skipIt = true;
                        break;
                    }
                }
            }
            if (!skipIt) {
                List<String> l = m.get(keys[i]);
                if (l == null) {
                    l = new ArrayList<>();
                    m.put(keys[i], l);
                }
                l.add(values[i]);
            } else {
                // reset the flag
                skipIt = false;
            }
        }

        if (include != null) {
            for (Map.Entry<String,List<String>> entry: include.entrySet()) {
                List<String> l = m.get(entry.getKey());
                if (l == null) {
                    l = new ArrayList<>();
                    m.put(entry.getKey(), l);
                }
                l.addAll(entry.getValue());
            }
        }

        // Freeze every value list, then the map itself.
        for (String key : m.keySet()) {
            m.put(key, Collections.unmodifiableList(m.get(key)));
        }

        return Collections.unmodifiableMap(m);
    }

    /** Check if a line of message header looks like a request line.
     * This method does not perform a full validation but simply
     * returns false if the line does not end with 'HTTP/[1-9].[0-9]'
     * @param line the line to check.
     * @return true if the line might be a request line.
     */
    private static boolean isRequestline(String line) {
        String k = line.trim();
        int i = k.lastIndexOf(' ');
        if (i <= 0) return false;
        int len = k.length();
        if (len - i < 9) return false;  // need room for "HTTP/x.y" after the space

        char c1 = k.charAt(len-3);
        char c2 = k.charAt(len-2);
        char c3 = k.charAt(len-1);

        if (c1 < '1' || c1 > '9') return false;
        if (c2 != '.') return false;
        if (c3 < '0' || c3 > '9') return false;

        return (k.substring(i+1, len-3).equalsIgnoreCase("HTTP/"));
    }


    /** Prints the key-value pairs represented by this
        header. Also prints the RFC required blank line
        at the end. Omits pairs with a null key. Omits
        colon if key-value pair is the requestline. */
    public void print(PrintStream p) {
        // no synchronization: use cloned arrays instead, so we never
        // hold the lock while writing to the (possibly slow) stream.
        String[] k; String[] v; int n;
        synchronized (this) { n = nkeys; k = keys.clone(); v = values.clone(); }
        print(n, k, v, p);
    }


    /** Prints the key-value pairs represented by this
        header. Also prints the RFC required blank line
        at the end. Omits pairs with a null key. Omits
        colon if key-value pair is the requestline. */
    private static void print(int nkeys, String[] keys, String[] values, PrintStream p) {
        for (int i = 0; i < nkeys; i++)
            if (keys[i] != null) {
                StringBuilder sb = new StringBuilder(keys[i]);
                if (values[i] != null) {
                    sb.append(": " + values[i]);
                } else if (i != 0 || !isRequestline(keys[i])) {
                    // value-less header still gets a colon, unless the very
                    // first entry is a request line (e.g. "GET / HTTP/1.1")
                    sb.append(":");
                }
                p.print(sb.append("\r\n"));
            }
        p.print("\r\n");
        p.flush();
    }

    /** Adds a key value pair to the end of the
        header.  Duplicates are allowed */
    public synchronized void add(String k, String v) {
        grow();
        keys[nkeys] = k;
        values[nkeys] = v;
        nkeys++;
    }

    /** Prepends a key value pair to the beginning of the
        header.  Duplicates are allowed */
    public synchronized void prepend(String k, String v) {
        grow();
        // shift everything up one slot to open index 0
        for (int i = nkeys; i > 0; i--) {
            keys[i] = keys[i-1];
            values[i] = values[i-1];
        }
        keys[0] = k;
        values[0] = v;
        nkeys++;
    }

    /** Overwrite the previous key/val pair at location 'i'
     * with the new k/v.  If the index didn't exist before
     * the key/val is simply tacked onto the end. Negative
     * indices are silently ignored. */

    public synchronized void set(int i, String k, String v) {
        grow();
        if (i < 0) {
            return;
        } else if (i >= nkeys) {
            add(k, v);
        } else {
            keys[i] = k;
            values[i] = v;
        }
    }


    /** grow the key/value arrays as needed (by 4 slots at a time) */

    private void grow() {
        if (keys == null || nkeys >= keys.length) {
            String[] nk = new String[nkeys + 4];
            String[] nv = new String[nkeys + 4];
            if (keys != null)
                System.arraycopy(keys, 0, nk, 0, nkeys);
            if (values != null)
                System.arraycopy(values, 0, nv, 0, nkeys);
            keys = nk;
            values = nv;
        }
    }

    /**
     * Remove the key from the header. If there are multiple values under
     * the same key, they are all removed.
     * Nothing is done if the key doesn't exist.
     * After a remove, the other pairs' order are not changed.
     * @param k the key to remove. null removes entries with a null key.
     */
    public synchronized void remove(String k) {
        if(k == null) {
            for (int i = 0; i < nkeys; i++) {
                // repeatedly shift the tail down while slot i still matches
                // (NOTE(review): the i < nkeys test runs AFTER keys[i] is
                // read; safe here only because the array is never shrunk)
                while (keys[i] == null && i < nkeys) {
                    for(int j=i; j<nkeys-1; j++) {
                        keys[j] = keys[j+1];
                        values[j] = values[j+1];
                    }
                    nkeys--;
                }
            }
        } else {
            for (int i = 0; i < nkeys; i++) {
                while (k.equalsIgnoreCase(keys[i]) && i < nkeys) {
                    for(int j=i; j<nkeys-1; j++) {
                        keys[j] = keys[j+1];
                        values[j] = values[j+1];
                    }
                    nkeys--;
                }
            }
        }
    }

    /** Sets the value of a key.  If the key already
        exists in the header, it's value will be
        changed.  Otherwise a new key/value pair will
        be added to the end of the header.
        (Scans from the end, so with duplicates the last
        occurrence is the one updated.) */
    public synchronized void set(String k, String v) {
        for (int i = nkeys; --i >= 0;)
            if (k.equalsIgnoreCase(keys[i])) {
                values[i] = v;
                return;
            }
        add(k, v);
    }

    /** Set's the value of a key only if there is no
     *  key with that value already.
     */

    public synchronized void setIfNotSet(String k, String v) {
        if (findValue(k) == null) {
            add(k, v);
        }
    }

    /** Convert a message-id string to canonical form (strips off
        leading and trailing {@literal <>s}) */
    public static String canonicalID(String id) {
        if (id == null)
            return "";
        int st = 0;
        int len = id.length();
        boolean substr = false;
        int c;

        // also strips any leading/trailing control chars and spaces (<= ' ')
        while (st < len && ((c = id.charAt(st)) == '<' ||
                            c <= ' ')) {
            st++;
            substr = true;
        }
        while (st < len && ((c = id.charAt(len - 1)) == '>' ||
                            c <= ' ')) {
            len--;
            substr = true;
        }
        return substr ? id.substring(st, len) : id;
    }

    /** Parse a MIME header from an input stream.
        Replaces any existing entries, then delegates to mergeHeader. */
    public void parseHeader(InputStream is) throws java.io.IOException {
        synchronized (this) {
            nkeys = 0;
        }
        mergeHeader(is);
    }

    /** Parse and merge a MIME header from an input stream.
        Reads byte-at-a-time until a blank line (end of header).
        Handles CRLF / bare LF / bare CR line endings and folded
        continuation lines (a line starting with whitespace extends the
        previous value, joined with a single space). A line with no ':'
        before any whitespace is stored under a null key. */
    @SuppressWarnings("fallthrough")
    public void mergeHeader(InputStream is) throws java.io.IOException {
        if (is == null)
            return;
        char s[] = new char[10];       // accumulates one logical header line; doubled on demand
        int firstc = is.read();
        // a CR/LF/EOF as the very first char of a line terminates the header
        while (firstc != '\n' && firstc != '\r' && firstc >= 0) {
            int len = 0;
            int keyend = -1;           // index just past the key, or -1 while unknown
            int c;
            boolean inKey = firstc > ' ';
            s[len++] = (char) firstc;
            parseloop:{
                while ((c = is.read()) >= 0) {
                    switch (c) {
                      case ':':
                        // first ':' while still inside the key marks the key end
                        if (inKey && len > 0)
                            keyend = len;
                        inKey = false;
                        break;
                      case '\t':
                        c = ' ';       // normalize tabs to spaces
                        /*fall through*/
                      case ' ':
                        inKey = false; // whitespace before ':' => keyless line
                        break;
                      case '\r':
                      case '\n':
                        // swallow CRLF (and a stray CR after it), then peek at
                        // the next line's first char to detect a continuation
                        firstc = is.read();
                        if (c == '\r' && firstc == '\n') {
                            firstc = is.read();
                            if (firstc == '\r')
                                firstc = is.read();
                        }
                        if (firstc == '\n' || firstc == '\r' || firstc > ' ')
                            break parseloop;
                        /* continuation: fold the next line in, joined by ' ' */
                        c = ' ';
                        break;
                    }
                    if (len >= s.length) {
                        char ns[] = new char[s.length * 2];
                        System.arraycopy(s, 0, ns, 0, len);
                        s = ns;
                    }
                    s[len++] = (char) c;
                }
                firstc = -1;           // EOF mid-line: make the outer loop stop
            }
            // strip trailing whitespace from the logical line
            while (len > 0 && s[len - 1] <= ' ')
                len--;
            String k;
            if (keyend <= 0) {
                k = null;              // no key found: store under null key
                keyend = 0;
            } else {
                k = String.copyValueOf(s, 0, keyend);
                // skip the ':' and any whitespace preceding the value
                if (keyend < len && s[keyend] == ':')
                    keyend++;
                while (keyend < len && s[keyend] <= ' ')
                    keyend++;
            }
            String v;
            if (keyend >= len)
                v = new String();      // key with empty value
            else
                v = String.copyValueOf(s, keyend, len - keyend);
            add(k, v);
        }
    }

    /** Debug representation: identity string plus every key/value pair. */
    public synchronized String toString() {
        String result = super.toString() + nkeys + " pairs: ";
        for (int i = 0; i < keys.length && i < nkeys; i++) {
            result += "{"+keys[i]+": "+values[i]+"}";
        }
        return result;
    }
}