gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * (c) Copyright 2018 Palantir Technologies Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.palantir.atlasdb.sweep.queue;

import static com.google.common.base.Preconditions.checkState;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.PeekingIterator;
import com.google.common.collect.Streams;
import com.palantir.atlasdb.AtlasDbConstants;
import com.palantir.atlasdb.keyvalue.api.Cell;
import com.palantir.atlasdb.keyvalue.api.CellReference;
import com.palantir.atlasdb.keyvalue.api.ColumnRangeSelection;
import com.palantir.atlasdb.keyvalue.api.ImmutableTargetedSweepMetadata;
import com.palantir.atlasdb.keyvalue.api.KeyValueService;
import com.palantir.atlasdb.keyvalue.api.RowColumnRangeIterator;
import com.palantir.atlasdb.keyvalue.api.TableReference;
import com.palantir.atlasdb.keyvalue.api.TargetedSweepMetadata;
import com.palantir.atlasdb.keyvalue.api.Value;
import com.palantir.atlasdb.keyvalue.api.WriteReference;
import com.palantir.atlasdb.keyvalue.api.WriteReferencePersister;
import com.palantir.atlasdb.logging.LoggingArgs;
import com.palantir.atlasdb.schema.generated.SweepableCellsTable;
import com.palantir.atlasdb.schema.generated.SweepableCellsTable.SweepableCellsColumnValue;
import com.palantir.atlasdb.schema.generated.SweepableCellsTable.SweepableCellsRow;
import com.palantir.atlasdb.schema.generated.TargetedSweepTableFactory;
import com.palantir.atlasdb.sweep.CommitTsCache;
import com.palantir.atlasdb.sweep.metrics.TargetedSweepMetrics;
import com.palantir.atlasdb.sweep.queue.id.SweepTableIndices;
import com.palantir.atlasdb.transaction.impl.TransactionConstants;
import com.palantir.atlasdb.transaction.service.TransactionService;
import com.palantir.logsafe.SafeArg;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Queue table of cells that targeted sweep must eventually process.
 *
 * <p>Writes are stored under a row keyed by fine timestamp partition plus shard/strategy metadata.
 * When a single transaction writes more cells than {@code SweepQueueUtils.MAX_CELLS_GENERIC}, the
 * writes are moved into "dedicated" rows and the normal row instead stores a single reference entry
 * whose negative write index encodes the number of dedicated rows needed (see
 * {@link #entryIndicatingNumberOfRequiredRows(List)}).
 */
public class SweepableCells extends SweepQueueTable {
    private static final Logger log = LoggerFactory.getLogger(SweepableCells.class);

    // Caches start timestamp -> commit timestamp lookups against the transaction service.
    private final CommitTsCache commitTsCache;
    private final WriteReferencePersister writeReferencePersister;

    // Placeholder write reference used for the entry that points at dedicated rows; its table/cell
    // contents are never swept, only the (negative) write index of its column is meaningful.
    private static final WriteReference DUMMY = WriteReference.of(
            TableReference.createFromFullyQualifiedName("dum.my"), Cell.create(new byte[] {0}, new byte[] {0}), false);

    public SweepableCells(
            KeyValueService kvs,
            WriteInfoPartitioner partitioner,
            TargetedSweepMetrics metrics,
            TransactionService transactionService) {
        // NOTE(review): getSweepableCellsTable(null) — the generated table factory appears to
        // tolerate a null transaction manager here since only the table ref is used; confirm.
        super(kvs, TargetedSweepTableFactory.of().getSweepableCellsTable(null).getTableRef(), partitioner, metrics);
        this.commitTsCache = CommitTsCache.create(transactionService);
        this.writeReferencePersister = new WriteReferencePersister(new SweepTableIndices(kvs));
    }

    /**
     * When the batch of writes is large enough to require dedicated rows, returns the single
     * reference entry pointing at them; otherwise returns no extra cells.
     */
    @Override
    Map<Cell, byte[]> populateReferences(PartitionInfo partitionInfo, List<WriteInfo> writes) {
        boolean dedicate = writes.size() > SweepQueueUtils.MAX_CELLS_GENERIC;
        if (dedicate) {
            return addReferenceToDedicatedRows(partitionInfo, writes);
        } else {
            return ImmutableMap.of();
        }
    }

    /** Serializes every write into its queue cell, spilling into dedicated rows when necessary. */
    @Override
    Map<Cell, byte[]> populateCells(PartitionInfo partitionInfo, List<WriteInfo> writes) {
        Map<Cell, byte[]> cells = new HashMap<>();
        boolean dedicate = writes.size() > SweepQueueUtils.MAX_CELLS_GENERIC;
        long index = 0;
        for (WriteInfo write : writes) {
            cells.putAll(addWrite(partitionInfo, write, dedicate, index));
            index++;
        }
        return cells;
    }

    private Map<Cell, byte[]> addReferenceToDedicatedRows(PartitionInfo info, List<WriteInfo> writes) {
        // Write index 0 in the non-dedicated row carries the DUMMY reference; the actual payload is
        // the negative row count produced below.
        return addCell(info, DUMMY, false, 0, entryIndicatingNumberOfRequiredRows(writes));
    }

    private long entryIndicatingNumberOfRequiredRows(List<WriteInfo> writes) {
        // Negative of ceil(writes / MAX_CELLS_DEDICATED); the sign marks "reference to dedicated rows".
        return -(1 + (writes.size() - 1) / SweepQueueUtils.MAX_CELLS_DEDICATED);
    }

    private Map<Cell, byte[]> addCell(
            PartitionInfo info, WriteReference writeRef, boolean isDedicatedRow, long dedicatedRowNumber, long writeIndex) {
        SweepableCellsRow row = computeRow(info, isDedicatedRow, dedicatedRowNumber);
        SweepableCellsColumnValue colVal = createColVal(info.timestamp(), writeIndex, writeRef);
        return ImmutableMap.of(SweepQueueUtils.toCell(row, colVal), colVal.persistValue());
    }

    /** Builds the row for a write: dedicated rows key on the exact timestamp, normal rows on the fine partition. */
    private SweepableCellsRow computeRow(PartitionInfo info, boolean isDedicatedRow, long dedicatedRowNumber) {
        TargetedSweepMetadata metadata = ImmutableTargetedSweepMetadata.builder()
                .conservative(info.isConservative().isTrue())
                .dedicatedRow(isDedicatedRow)
                .shard(info.shard())
                .dedicatedRowNumber(dedicatedRowNumber)
                .build();
        long tsOrPartition = getTimestampOrPartition(info, isDedicatedRow);
        return SweepableCellsRow.of(tsOrPartition, metadata.persistToBytes());
    }

    /** Builds the non-dedicated row for a given fine partition and shard/strategy (used when reading/deleting). */
    private SweepableCellsRow computeRow(long partitionFine, ShardAndStrategy shardStrategy) {
        TargetedSweepMetadata metadata = ImmutableTargetedSweepMetadata.builder()
                .conservative(shardStrategy.isConservative())
                .dedicatedRow(false)
                .shard(shardStrategy.shard())
                .dedicatedRowNumber(0)
                .build();
        return SweepableCellsRow.of(partitionFine, metadata.persistToBytes());
    }

    private long getTimestampOrPartition(PartitionInfo info, boolean isDedicatedRow) {
        return isDedicatedRow ? info.timestamp() : SweepQueueUtils.tsPartitionFine(info.timestamp());
    }

    private SweepableCellsColumnValue createColVal(long ts, long index, WriteReference writeRef) {
        // Column key is (timestamp modulo fine granularity, write index within the transaction).
        SweepableCellsTable.SweepableCellsColumn col = SweepableCellsTable.SweepableCellsColumn.of(tsMod(ts), index);
        return SweepableCellsColumnValue.of(col, writeReferencePersister.persist(writeRef));
    }

    private static long tsMod(long timestamp) {
        return timestamp % SweepQueueUtils.TS_FINE_GRANULARITY;
    }

    /**
     * Reads one batch of sweepable writes for the given shard/strategy and fine partition, covering
     * start timestamps in {@code (minTsExclusive, sweepTs)}, cleaning up aborted writes as a side
     * effect and reporting how far sweep has progressed.
     */
    SweepBatch getBatchForPartition(
            ShardAndStrategy shardStrategy, long partitionFine, long minTsExclusive, long sweepTs) {
        SweepableCellsRow row = computeRow(partitionFine, shardStrategy);
        RowColumnRangeIterator resultIterator = getRowColumnRange(row, partitionFine, minTsExclusive, sweepTs);
        PeekingIterator<Map.Entry<Cell, Value>> peekingResultIterator = Iterators.peekingIterator(resultIterator);
        WriteBatch writeBatch = getBatchOfWrites(row, peekingResultIterator, sweepTs);
        Multimap<Long, WriteInfo> writesByStartTs = writeBatch.writesByStartTs;
        int entriesRead = writesByStartTs.size();
        maybeMetrics.ifPresent(metrics -> metrics.updateEntriesRead(shardStrategy, entriesRead));
        log.debug("Read {} entries from the sweep queue.", SafeArg.of("number", entriesRead));
        TimestampsToSweep tsToSweep = getTimestampsToSweepDescendingAndCleanupAborted(
                shardStrategy, minTsExclusive, sweepTs, writesByStartTs);
        Collection<WriteInfo> writes = getWritesToSweep(writesByStartTs, tsToSweep.timestampsDescending());
        DedicatedRows filteredDedicatedRows = getDedicatedRowsToClear(writeBatch.dedicatedRows, tsToSweep);
        long lastSweptTs = getLastSweptTs(tsToSweep, peekingResultIterator, partitionFine, sweepTs);
        return SweepBatch.of(writes, filteredDedicatedRows, lastSweptTs, tsToSweep.processedAll(), entriesRead);
    }

    /** Keeps only dedicated rows whose (timestamp-keyed) partition is actually being swept in this batch. */
    private DedicatedRows getDedicatedRowsToClear(List<SweepableCellsRow> rows, TimestampsToSweep tsToSweep) {
        return DedicatedRows.of(rows.stream()
                .filter(row -> {
                    TargetedSweepMetadata metadata =
                            TargetedSweepMetadata.BYTES_HYDRATOR.hydrateFromBytes(row.getMetadata());
                    checkState(metadata.dedicatedRow(), "Row not a dedicated row", SafeArg.of("row", row));
                    // Dedicated rows key on the exact start timestamp, so getTimestampPartition() is that timestamp.
                    return tsToSweep.timestampsDescending().contains(row.getTimestampPartition());
                })
                .collect(Collectors.toList()));
    }

    /**
     * Accumulates writes until the batch size limit; stops early at the first entry known to commit
     * at or after sweepTs. Afterwards, also pulls in any remaining entries sharing a start timestamp
     * already in the batch, so a transaction's writes are never split across batches.
     */
    private WriteBatch getBatchOfWrites(
            SweepableCellsRow row, PeekingIterator<Map.Entry<Cell, Value>> resultIterator, long sweepTs) {
        WriteBatch writeBatch = new WriteBatch();
        while (resultIterator.hasNext() && writeBatch.writesByStartTs.size() < SweepQueueUtils.SWEEP_BATCH_SIZE) {
            Map.Entry<Cell, Value> entry = resultIterator.next();
            SweepableCellsTable.SweepableCellsColumn col = computeColumn(entry);
            long startTs = getTimestamp(row, col);
            if (knownToBeCommittedAfterSweepTs(startTs, sweepTs)) {
                // Cannot sweep past this entry; include it so progress tracking sees it, then stop.
                writeBatch.add(ImmutableList.of(getWriteInfo(startTs, entry.getValue())));
                return writeBatch;
            }
            writeBatch.merge(getWrites(row, col, entry.getValue()));
        }
        // there may be entries remaining with the same start timestamp as the last processed one. If that is the case
        // we want to include these ones as well. This is OK since there are at most MAX_CELLS_GENERIC - 1 of them.
        while (resultIterator.hasNext()) {
            Map.Entry<Cell, Value> entry = resultIterator.peek();
            SweepableCellsTable.SweepableCellsColumn col = computeColumn(entry);
            long timestamp = getTimestamp(row, col);
            if (writeBatch.writesByStartTs.containsKey(timestamp)) {
                writeBatch.merge(getWrites(row, col, entry.getValue()));
                resultIterator.next();
            } else {
                break;
            }
        }
        return writeBatch;
    }

    /** Mutable accumulator pairing queued writes (grouped by start timestamp) with any dedicated rows they came from. */
    private static final class WriteBatch {
        private final Multimap<Long, WriteInfo> writesByStartTs = HashMultimap.create();
        private final List<SweepableCellsRow> dedicatedRows = new ArrayList<>();

        WriteBatch merge(WriteBatch other) {
            writesByStartTs.putAll(other.writesByStartTs);
            dedicatedRows.addAll(other.dedicatedRows);
            return this;
        }

        static WriteBatch single(WriteInfo writeInfo) {
            WriteBatch batch = new WriteBatch();
            return batch.add(ImmutableList.of(writeInfo));
        }

        WriteBatch add(List<SweepableCellsRow> newDedicatedRows, List<WriteInfo> writeInfos) {
            dedicatedRows.addAll(newDedicatedRows);
            return add(writeInfos);
        }

        WriteBatch add(List<WriteInfo> writeInfos) {
            writeInfos.forEach(info -> writesByStartTs.put(info.timestamp(), info));
            return this;
        }
    }

    private RowColumnRangeIterator getRowColumnRange(
            SweepableCellsRow row, long partitionFine, long minTsExclusive, long maxTsExclusive) {
        return getRowsColumnRange(
                ImmutableList.of(row.persistToBytes()),
                columnsBetween(minTsExclusive + 1, maxTsExclusive, partitionFine),
                SweepQueueUtils.BATCH_SIZE_KVS);
    }

    /**
     * Walks the batched start timestamps in ascending order, classifying each transaction:
     * aborted writes are deleted from their tables, committed-before-sweepTs ones are returned for
     * sweeping, and the first one committing at/after sweepTs halts progress just before it.
     */
    private TimestampsToSweep getTimestampsToSweepDescendingAndCleanupAborted(
            ShardAndStrategy shardStrategy,
            long minTsExclusive,
            long sweepTs,
            Multimap<Long, WriteInfo> writesByStartTs) {
        Map<Long, Long> startToCommitTs = commitTsCache.loadBatch(writesByStartTs.keySet());
        Map<TableReference, Multimap<Cell, Long>> cellsToDelete = new HashMap<>();
        List<Long> committedTimestamps = new ArrayList<>();
        long lastSweptTs = minTsExclusive;
        boolean processedAll = true;
        List<Long> sortedStartTimestamps =
                startToCommitTs.keySet().stream().sorted().collect(Collectors.toList());
        for (long startTs : sortedStartTimestamps) {
            long commitTs = startToCommitTs.get(startTs);
            if (commitTs == TransactionConstants.FAILED_COMMIT_TS) {
                lastSweptTs = startTs;
                writesByStartTs.get(startTs).forEach(write -> cellsToDelete
                        .computeIfAbsent(write.tableRef(), ignore -> HashMultimap.create())
                        .put(write.cell(), write.timestamp()));
            } else if (commitTs < sweepTs) {
                lastSweptTs = startTs;
                committedTimestamps.add(startTs);
            } else {
                // Committed at or after sweepTs: cannot be swept yet; stop just before it.
                processedAll = false;
                lastSweptTs = startTs - 1;
                break;
            }
        }
        cellsToDelete.forEach((tableRef, multimap) -> {
            try {
                kvs.delete(tableRef, multimap);
            } catch (Exception exception) {
                if (tableWasDropped(tableRef)) {
                    // this table no longer exists, but had work to do in the sweep queue still;
                    // don't error out on this batch so that the queue cleans up and doesn't constantly retry forever
                    log.info(
                            "Tried to delete {} aborted writes from table {}, "
                                    + "but instead found that the table no longer exists.",
                            SafeArg.of("number", multimap.size()),
                            LoggingArgs.tableRef(tableRef));
                } else {
                    throw exception;
                }
            }
            maybeMetrics.ifPresent(metrics -> metrics.updateAbortedWritesDeleted(shardStrategy, multimap.size()));
            log.info(
                    "Deleted {} aborted writes from table {}.",
                    SafeArg.of("number", multimap.size()),
                    LoggingArgs.tableRef(tableRef));
        });
        return TimestampsToSweep.of(
                ImmutableSortedSet.copyOf(committedTimestamps).descendingSet(), lastSweptTs, processedAll);
    }

    private boolean tableWasDropped(TableReference tableRef) {
        // Dropped tables report empty metadata.
        return Arrays.equals(kvs.getMetadataForTable(tableRef), AtlasDbConstants.EMPTY_TABLE_METADATA);
    }

    /**
     * Deduplicates writes per cell: since startTs iterates in descending order, putIfAbsent keeps
     * only the latest write for each cell reference.
     */
    private Collection<WriteInfo> getWritesToSweep(Multimap<Long, WriteInfo> writesByStartTs, SortedSet<Long> startTs) {
        Map<CellReference, WriteInfo> writesToSweepFor = new HashMap<>();
        startTs.stream()
                .map(writesByStartTs::get)
                .flatMap(Collection::stream)
                .forEach(write -> writesToSweepFor.putIfAbsent(write.writeRef().cellReference(), write));
        return writesToSweepFor.values();
    }

    private long getLastSweptTs(
            TimestampsToSweep startTsCommitted,
            Iterator<Map.Entry<Cell, Value>> resultIterator,
            long partitionFine,
            long maxTsExclusive) {
        if (startTsCommitted.processedAll() && exhaustedAllColumns(resultIterator)) {
            // Everything in the partition was consumed: progress can jump to the partition/sweepTs bound.
            return lastGuaranteedSwept(partitionFine, maxTsExclusive);
        } else {
            return startTsCommitted.maxSwept();
        }
    }

    /** Expands an entry into its writes, following the reference if it points at dedicated rows. */
    private WriteBatch getWrites(SweepableCellsRow row, SweepableCellsTable.SweepableCellsColumn col, Value value) {
        if (isReferenceToDedicatedRows(col)) {
            return writesFromDedicated(row, col);
        } else {
            return WriteBatch.single(getWriteInfo(getTimestamp(row, col), value));
        }
    }

    private boolean isReferenceToDedicatedRows(SweepableCellsTable.SweepableCellsColumn col) {
        // Negative write index marks the DUMMY entry written by addReferenceToDedicatedRows.
        return col.getWriteIndex() < 0;
    }

    private WriteBatch writesFromDedicated(SweepableCellsRow row, SweepableCellsTable.SweepableCellsColumn col) {
        List<SweepableCellsRow> dedicatedRows = computeDedicatedRows(row, col);
        RowColumnRangeIterator iterator =
                getWithColumnRangeAll(Lists.transform(dedicatedRows, SweepableCellsRow::persistToBytes));
        WriteBatch batch = new WriteBatch();
        return batch.add(
                dedicatedRows,
                Streams.stream(iterator)
                        .map(entry -> getWriteInfo(getTimestamp(row, col), entry.getValue()))
                        .collect(Collectors.toList()));
    }

    /** Reconstructs the dedicated row keys referenced by a negative-index entry. */
    private List<SweepableCellsRow> computeDedicatedRows(
            SweepableCellsRow row, SweepableCellsTable.SweepableCellsColumn col) {
        TargetedSweepMetadata metadata = TargetedSweepMetadata.BYTES_HYDRATOR.hydrateFromBytes(row.getMetadata());
        long timestamp = getTimestamp(row, col);
        int numberOfDedicatedRows = writeIndexToNumberOfDedicatedRows(col.getWriteIndex());
        List<SweepableCellsRow> dedicatedRows = new ArrayList<>();
        for (int i = 0; i < numberOfDedicatedRows; i++) {
            byte[] dedicatedMetadata = ImmutableTargetedSweepMetadata.builder()
                    .from(metadata)
                    .dedicatedRow(true)
                    .dedicatedRowNumber(i)
                    .build()
                    .persistToBytes();
            dedicatedRows.add(SweepableCellsRow.of(timestamp, dedicatedMetadata));
        }
        return dedicatedRows;
    }

    private long getTimestamp(SweepableCellsRow row, SweepableCellsTable.SweepableCellsColumn col) {
        // Inverse of (tsPartitionFine, tsMod): reassembles the original start timestamp.
        return row.getTimestampPartition() * SweepQueueUtils.TS_FINE_GRANULARITY + col.getTimestampModulus();
    }

    private boolean knownToBeCommittedAfterSweepTs(long startTs, long sweepTs) {
        // Only consults the cache; an unknown commit timestamp conservatively returns false.
        return commitTsCache
                .loadIfCached(startTs)
                .map(commitTs -> commitTs >= sweepTs)
                .orElse(false);
    }

    private int writeIndexToNumberOfDedicatedRows(long writeIndex) {
        return (int) -writeIndex;
    }

    private RowColumnRangeIterator getWithColumnRangeAll(Iterable<byte[]> rows) {
        return getRowsColumnRange(rows, SweepQueueUtils.ALL_COLUMNS, SweepQueueUtils.BATCH_SIZE_KVS);
    }

    private WriteInfo getWriteInfo(long timestamp, Value value) {
        return WriteInfo.of(
                writeReferencePersister.unpersist(SweepableCellsColumnValue.hydrateValue(value.getContents())),
                timestamp);
    }

    private boolean exhaustedAllColumns(Iterator<Map.Entry<Cell, Value>> resultIterator) {
        return !resultIterator.hasNext();
    }

    private long lastGuaranteedSwept(long partitionFine, long maxTsExclusive) {
        return Math.min(SweepQueueUtils.maxTsForFinePartition(partitionFine), maxTsExclusive - 1);
    }

    void deleteDedicatedRows(DedicatedRows dedicatedRows) {
        List<byte[]> rows = dedicatedRows.getDedicatedRows().stream()
                .map(SweepableCellsRow::persistToBytes)
                .collect(Collectors.toList());
        deleteRows(rows);
    }

    void deleteNonDedicatedRows(ShardAndStrategy shardAndStrategy, Iterable<Long> partitionsFine) {
        List<byte[]> rows = Streams.stream(partitionsFine)
                .map(partitionFine -> computeRow(partitionFine, shardAndStrategy))
                .map(SweepableCellsRow::persistToBytes)
                .collect(Collectors.toList());
        deleteRows(rows);
    }

    private Map<Cell, byte[]> addWrite(PartitionInfo info, WriteInfo write, boolean dedicate, long index) {
        // When dedicating, spread writes across rows of MAX_CELLS_DEDICATED entries each.
        return addCell(
                info,
                write.writeRef(),
                dedicate,
                index / SweepQueueUtils.MAX_CELLS_DEDICATED,
                index % SweepQueueUtils.MAX_CELLS_DEDICATED);
    }

    private SweepableCellsTable.SweepableCellsColumn computeColumn(Map.Entry<Cell, Value> entry) {
        return SweepableCellsTable.SweepableCellsColumn.BYTES_HYDRATOR.hydrateFromBytes(
                entry.getKey().getColumnName());
    }

    /** Column range covering [startTsInclusive, endTsExclusive) clamped to the fine partition's row. */
    private ColumnRangeSelection columnsBetween(long startTsInclusive, long endTsExclusive, long partitionFine) {
        long startIncl = exactColumnOrElseBeginningOfRow(startTsInclusive, partitionFine);
        byte[] startCol = SweepableCellsTable.SweepableCellsColumn.of(startIncl, SweepQueueUtils.MINIMUM_WRITE_INDEX)
                .persistToBytes();
        long endExcl = exactColumnOrElseOneBeyondEndOfRow(endTsExclusive, partitionFine);
        byte[] endCol = SweepableCellsTable.SweepableCellsColumn.of(endExcl, SweepQueueUtils.MINIMUM_WRITE_INDEX)
                .persistToBytes();
        return new ColumnRangeSelection(startCol, endCol);
    }

    private long exactColumnOrElseOneBeyondEndOfRow(long endTsExclusive, long partitionFine) {
        return Math.min(
                endTsExclusive - SweepQueueUtils.minTsForFinePartition(partitionFine),
                SweepQueueUtils.TS_FINE_GRANULARITY);
    }

    private long exactColumnOrElseBeginningOfRow(long startTsInclusive, long partitionFine) {
        return Math.max(startTsInclusive - SweepQueueUtils.minTsForFinePartition(partitionFine), 0);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.dht;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ThreadLocalRandom;

import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.PreHashedDecoratedKey;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.PartitionerDefinedOrder;
import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.MurmurHash;
import org.apache.cassandra.utils.ObjectSizes;

import com.google.common.primitives.Longs;

/**
 * This class generates a BigIntegerToken using a Murmur3 hash.
 */
public class Murmur3Partitioner implements IPartitioner
{
    // Long.MIN_VALUE is reserved: no key ever maps to it (see normalize/getToken), so the range
    // (MINIMUM, X] can be used to select all data with token <= X.
    public static final LongToken MINIMUM = new LongToken(Long.MIN_VALUE);
    public static final long MAXIMUM = Long.MAX_VALUE;
    private static final int HEAP_SIZE = (int) ObjectSizes.measureDeep(MINIMUM);

    public static final Murmur3Partitioner instance = new Murmur3Partitioner();
    public static final AbstractType<?> partitionOrdering = new PartitionerDefinedOrder(instance);

    // Maps tokens to/from BigInteger so generic range-splitting code can do exact arithmetic.
    private final Splitter splitter = new Splitter(this)
    {
        public Token tokenForValue(BigInteger value)
        {
            return new LongToken(value.longValue());
        }

        public BigInteger valueForToken(Token token)
        {
            return BigInteger.valueOf(((LongToken) token).token);
        }
    };

    public DecoratedKey decorateKey(ByteBuffer key)
    {
        // Hash once and reuse both 64-bit halves for the pre-hashed key.
        long[] hash = getHash(key);
        return new PreHashedDecoratedKey(getToken(key, hash), key, hash[0], hash[1]);
    }

    public Token midpoint(Token lToken, Token rToken)
    {
        // using BigInteger to avoid long overflow in intermediate operations
        BigInteger l = BigInteger.valueOf(((LongToken) lToken).token),
                   r = BigInteger.valueOf(((LongToken) rToken).token),
                   midpoint;

        if (l.compareTo(r) < 0)
        {
            BigInteger sum = l.add(r);
            midpoint = sum.shiftRight(1);
        }
        else // wrapping case
        {
            BigInteger max = BigInteger.valueOf(MAXIMUM);
            BigInteger min = BigInteger.valueOf(MINIMUM.token);
            // length of range we're bisecting is (R - min) + (max - L)
            // so we add that to L giving
            // L + ((R - min) + (max - L) / 2) = (L + R + max - min) / 2
            midpoint = (max.subtract(min).add(l).add(r)).shiftRight(1);
            if (midpoint.compareTo(max) > 0)
                midpoint = min.add(midpoint.subtract(max));
        }

        return new LongToken(midpoint.longValue());
    }

    public LongToken getMinimumToken()
    {
        return MINIMUM;
    }

    /** Immutable token wrapping a single signed 64-bit value. */
    public static class LongToken extends Token
    {
        static final long serialVersionUID = -5833580143318243006L;

        final long token;

        public LongToken(long token)
        {
            this.token = token;
        }

        public String toString()
        {
            return Long.toString(token);
        }

        public boolean equals(Object obj)
        {
            if (this == obj)
                return true;
            if (obj == null || this.getClass() != obj.getClass())
                return false;

            return token == (((LongToken)obj).token);
        }

        public int hashCode()
        {
            return Longs.hashCode(token);
        }

        public int compareTo(Token o)
        {
            return Long.compare(token, ((LongToken) o).token);
        }

        @Override
        public IPartitioner getPartitioner()
        {
            return instance;
        }

        @Override
        public long getHeapSize()
        {
            return HEAP_SIZE;
        }

        @Override
        public Object getTokenValue()
        {
            return token;
        }

        @Override
        public double size(Token next)
        {
            LongToken n = (LongToken) next;
            long v = n.token - token;  // Overflow acceptable and desired.
            double d = Math.scalb((double) v, -Long.SIZE); // Scale so that the full range is 1.
            return d > 0.0 ? d : (d + 1.0); // Adjust for signed long, also making sure t.size(t) == 1.
        }

        @Override
        public Token increaseSlightly()
        {
            return new LongToken(token + 1);
        }
    }

    /**
     * Generate the token of a key.
     * Note that we need to ensure all generated token are strictly bigger than MINIMUM.
     * In particular we don't want MINIMUM to correspond to any key because the range (MINIMUM, X] doesn't
     * include MINIMUM but we use such range to select all data whose token is smaller than X.
     */
    public LongToken getToken(ByteBuffer key)
    {
        return getToken(key, getHash(key));
    }

    private LongToken getToken(ByteBuffer key, long[] hash)
    {
        // Empty key conventionally maps to the minimum token.
        if (key.remaining() == 0)
            return MINIMUM;

        return new LongToken(normalize(hash[0]));
    }

    private long[] getHash(ByteBuffer key)
    {
        long[] hash = new long[2];
        MurmurHash.hash3_x64_128(key, key.position(), key.remaining(), 0, hash);
        return hash;
    }

    public LongToken getRandomToken()
    {
        return getRandomToken(ThreadLocalRandom.current());
    }

    public LongToken getRandomToken(Random r)
    {
        return new LongToken(normalize(r.nextLong()));
    }

    private long normalize(long v)
    {
        // We exclude the MINIMUM value; see getToken()
        return v == Long.MIN_VALUE ? Long.MAX_VALUE : v;
    }

    public boolean preservesOrder()
    {
        return false;
    }

    /**
     * Computes, for each token in sorted order, the fraction of the full token ring owned by the
     * range ending at that token (i.e. the distance back to the previous token, wrapping around).
     * NOTE(review): uses deprecated new Float(1.0) and BigDecimal.ROUND_HALF_EVEN; kept as-is to
     * match the surrounding codebase's Java level.
     */
    public Map<Token, Float> describeOwnership(List<Token> sortedTokens)
    {
        Map<Token, Float> ownerships = new HashMap<Token, Float>();
        Iterator<Token> i = sortedTokens.iterator();

        // 0-case
        if (!i.hasNext())
            throw new RuntimeException("No nodes present in the cluster. Has this node finished starting up?");
        // 1-case
        if (sortedTokens.size() == 1)
            ownerships.put(i.next(), new Float(1.0));
        // n-case
        else
        {
            final BigInteger ri = BigInteger.valueOf(MAXIMUM).subtract(BigInteger.valueOf(MINIMUM.token + 1));  //  (used for addition later)
            final BigDecimal r  = new BigDecimal(ri);
            Token start = i.next();
            BigInteger ti = BigInteger.valueOf(((LongToken)start).token);  // The first token and its value
            Token t;
            BigInteger tim1 = ti;  // The last token and its value (after loop)

            while (i.hasNext())
            {
                t = i.next();
                ti = BigInteger.valueOf(((LongToken) t).token); // The next token and its value
                float age = new BigDecimal(ti.subtract(tim1).add(ri).mod(ri)).divide(r, 6, BigDecimal.ROUND_HALF_EVEN).floatValue(); // %age = ((T(i) - T(i-1) + R) % R) / R
                ownerships.put(t, age);                           // save (T(i) -> %age)
                tim1 = ti;                                        // -> advance loop
            }

            // The start token's range extends backward to the last token, which is why both were saved above.
            float x = new BigDecimal(BigInteger.valueOf(((LongToken)start).token).subtract(ti).add(ri).mod(ri)).divide(r, 6, BigDecimal.ROUND_HALF_EVEN).floatValue();
            ownerships.put(start, x);
        }

        return ownerships;
    }

    public Token.TokenFactory getTokenFactory()
    {
        return tokenFactory;
    }

    // Serializes tokens as big-endian 8-byte longs; parses/validates their string form.
    private final Token.TokenFactory tokenFactory = new Token.TokenFactory()
    {
        public ByteBuffer toByteArray(Token token)
        {
            LongToken longToken = (LongToken) token;
            return ByteBufferUtil.bytes(longToken.token);
        }

        public Token fromByteArray(ByteBuffer bytes)
        {
            return new LongToken(ByteBufferUtil.toLong(bytes));
        }

        public String toString(Token token)
        {
            return token.toString();
        }

        public void validate(String token) throws ConfigurationException
        {
            try
            {
                fromString(token);
            }
            catch (NumberFormatException e)
            {
                throw new ConfigurationException(e.getMessage());
            }
        }

        public Token fromString(String string)
        {
            try
            {
                return new LongToken(Long.parseLong(string));
            }
            catch (NumberFormatException e)
            {
                throw new IllegalArgumentException(String.format("Invalid token for Murmur3Partitioner. Got %s but expected a long value (unsigned 8 bytes integer).", string));
            }
        }
    };

    public AbstractType<?> getTokenValidator()
    {
        return LongType.instance;
    }

    public Token getMaximumToken()
    {
        return new LongToken(Long.MAX_VALUE);
    }

    public AbstractType<?> partitionOrdering()
    {
        return partitionOrdering;
    }

    public Optional<Splitter> splitter()
    {
        return Optional.of(splitter);
    }
}
/*
 * Copyright 2009 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.codefollower.douyu.netty.channel.nio;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.spi.SelectorProvider;
import java.util.Set;
import java.util.Map.Entry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.codefollower.douyu.logging.InternalLogger;
import com.codefollower.douyu.logging.InternalLoggerFactory;
import com.codefollower.douyu.netty.util.internal.SystemPropertyUtil;

/**
 * Provides information which is specific to a NIO service provider
 * implementation.
 *
 * @author <a href="http://www.jboss.org/netty/">The Netty Project</a>
 * @author <a href="http://gleamynode.net/">Trustin Lee</a>
 *
 * @version $Rev$, $Date$
 *
 */
class NioProviderMetadata {
    static final InternalLogger logger =
        InternalLoggerFactory.getInstance(NioProviderMetadata.class);

    private static final String CONSTRAINT_LEVEL_PROPERTY =
        "org.jboss.netty.channel.socket.nio.constraintLevel";

    /**
     * 0 - no need to wake up to get / set interestOps (most cases)
     * 1 - no need to wake up to get interestOps, but need to wake up to set.
     * 2 - need to wake up to get / set interestOps (old providers)
     */
    static final int CONSTRAINT_LEVEL;

    // Resolution order: explicit system property -> JVM/OS/provider table -> safest level (2).
    static {
        int constraintLevel = -1;

        // Use the system property if possible.
        constraintLevel = SystemPropertyUtil.get(CONSTRAINT_LEVEL_PROPERTY, -1);
        if (constraintLevel < 0 || constraintLevel > 2) {
            constraintLevel = -1;
        }

        if (constraintLevel >= 0) {
            logger.debug(
                    "Setting the NIO constraint level to: " + constraintLevel);
        }

        if (constraintLevel < 0) {
            constraintLevel = detectConstraintLevelFromSystemProperties();

            if (constraintLevel < 0) {
                constraintLevel = 2;
                logger.debug(
                        "Couldn't determine the NIO constraint level from " +
                        "the system properties; using the safest level (2)");
            } else if (constraintLevel != 0) {
                logger.info(
                        "Using the autodetected NIO constraint level: " +
                        constraintLevel +
                        " (Use better NIO provider for better performance)");
            } else {
                logger.debug(
                        "Using the autodetected NIO constraint level: " +
                        constraintLevel);
            }
        }

        CONSTRAINT_LEVEL = constraintLevel;

        if (CONSTRAINT_LEVEL < 0 || CONSTRAINT_LEVEL > 2) {
            throw new Error(
                    "Unexpected NIO constraint level: " +
                    CONSTRAINT_LEVEL + ", please report this error.");
        }
    }

    /**
     * Looks up the constraint level from a hard-coded table of known JVM vendor /
     * OS / SelectorProvider combinations. Returns -1 when the combination is unknown.
     */
    private static int detectConstraintLevelFromSystemProperties() {
        String version = SystemPropertyUtil.get("java.specification.version");
        String vminfo = SystemPropertyUtil.get("java.vm.info", "");
        String os = SystemPropertyUtil.get("os.name");
        String vendor = SystemPropertyUtil.get("java.vm.vendor");
        String provider;
        try {
            provider = SelectorProvider.provider().getClass().getName();
        } catch (Exception e) {
            // Perhaps security exception.
            provider = null;
        }

        if (version == null || os == null ||
            vendor == null || provider == null) {
            return -1;
        }

        os = os.toLowerCase();
        vendor = vendor.toLowerCase();

//        System.out.println(version);
//        System.out.println(vminfo);
//        System.out.println(os);
//        System.out.println(vendor);
//        System.out.println(provider);

        // Sun JVM
        if (vendor.indexOf("sun") >= 0) {
            // Linux
            if (os.indexOf("linux") >= 0) {
                if (provider.equals("sun.nio.ch.EPollSelectorProvider") ||
                    provider.equals("sun.nio.ch.PollSelectorProvider")) {
                    return 0;
                }

            // Windows
            } else if (os.indexOf("windows") >= 0) {
                if (provider.equals("sun.nio.ch.WindowsSelectorProvider")) {
                    return 0;
                }

            // Solaris
            } else if (os.indexOf("sun") >= 0 || os.indexOf("solaris") >= 0) {
                if (provider.equals("sun.nio.ch.DevPollSelectorProvider")) {
                    return 0;
                }
            }
        // Apple JVM
        } else if (vendor.indexOf("apple") >= 0) {
            // Mac OS
            if (os.indexOf("mac") >= 0 && os.indexOf("os") >= 0) {
                if (provider.equals("sun.nio.ch.KQueueSelectorProvider")) {
                    return 0;
                }
            }
        // IBM
        } else if (vendor.indexOf("ibm") >= 0) {
            // Linux or AIX
            if (os.indexOf("linux") >= 0 || os.indexOf("aix") >= 0) {
                if (version.equals("1.5") || version.matches("^1\\.5\\D.*$")) {
                    if (provider.equals("sun.nio.ch.PollSelectorProvider")) {
                        return 1;
                    }
                } else if (version.equals("1.6") || version.matches("^1\\.6\\D.*$")) {
                    // IBM JDK 1.6 has different constraint level for different
                    // version.  The exact version can be determined only by its
                    // build date.
                    Pattern datePattern = Pattern.compile(
                            "(?:^|[^0-9])(" +
                            "[2-9][0-9]{3}" +              // year
                            "(?:0[1-9]|1[0-2])" +          // month
                            "(?:0[1-9]|[12][0-9]|3[01])" + // day of month
                            ")(?:$|[^0-9])");

                    Matcher dateMatcher = datePattern.matcher(vminfo);
                    if (dateMatcher.find()) {
                        long dateValue = Long.parseLong(dateMatcher.group(1));
                        if (dateValue < 20081105L) {
                            // SR0, 1, and 2
                            return 2;
                        } else {
                            // SR3 and later
                            if (provider.equals("sun.nio.ch.EPollSelectorProvider")) {
                                return 0;
                            } else if (provider.equals("sun.nio.ch.PollSelectorProvider")) {
                                return 1;
                            }
                        }
                    }
                }
            }
        // BEA
        } else if (vendor.indexOf("bea") >= 0 || vendor.indexOf("oracle") >= 0) {
            // Linux
            if (os.indexOf("linux") >= 0) {
                if (provider.equals("sun.nio.ch.EPollSelectorProvider") ||
                    provider.equals("sun.nio.ch.PollSelectorProvider")) {
                    return 0;
                }

            // Windows
            } else if (os.indexOf("windows") >= 0) {
                if (provider.equals("sun.nio.ch.WindowsSelectorProvider")) {
                    return 0;
                }
            }
        // Apache Software Foundation
        } else if (vendor.indexOf("apache") >= 0) {
            if (provider.equals("org.apache.harmony.nio.internal.SelectorProviderImpl")) {
                return 1;
            }
        }

        // Others (untested)
        return -1;
    }

    /**
     * Empirically measures the constraint level by timing interestOps() calls against a live
     * selector: if changing interestOps while select() is blocked takes >= 500ms, the provider
     * needs a wakeup for that operation. Returns 0, 1, 2, or -1 on setup failure.
     * NOTE(review): only invoked from main() in this fork; the static initializer relies solely
     * on the system-property table above.
     */
    private static final class ConstraintLevelAutodetector {

        ConstraintLevelAutodetector() {
            super();
        }

        int autodetect() {
            final int constraintLevel;
            ExecutorService executor = Executors.newCachedThreadPool();
            boolean success;
            long startTime;
            int interestOps;

            ServerSocketChannel ch = null;
            SelectorLoop loop = null;

            try {
                // Open a channel.
                ch = ServerSocketChannel.open();

                // Configure the channel
                try {
                    ch.socket().bind(new InetSocketAddress(0));
                    ch.configureBlocking(false);
                } catch (Throwable e) {
                    logger.warn("Failed to configure a temporary socket.", e);
                    return -1;
                }

                // Prepare the selector loop.
                try {
                    loop = new SelectorLoop();
                } catch (Throwable e) {
                    logger.warn("Failed to open a temporary selector.", e);
                    return -1;
                }

                // Register the channel
                try {
                    ch.register(loop.selector, 0);
                } catch (Throwable e) {
                    logger.warn("Failed to register a temporary selector.", e);
                    return -1;
                }

                SelectionKey key = ch.keyFor(loop.selector);

                // Start the selector loop.
                executor.execute(loop);

                // Level 0
                success = true;
                for (int i = 0; i < 10; i ++) {

                    // Increase the probability of calling interestOps
                    // while select() is running.
                    do {
                        while (!loop.selecting) {
                            Thread.yield();
                        }

                        // Wait a little bit more.
                        try {
                            Thread.sleep(50);
                        } catch (InterruptedException e) {
                            // Ignore
                        }
                    } while (!loop.selecting);

                    startTime = System.nanoTime();
                    key.interestOps(key.interestOps() | SelectionKey.OP_ACCEPT);
                    key.interestOps(key.interestOps() & ~SelectionKey.OP_ACCEPT);

                    // >= 500ms means the call blocked until select() timed out -> wakeup required.
                    if (System.nanoTime() - startTime >= 500000000L) {
                        success = false;
                        break;
                    }
                }

                if (success) {
                    constraintLevel = 0;
                } else {
                    // Level 1
                    success = true;
                    for (int i = 0; i < 10; i ++) {

                        // Increase the probability of calling interestOps
                        // while select() is running.
                        do {
                            while (!loop.selecting) {
                                Thread.yield();
                            }

                            // Wait a little bit more.
                            try {
                                Thread.sleep(50);
                            } catch (InterruptedException e) {
                                // Ignore
                            }
                        } while (!loop.selecting);

                        startTime = System.nanoTime();
                        interestOps = key.interestOps();
                        synchronized (loop) {
                            loop.selector.wakeup();
                            key.interestOps(interestOps | SelectionKey.OP_ACCEPT);
                            key.interestOps(interestOps & ~SelectionKey.OP_ACCEPT);
                        }

                        if (System.nanoTime() - startTime >= 500000000L) {
                            success = false;
                            break;
                        }
                    }
                    if (success) {
                        constraintLevel = 1;
                    } else {
                        constraintLevel = 2;
                    }
                }
            } catch (Throwable e) {
                return -1;
            } finally {
                // Best-effort teardown: close the channel, stop the loop thread, close the selector.
                if (ch != null) {
                    try {
                        ch.close();
                    } catch (Throwable e) {
                        logger.warn("Failed to close a temporary socket.", e);
                    }
                }

                if (loop != null) {
                    loop.done = true;
                    try {
                        executor.shutdownNow();
                    } catch (NullPointerException ex) {
                        // Some JDK throws NPE here, but shouldn't.
                    }

                    try {
                        for (;;) {
                            loop.selector.wakeup();
                            try {
                                if (executor.awaitTermination(1, TimeUnit.SECONDS)) {
                                    break;
                                }
                            } catch (InterruptedException e) {
                                // Ignore
                            }
                        }
                    } catch (Throwable e) {
                        // Perhaps security exception.
                    }

                    try {
                        loop.selector.close();
                    } catch (Throwable e) {
                        logger.warn(
                                "Failed to close a temporary selector.", e);
                    }
                }
            }

            return constraintLevel;
        }
    }

    /** Background select() loop whose {@code selecting} flag lets the detector time interestOps calls. */
    private static final class SelectorLoop implements Runnable {
        final Selector selector;
        volatile boolean done;
        volatile boolean selecting; // Just an approximation

        SelectorLoop() throws IOException {
            selector = Selector.open();
        }

        @Override
        public void run() {
            while (!done) {
                synchronized (this) {
                    // Guard
                }
                try {
                    selecting = true;
                    try {
                        selector.select(1000);
                    } finally {
                        selecting = false;
                    }

                    // Drain selected keys so the next select() blocks again.
                    Set<SelectionKey> keys = selector.selectedKeys();
                    for (SelectionKey k: keys) {
                        k.interestOps(0);
                    }
                    keys.clear();
                } catch (IOException e) {
                    logger.warn("Failed to wait for a temporary selector.", e);
                }
            }
        }
    }

    // Diagnostic entry point: prints system properties plus both constraint-level determinations.
    public static void main(String[] args) throws Exception {
        for (Entry<Object, Object> e: System.getProperties().entrySet()) {
            System.out.println(e.getKey() + ": " + e.getValue());
        }
        System.out.println();
        System.out.println("Hard-coded Constraint Level: " + CONSTRAINT_LEVEL);
        System.out.println(
                "Auto-detected Constraint Level: " +
                new ConstraintLevelAutodetector().autodetect());
    }

    private NioProviderMetadata() {
        // Unused
    }
}
/*
 * ConnectBot: simple, powerful, open-source SSH client for Android
 * Copyright 2007 Kenny Root, Jeffrey Sharkey
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.woltage.irssiconnectbot.transport;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.woltage.irssiconnectbot.bean.HostBean;
import org.woltage.irssiconnectbot.bean.PortForwardBean;
import org.woltage.irssiconnectbot.service.TerminalBridge;
import org.woltage.irssiconnectbot.service.TerminalManager;

import android.content.Context;
import android.net.Uri;

/**
 * Base class for all terminal transports (SSH, telnet, local shell, ...).
 *
 * <p>A transport owns the byte stream between the terminal emulator and the
 * remote (or local) host: subclasses implement connection setup, raw
 * read/write, and teardown. Optional capabilities such as port forwarding
 * have no-op / {@code false} defaults here and are overridden only by
 * transports that support them.
 *
 * <p>NOTE(review): the {@code host}/{@code bridge}/{@code manager}/{@code emulation}
 * fields are package-private and appear intended for direct access by
 * subclasses in this package — confirm before changing their visibility.
 *
 * @author Kenny Root
 *
 */
public abstract class AbsTransport {
	// Connection description (address, credentials, per-host settings).
	HostBean host;
	// Bridge back to the terminal emulator that renders this session.
	TerminalBridge bridge;
	// Service-level manager that owns all bridges.
	TerminalManager manager;

	// Terminal emulation name (e.g. a terminfo name) requested for this session.
	String emulation;

	public AbsTransport() {}

	public AbsTransport(HostBean host, TerminalBridge bridge,
			TerminalManager manager) {
		this.host = host;
		this.bridge = bridge;
		this.manager = manager;
	}

	/**
	 * Protocol name used as the scheme part of this transport's URIs.
	 *
	 * <p>NOTE(review): this is static, so subclasses shadow (not override) it;
	 * callers must invoke it on the concrete transport class, never
	 * polymorphically through a base-class reference.
	 *
	 * @return protocol part of the URI
	 */
	public static String getProtocolName() {
		return "unknown";
	}

	/**
	 * Parses the given connection string into a URI that can be passed via
	 * intent calls. The base implementation cannot parse anything and returns
	 * {@code null}; subclasses shadow this static method with a real parser.
	 *
	 * @param input connection string to parse — presumably in this
	 *              transport's URI format; verify against callers
	 * @return URI to host, or {@code null} if unparseable
	 */
	public static Uri getUri(String input) {
		return null;
	}

	/**
	 * Causes transport to connect to the target host. After connecting but before a
	 * session is started, must call back to {@link TerminalBridge#onConnected()}.
	 * After that call a session may be opened.
	 */
	public abstract void connect();

	/**
	 * Reads from the transport. Transport must support reading into a the byte array
	 * <code>buffer</code> at the start of <code>offset</code> and a maximum of
	 * <code>length</code> bytes. If the remote host disconnects, throw an
	 * {@link IOException}.
	 * @param buffer byte buffer to store read bytes into
	 * @param offset where to start writing in the buffer
	 * @param length maximum number of bytes to read
	 * @return number of bytes read
	 * @throws IOException when remote host disconnects
	 */
	public abstract int read(byte[] buffer, int offset, int length) throws IOException;

	/**
	 * Writes to the transport. If the host is not yet connected, simply return without
	 * doing anything. An {@link IOException} should be thrown if there is an error after
	 * connection.
	 *
	 * @param buffer bytes to write to transport
	 * @throws IOException when there is a problem writing after connection
	 */
	public abstract void write(byte[] buffer) throws IOException;

	/**
	 * Writes to the transport. See {@link #write(byte[])} for behavior details.
	 *
	 * @param c character to write to the transport
	 * @throws IOException when there is a problem writing after connection
	 */
	public abstract void write(int c) throws IOException;

	/**
	 * Flushes the write commands to the transport.
	 *
	 * @throws IOException when there is a problem writing after connection
	 */
	public abstract void flush() throws IOException;

	/**
	 * Closes the connection to the terminal. Note that the resulting failure to read
	 * should call {@link TerminalBridge#dispatchDisconnect(boolean)}.
	 */
	public abstract void close();

	/**
	 * Tells the transport what dimensions the display is currently
	 *
	 * @param columns columns of text
	 * @param rows rows of text
	 * @param width width in pixels
	 * @param height height in pixels
	 */
	public abstract void setDimensions(int columns, int rows, int width, int height);

	/**
	 * Applies transport-specific options. Base implementation ignores them.
	 *
	 * @param options option map — key semantics are transport-specific
	 */
	public void setOptions(Map<String, String> options) {
		// do nothing
	}

	/**
	 * Returns transport-specific options, or {@code null} when the transport
	 * has none. NOTE(review): callers apparently must null-check this; do not
	 * change to an empty map without auditing call sites.
	 */
	public Map<String, String> getOptions() {
		return null;
	}

	/** Enables or disables stream compression; no-op unless overridden. */
	public void setCompression(boolean compression) {
		// do nothing
	}

	/** Configures SSH auth-agent usage; no-op unless overridden. */
	public void setUseAuthAgent(String useAuthAgent) {
		// do nothing
	}

	/** Sets the terminal emulation name requested for this session. */
	public void setEmulation(String emulation) {
		this.emulation = emulation;
	}

	/** Returns the terminal emulation name, or {@code null} if unset. */
	public String getEmulation() {
		return emulation;
	}

	/** Sets the host bean describing the connection target. */
	public void setHost(HostBean host) {
		this.host = host;
	}

	/** Sets the terminal bridge this transport reports to. */
	public void setBridge(TerminalBridge bridge) {
		this.bridge = bridge;
	}

	/** Sets the terminal manager owning this transport's bridge. */
	public void setManager(TerminalManager manager) {
		this.manager = manager;
	}

	/**
	 * Whether or not this transport type can forward ports.
	 *
	 * @return true on ability to forward ports
	 */
	public boolean canForwardPorts() {
		return false;
	}

	/**
	 * Adds the {@link PortForwardBean} to the list.
	 *
	 * @param portForward the port forward bean to add
	 * @return true on successful addition
	 */
	public boolean addPortForward(PortForwardBean portForward) {
		return false;
	}

	/**
	 * Enables a port forward member. After calling this method, the port forward should
	 * be operational iff it could be enabled by the transport.
	 *
	 * @param portForward member of our current port forwards list to enable
	 * @return true on successful port forward setup
	 */
	public boolean enablePortForward(PortForwardBean portForward) {
		return false;
	}

	/**
	 * Disables a port forward member. After calling this method, the port forward should
	 * be non-functioning iff it could be disabled by the transport.
	 *
	 * @param portForward member of our current port forwards list to disable
	 * @return true on successful port forward tear-down
	 */
	public boolean disablePortForward(PortForwardBean portForward) {
		return false;
	}

	/**
	 * Removes the {@link PortForwardBean} from the available port forwards.
	 *
	 * @param portForward the port forward bean to remove
	 * @return true on successful removal
	 */
	public boolean removePortForward(PortForwardBean portForward) {
		return false;
	}

	/**
	 * Gets a list of the {@link PortForwardBean} currently used by this transport.
	 * Returns {@code null} here; transports supporting forwarding override this.
	 *
	 * @return the list of port forwards, or {@code null} if unsupported
	 */
	public List<PortForwardBean> getPortForwards() {
		return null;
	}

	/** @return whether the transport is currently connected to the host */
	public abstract boolean isConnected();

	/** @return whether a terminal session has been opened on the connection */
	public abstract boolean isSessionOpen();

	/**
	 * @return int default port for protocol
	 */
	public abstract int getDefaultPort();

	/**
	 * Builds the default display nickname for a host reachable at
	 * {@code username@hostname:port}.
	 *
	 * @param username login name, may be empty depending on transport
	 * @param hostname target host
	 * @param port target port
	 * @return human-readable nickname for this connection
	 */
	public abstract String getDefaultNickname(String username, String hostname, int port);

	/**
	 * Extracts this transport's host-lookup fields from the given URI into the
	 * supplied map (presumably keyed by database column names — verify against
	 * callers).
	 *
	 * @param uri URI to pull values from
	 * @param selection map to fill with selection key/value pairs
	 */
	public abstract void getSelectionArgs(Uri uri, Map<String, String> selection);

	/**
	 * Creates a new {@link HostBean} populated from the given transport URI.
	 *
	 * @param uri URI describing the connection target
	 * @return new host bean for the URI
	 */
	public abstract HostBean createHost(Uri uri);

	/**
	 * @param context context containing the correct resources
	 * @return string that hints at the format for connection
	 */
	public static String getFormatHint(Context context) {
		return "???";
	}

	/**
	 * @return whether this transport requires network connectivity
	 */
	public abstract boolean usesNetwork();

	/** @return whether the session must be reset when connectivity changes */
	public abstract boolean resetOnConnectionChange();

	/** Lifecycle hook: app moved to background; no-op unless overridden. */
	public void onBackground() {}

	/** Lifecycle hook: app moved to foreground; no-op unless overridden. */
	public void onForeground() {}

	/** Lifecycle hook: screen turned off; no-op unless overridden. */
	public void onScreenOff() {}

	/** Lifecycle hook: screen turned on; no-op unless overridden. */
	public void onScreenOn() {}
}
/* * $Id: Form.java 1485978 2013-05-24 08:56:41Z lukaszlenart $ * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.struts2.components; import com.opensymphony.xwork2.ObjectFactory; import com.opensymphony.xwork2.config.Configuration; import com.opensymphony.xwork2.config.RuntimeConfiguration; import com.opensymphony.xwork2.config.entities.ActionConfig; import com.opensymphony.xwork2.config.entities.InterceptorMapping; import com.opensymphony.xwork2.inject.Inject; import com.opensymphony.xwork2.interceptor.MethodFilterInterceptorUtil; import com.opensymphony.xwork2.util.ValueStack; import com.opensymphony.xwork2.validator.ActionValidatorManager; import com.opensymphony.xwork2.validator.FieldValidator; import com.opensymphony.xwork2.validator.ValidationException; import com.opensymphony.xwork2.validator.ValidationInterceptor; import com.opensymphony.xwork2.validator.Validator; import com.opensymphony.xwork2.validator.ValidatorContext; import com.opensymphony.xwork2.validator.validators.VisitorFieldValidator; import org.apache.commons.lang3.StringUtils; import org.apache.struts2.dispatcher.mapper.ActionMapping; import org.apache.struts2.views.annotations.StrutsTag; import org.apache.struts2.views.annotations.StrutsTagAttribute; 
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; /** * <!-- START SNIPPET: javadoc --> * <p/> * Renders HTML an input form.<p/> * <p/> * The remote form allows the form to be submitted without the page being refreshed. The results from the form * can be inserted into any HTML element on the page.<p/> * <p/> * NOTE:<p/> * The order / logic in determining the posting url of the generated HTML form is as follows:- * <ol> * <li> * If the action attribute is not specified, then the current request will be used to * determine the posting url * </li> * <li> * If the action is given, Struts will try to obtain an ActionConfig. This will be * successfull if the action attribute is a valid action alias defined struts.xml. * </li> * <li> * If the action is given and is not an action alias defined in struts.xml, Struts * will used the action attribute as if it is the posting url, separting the namespace * from it and using UrlHelper to generate the final url. * </li> * </ol> * <p/> * <!-- END SNIPPET: javadoc --> * <p/> * <p/> <b>Examples</b> * <p/> * <pre> * <!-- START SNIPPET: example --> * <p/> * &lt;s:form ... 
/&gt; * <p/> * <!-- END SNIPPET: example --> * </pre> * */ @StrutsTag( name="form", tldTagClass="org.apache.struts2.views.jsp.ui.FormTag", description="Renders an input form", allowDynamicAttributes=true) public class Form extends ClosingUIBean { public static final String OPEN_TEMPLATE = "form"; public static final String TEMPLATE = "form-close"; private int sequence = 0; protected String onsubmit; protected String onreset; protected String action; protected String target; protected String enctype; protected String method; protected String namespace; protected String validate; protected String portletMode; protected String windowState; protected String acceptcharset; protected boolean includeContext = true; protected String focusElement; protected Configuration configuration; protected ObjectFactory objectFactory; protected UrlRenderer urlRenderer; protected ActionValidatorManager actionValidatorManager; public Form(ValueStack stack, HttpServletRequest request, HttpServletResponse response) { super(stack, request, response); } @Override protected boolean evaluateNameValue() { return false; } @Override public String getDefaultOpenTemplate() { return OPEN_TEMPLATE; } @Override protected String getDefaultTemplate() { return TEMPLATE; } @Inject public void setConfiguration(Configuration configuration) { this.configuration = configuration; } @Inject public void setObjectFactory(ObjectFactory objectFactory) { this.objectFactory = objectFactory; } @Inject public void setUrlRenderer(UrlRenderer urlRenderer) { this.urlRenderer = urlRenderer; } @Inject public void setActionValidatorManager(ActionValidatorManager mgr) { this.actionValidatorManager = mgr; } /* * Revised for Portlet actionURL as form action, and add wwAction as hidden * field. 
Refer to template.simple/form.vm */ @Override protected void evaluateExtraParams() { super.evaluateExtraParams(); if (validate != null) { addParameter("validate", findValue(validate, Boolean.class)); } if (name == null) { //make the name the same as the id String id = (String) getParameters().get("id"); if (StringUtils.isNotEmpty(id)) { addParameter("name", id); } } if (onsubmit != null) { addParameter("onsubmit", findString(onsubmit)); } if (onreset != null) { addParameter("onreset", findString(onreset)); } if (target != null) { addParameter("target", findString(target)); } if (enctype != null) { addParameter("enctype", findString(enctype)); } if (method != null) { addParameter("method", findString(method)); } if (acceptcharset != null) { addParameter("acceptcharset", findString(acceptcharset)); } // keep a collection of the tag names for anything special the templates might want to do (such as pure client // side validation) if (!parameters.containsKey("tagNames")) { // we have this if check so we don't do this twice (on open and close of the template) addParameter("tagNames", new ArrayList()); } if (focusElement != null) { addParameter("focusElement", findString(focusElement)); } } /** * Form component determine the its HTML element id as follows:- * <ol> * <li>if an 'id' attribute is specified.</li> * <li>if an 'action' attribute is specified, it will be used as the id.</li> * </ol> */ @Override protected void populateComponentHtmlId(Form form) { if (id != null) { addParameter("id", escape(id)); } // if no id given, it will be tried to generate it from the action attribute // by the urlRenderer implementation urlRenderer.renderFormUrl(this); } /** * Evaluate client side JavaScript Enablement. 
* @param actionName the actioName to check for * @param namespace the namespace to check for * @param actionMethod the method to ckeck for */ protected void evaluateClientSideJsEnablement(String actionName, String namespace, String actionMethod) { // Only evaluate if Client-Side js is to be enable when validate=true Boolean validate = (Boolean) getParameters().get("validate"); if (validate != null && validate) { addParameter("performValidation", Boolean.FALSE); RuntimeConfiguration runtimeConfiguration = configuration.getRuntimeConfiguration(); ActionConfig actionConfig = runtimeConfiguration.getActionConfig(namespace, actionName); if (actionConfig != null) { List<InterceptorMapping> interceptors = actionConfig.getInterceptors(); for (InterceptorMapping interceptorMapping : interceptors) { if (ValidationInterceptor.class.isInstance(interceptorMapping.getInterceptor())) { ValidationInterceptor validationInterceptor = (ValidationInterceptor) interceptorMapping.getInterceptor(); Set excludeMethods = validationInterceptor.getExcludeMethodsSet(); Set includeMethods = validationInterceptor.getIncludeMethodsSet(); if (MethodFilterInterceptorUtil.applyMethod(excludeMethods, includeMethods, actionMethod)) { addParameter("performValidation", Boolean.TRUE); } return; } } } } } public List getValidators(String name) { Class actionClass = (Class) getParameters().get("actionClass"); if (actionClass == null) { return Collections.EMPTY_LIST; } String formActionValue = findString(action); ActionMapping mapping = actionMapper.getMappingFromActionName(formActionValue); String actionName = mapping.getName(); String methodName = mapping.getMethod(); List<Validator> actionValidators = actionValidatorManager.getValidators(actionClass, actionName, methodName); List<Validator> validators = new ArrayList<Validator>(); findFieldValidators(name, actionClass, actionName, actionValidators, validators, ""); return validators; } private void findFieldValidators(String name, Class actionClass, 
String actionName, List<Validator> validatorList, List<Validator> retultValidators, String prefix) { for (Validator validator : validatorList) { if (validator instanceof FieldValidator) { FieldValidator fieldValidator = (FieldValidator) validator; if (validator instanceof VisitorFieldValidator) { VisitorFieldValidator vfValidator = (VisitorFieldValidator) fieldValidator; Class clazz = getVisitorReturnType(actionClass, vfValidator.getFieldName()); if (clazz == null) { continue; } List<Validator> visitorValidators = actionValidatorManager.getValidators(clazz, actionName); String vPrefix = prefix + (vfValidator.isAppendPrefix() ? vfValidator.getFieldName() + "." : ""); findFieldValidators(name, clazz, actionName, visitorValidators, retultValidators, vPrefix); } else if ((prefix + fieldValidator.getFieldName()).equals(name)) { if (StringUtils.isNotBlank(prefix)) { //fixing field name for js side FieldVisitorValidatorWrapper wrap = new FieldVisitorValidatorWrapper(fieldValidator, prefix); retultValidators.add(wrap); } else { retultValidators.add(fieldValidator); } } } } } /** * Wrap field validator, add visitor's field prefix to the field name. * Javascript side is not aware of the visitor validators * and does not know how to prefix the fields. */ /* * Class is public because Freemarker has problems accessing properties. 
*/ public static class FieldVisitorValidatorWrapper implements FieldValidator { private FieldValidator fieldValidator; private String namePrefix; public FieldVisitorValidatorWrapper(FieldValidator fv, String namePrefix) { this.fieldValidator = fv; this.namePrefix = namePrefix; } public String getValidatorType() { return "field-visitor"; } public String getFieldName() { return namePrefix + fieldValidator.getFieldName(); } public FieldValidator getFieldValidator() { return fieldValidator; } public void setFieldValidator(FieldValidator fieldValidator) { this.fieldValidator = fieldValidator; } public String getDefaultMessage() { return fieldValidator.getDefaultMessage(); } public String getMessage(Object object) { return fieldValidator.getMessage(object); } public String getMessageKey() { return fieldValidator.getMessageKey(); } public String[] getMessageParameters() { return fieldValidator.getMessageParameters(); } public ValidatorContext getValidatorContext() { return fieldValidator.getValidatorContext(); } public void setDefaultMessage(String message) { fieldValidator.setDefaultMessage(message); } public void setFieldName(String fieldName) { fieldValidator.setFieldName(fieldName); } public void setMessageKey(String key) { fieldValidator.setMessageKey(key); } public void setMessageParameters(String[] messageParameters) { fieldValidator.setMessageParameters(messageParameters); } public void setValidatorContext(ValidatorContext validatorContext) { fieldValidator.setValidatorContext(validatorContext); } public void setValidatorType(String type) { fieldValidator.setValidatorType(type); } public void setValueStack(ValueStack stack) { fieldValidator.setValueStack(stack); } public void validate(Object object) throws ValidationException { fieldValidator.validate(object); } public String getNamePrefix() { return namePrefix; } public void setNamePrefix(String namePrefix) { this.namePrefix = namePrefix; } } /** * Return type of visited object. 
* @param actionClass * @param visitorFieldName * @return */ @SuppressWarnings("unchecked") protected Class getVisitorReturnType(Class actionClass, String visitorFieldName) { if (visitorFieldName == null) { return null; } String methodName = "get" + org.apache.commons.lang.StringUtils.capitalize(visitorFieldName); try { Method method = actionClass.getMethod(methodName, new Class[0]); return method.getReturnType(); } catch (NoSuchMethodException e) { return null; } } /** * Get a incrementing sequence unique to this <code>Form</code> component. * It is used by <code>Form</code> component's child that might need a * sequence to make them unique. * * @return int */ protected int getSequence() { return sequence++; } @StrutsTagAttribute(description="HTML onsubmit attribute") public void setOnsubmit(String onsubmit) { this.onsubmit = onsubmit; } @StrutsTagAttribute(description="HTML onreset attribute") public void setOnreset(String onreset) { this.onreset = onreset; } @StrutsTagAttribute(description="Set action name to submit to, without .action suffix", defaultValue="current action") public void setAction(String action) { this.action = action; } @StrutsTagAttribute(description="HTML form target attribute") public void setTarget(String target) { this.target = target; } @StrutsTagAttribute(description="HTML form enctype attribute") public void setEnctype(String enctype) { this.enctype = enctype; } @StrutsTagAttribute(description="HTML form method attribute") public void setMethod(String method) { this.method = method; } @StrutsTagAttribute(description="Namespace for action to submit to", defaultValue="current namespace") public void setNamespace(String namespace) { this.namespace = namespace; } @StrutsTagAttribute(description="Whether client side/remote validation should be performed. 
Only" + " useful with theme xhtml/ajax", type="Boolean", defaultValue="false") public void setValidate(String validate) { this.validate = validate; } @StrutsTagAttribute(description="The portlet mode to display after the form submit") public void setPortletMode(String portletMode) { this.portletMode = portletMode; } @StrutsTagAttribute(description="The window state to display after the form submit") public void setWindowState(String windowState) { this.windowState = windowState; } @StrutsTagAttribute(description="The accepted charsets for this form. The values may be comma or blank delimited.") public void setAcceptcharset(String acceptcharset) { this.acceptcharset = acceptcharset; } @StrutsTagAttribute(description="Id of element that will receive the focus when page loads.") public void setFocusElement(String focusElement) { this.focusElement = focusElement; } @StrutsTagAttribute(description="Whether actual context should be included in URL", type="Boolean", defaultValue="true") public void setIncludeContext(boolean includeContext) { this.includeContext = includeContext; } }
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.impl;

import com.intellij.injected.editor.DocumentWindow;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityStateListener;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.EditorKind;
import com.intellij.openapi.editor.actionSystem.ActionPlan;
import com.intellij.openapi.editor.actionSystem.TypedActionHandler;
import com.intellij.openapi.editor.actionSystem.TypedActionHandlerEx;
import com.intellij.openapi.editor.colors.EditorColorsListener;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.event.EditorEventMulticaster;
import com.intellij.openapi.editor.event.EditorFactoryEvent;
import com.intellij.openapi.editor.event.EditorFactoryListener;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighterFactory;
import com.intellij.openapi.editor.impl.event.EditorEventMulticasterImpl;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ProjectManagerListener;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.EventDispatcher;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.text.CharArrayCharSequence;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;

/**
 * Default {@link EditorFactory} implementation: creates and releases {@link Editor}
 * instances, tracks every live editor in {@link #myEditors}, and forwards
 * editor-created/released events both to listeners registered directly on this
 * factory and to extensions registered on the {@code com.intellij.editorFactoryListener}
 * extension point.
 */
public class EditorFactoryImpl extends EditorFactory {
  private static final ExtensionPointName<EditorFactoryListener> EP = new ExtensionPointName<>("com.intellij.editorFactoryListener");
  private static final Logger LOG = Logger.getInstance(EditorFactoryImpl.class);

  private final EditorEventMulticasterImpl myEditorEventMulticaster = new EditorEventMulticasterImpl();
  private final EventDispatcher<EditorFactoryListener> myEditorFactoryEventDispatcher = EventDispatcher.create(EditorFactoryListener.class);
  // Lock-free copy-on-write list: safe to iterate while editors are added/removed concurrently.
  private final List<Editor> myEditors = ContainerUtil.createLockFreeCopyOnWriteList();

  public EditorFactoryImpl() {
    MessageBusConnection busConnection = ApplicationManager.getApplication().getMessageBus().connect();
    busConnection.subscribe(ProjectManager.TOPIC, new ProjectManagerListener() {
      @Override
      public void projectClosed(@NotNull Project project) {
        // validate all editors are disposed after fireProjectClosed() was called, because it's the place where editor should be released
        Disposer.register(project, () -> {
          Project[] openProjects = ProjectManager.getInstance().getOpenProjects();
          boolean isLastProjectClosed = openProjects.length == 0;
          // EditorTextField.releaseEditorLater defer releasing its editor; invokeLater to avoid false positives about such editors.
          ApplicationManager.getApplication().invokeLater(() -> validateEditorsAreReleased(project, isLastProjectClosed));
        });
      }
    });
    // Re-initialize all editors whenever the global color scheme changes.
    busConnection.subscribe(EditorColorsManager.TOPIC, new EditorColorsListener() {
      @Override
      public void globalSchemeChange(@Nullable EditorColorsScheme scheme) {
        refreshAllEditors();
      }
    });
    // Notify every editor before a modality state change (e.g. a modal dialog opening/closing).
    LaterInvocator.addModalityStateListener(new ModalityStateListener() {
      @Override
      public void beforeModalityStateChanged(boolean entering) {
        for (Editor editor : myEditors) {
          ((EditorImpl)editor).beforeModalityStateChanged();
        }
      }
    }, ApplicationManager.getApplication());
  }

  /**
   * Reports (via {@link #throwNotReleasedError}) and then force-releases every editor
   * that still belongs to {@code project} — or has no project, when the last open
   * project was just closed. Called after project close to catch editor leaks.
   *
   * @param project             the project that was closed
   * @param isLastProjectClosed whether no projects remain open; project-less editors
   *                            are only validated in that case
   */
  public void validateEditorsAreReleased(@NotNull Project project, boolean isLastProjectClosed) {
    for (Editor editor : myEditors) {
      if (editor.getProject() == project || (editor.getProject() == null && isLastProjectClosed)) {
        try {
          throwNotReleasedError(editor);
        }
        finally {
          // Release even though an error is being thrown, so the leaked editor does not linger.
          releaseEditor(editor);
        }
      }
    }
  }

  /**
   * Throws an error describing a leaked (not released) editor. For {@link EditorImpl}
   * the editor itself produces the detailed disposal error; otherwise a generic
   * {@link RuntimeException} including the document text is thrown.
   */
  @NonNls
  public static void throwNotReleasedError(@NotNull Editor editor) {
    if (editor instanceof EditorImpl) {
      ((EditorImpl)editor).throwEditorNotDisposedError("Editor of " + editor.getClass() + " hasn't been released:");
    }
    else {
      throw new RuntimeException("Editor of " + editor.getClass() +
                                 " and the following text hasn't been released:\n" + editor.getDocument().getText());
    }
  }

  @Override
  @NotNull
  public Document createDocument(char @NotNull [] text) {
    return createDocument(new CharArrayCharSequence(text));
  }

  @Override
  @NotNull
  public Document createDocument(@NotNull CharSequence text) {
    DocumentEx document = new DocumentImpl(text);
    // Register so document-level listeners added via the multicaster see this document.
    myEditorEventMulticaster.registerDocument(document);
    return document;
  }

  /**
   * Creates an empty document.
   *
   * @param allowUpdatesWithoutWriteAction whether the document may be modified outside a write action
   */
  @NotNull
  public Document createDocument(boolean allowUpdatesWithoutWriteAction) {
    DocumentEx document = new DocumentImpl("", allowUpdatesWithoutWriteAction);
    myEditorEventMulticaster.registerDocument(document);
    return document;
  }

  /**
   * Creates a document with the given text.
   *
   * @param acceptsSlashR whether {@code \r} line separators are accepted as-is
   */
  @NotNull
  public Document createDocument(@NotNull CharSequence text, boolean acceptsSlashR, boolean allowUpdatesWithoutWriteAction) {
    DocumentEx document = new DocumentImpl(text, acceptsSlashR, allowUpdatesWithoutWriteAction);
    myEditorEventMulticaster.registerDocument(document);
    return document;
  }

  @Override
  public void refreshAllEditors() {
    for (Editor editor : myEditors) {
      ((EditorEx)editor).reinitSettings();
    }
  }

  @Override
  public Editor createEditor(@NotNull Document document) {
    return createEditor(document, false, null, EditorKind.UNTYPED);
  }

  @Override
  public Editor createViewer(@NotNull Document document) {
    // A viewer is a read-only editor (isViewer = true).
    return createEditor(document, true, null, EditorKind.UNTYPED);
  }

  @Override
  public Editor createEditor(@NotNull Document document, Project project) {
    return createEditor(document, false, project, EditorKind.UNTYPED);
  }

  @Override
  public Editor createEditor(@NotNull Document document, @Nullable Project project, @NotNull EditorKind kind) {
    return createEditor(document, false, project, kind);
  }

  @Override
  public Editor createViewer(@NotNull Document document, Project project) {
    return createEditor(document, true, project, EditorKind.UNTYPED);
  }

  @Override
  public Editor createViewer(@NotNull Document document, @Nullable Project project, @NotNull EditorKind kind) {
    return createEditor(document, true, project, kind);
  }

  @Override
  public Editor createEditor(@NotNull final Document document, final Project project, @NotNull final FileType fileType, final boolean isViewer) {
    Editor editor = createEditor(document, isViewer, project, EditorKind.UNTYPED);
    // Attach a highlighter chosen from the file type.
    ((EditorEx)editor).setHighlighter(EditorHighlighterFactory.getInstance().createEditorHighlighter(project, fileType));
    return editor;
  }

  @Override
  public Editor createEditor(@NotNull Document document, Project project, @NotNull VirtualFile file, boolean isViewer) {
    Editor editor = createEditor(document, isViewer, project, EditorKind.UNTYPED);
    // Attach a highlighter chosen from the virtual file.
    ((EditorEx)editor).setHighlighter(EditorHighlighterFactory.getInstance().createEditorHighlighter(project, file));
    return editor;
  }

  @Override
  public Editor createEditor(@NotNull Document document, Project project, @NotNull VirtualFile file, boolean isViewer, @NotNull EditorKind kind) {
    Editor editor = createEditor(document, isViewer, project, kind);
    ((EditorEx)editor).setHighlighter(EditorHighlighterFactory.getInstance().createEditorHighlighter(project, file));
    return editor;
  }

  /**
   * Core factory method all public {@code createEditor}/{@code createViewer} overloads
   * funnel into: unwraps injected {@link DocumentWindow}s to their host document,
   * constructs the {@link EditorImpl}, registers it, and fires editor-created events
   * to dispatcher listeners and extension-point listeners.
   */
  private Editor createEditor(@NotNull Document document, boolean isViewer, Project project, @NotNull EditorKind kind) {
    // An injected-fragment document is backed by a host document; the editor works on the host.
    Document hostDocument = document instanceof DocumentWindow ? ((DocumentWindow)document).getDelegate() : document;
    EditorImpl editor = new EditorImpl(hostDocument, isViewer, project, kind);
    myEditors.add(editor);
    myEditorEventMulticaster.registerEditor(editor);

    EditorFactoryEvent event = new EditorFactoryEvent(this, editor);
    myEditorFactoryEventDispatcher.getMulticaster().editorCreated(event);
    EP.forEachExtensionSafe(it -> it.editorCreated(event));

    if (LOG.isDebugEnabled()) {
      LOG.debug("number of Editors after create: " + myEditors.size());
    }
    return editor;
  }

  @Override
  public void releaseEditor(@NotNull Editor editor) {
    try {
      // Fire editorReleased notifications first, while the editor is still usable.
      EditorFactoryEvent event = new EditorFactoryEvent(this, editor);
      myEditorFactoryEventDispatcher.getMulticaster().editorReleased(event);
      EP.forEachExtensionSafe(it -> it.editorReleased(event));
    }
    finally {
      try {
        // Dispose the editor even if a listener threw.
        ((EditorImpl)editor).release();
      }
      finally {
        // And always drop it from the live-editor list, even if release() threw.
        myEditors.remove(editor);
        if (LOG.isDebugEnabled()) {
          LOG.debug("number of Editors after release: " + myEditors.size());
        }
      }
    }
  }

  /**
   * Returns all live editors over {@code document}; when {@code project} is non-null,
   * only editors belonging to that project.
   */
  @Override
  public Editor @NotNull [] getEditors(@NotNull Document document, Project project) {
    List<Editor> list = null;
    for (Editor editor : myEditors) {
      if (editor.getDocument().equals(document) && (project == null || project.equals(editor.getProject()))) {
        if (list == null) list = new SmartList<>();
        list.add(editor);
      }
    }
    return list == null ? Editor.EMPTY_ARRAY : list.toArray(Editor.EMPTY_ARRAY);
  }

  @Override
  public Editor @NotNull [] getAllEditors() {
    return myEditors.toArray(Editor.EMPTY_ARRAY);
  }

  /**
   * @deprecated use {@link #addEditorFactoryListener(EditorFactoryListener, Disposable)}
   * so the listener is removed automatically with its parent disposable.
   */
  @Override
  @Deprecated
  public void addEditorFactoryListener(@NotNull EditorFactoryListener listener) {
    myEditorFactoryEventDispatcher.addListener(listener);
  }

  @Override
  public void addEditorFactoryListener(@NotNull EditorFactoryListener listener, @NotNull Disposable parentDisposable) {
    myEditorFactoryEventDispatcher.addListener(listener, parentDisposable);
  }

  /**
   * @deprecated pair of the deprecated {@link #addEditorFactoryListener(EditorFactoryListener)};
   * prefer disposable-scoped registration.
   */
  @Override
  @Deprecated
  public void removeEditorFactoryListener(@NotNull EditorFactoryListener listener) {
    myEditorFactoryEventDispatcher.removeListener(listener);
  }

  @Override
  @NotNull
  public EditorEventMulticaster getEventMulticaster() {
    return myEditorEventMulticaster;
  }

  /**
   * Wraps a {@link TypedActionHandler} and, for the duration of each typed character,
   * sets {@link EditorImpl#DISABLE_CARET_SHIFT_ON_WHITESPACE_INSERTION} on the editor
   * so raw typing is not affected by whitespace-insertion caret adjustment.
   */
  public static final class MyRawTypedHandler implements TypedActionHandlerEx {
    private final TypedActionHandler myDelegate;

    @SuppressWarnings("NonDefaultConstructor")
    public MyRawTypedHandler(TypedActionHandler delegate) {
      myDelegate = delegate;
    }

    @Override
    public void execute(@NotNull Editor editor, char charTyped, @NotNull DataContext dataContext) {
      editor.putUserData(EditorImpl.DISABLE_CARET_SHIFT_ON_WHITESPACE_INSERTION, Boolean.TRUE);
      try {
        myDelegate.execute(editor, charTyped, dataContext);
      }
      finally {
        // Always clear the flag, even if the delegate threw.
        editor.putUserData(EditorImpl.DISABLE_CARET_SHIFT_ON_WHITESPACE_INSERTION, null);
      }
    }

    @Override
    public void beforeExecute(@NotNull Editor editor, char c, @NotNull DataContext context, @NotNull ActionPlan plan) {
      if (myDelegate instanceof TypedActionHandlerEx) {
        ((TypedActionHandlerEx)myDelegate).beforeExecute(editor, c, context, plan);
      }
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.node.internal;

import org.elasticsearch.common.cli.CliToolTestCase;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Tests for {@code InternalSettingsPreparer}: system-property handling,
 * alternate config file suffixes, prompt-placeholder replacement, and
 * node-name resolution precedence.
 */
public class InternalSettingsPreparerTests extends ElasticsearchTestCase {

    // Each test runs with these system properties set; individual tests clear them as needed.
    @Before
    public void setupSystemProperties() {
        System.setProperty("es.node.zone", "foo");
        System.setProperty("name", "sys-prop-name");
    }

    @After
    public void cleanupSystemProperties() {
        System.clearProperty("es.node.zone");
        System.clearProperty("name");
    }

    @Test
    public void testIgnoreSystemProperties() {
        Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(settingsBuilder().put("node.zone", "bar").build(), true);
        // Should use setting from the system property
        assertThat(tuple.v1().get("node.zone"), equalTo("foo"));

        Settings settings = settingsBuilder()
                .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
                .put("node.zone", "bar")
                .build();
        tuple = InternalSettingsPreparer.prepareSettings(settings, true);
        // System properties are ignored, so the explicitly provided setting wins
        assertThat(tuple.v1().get("node.zone"), equalTo("bar"));
    }

    @Test
    public void testAlternateConfigFileSuffixes() {
        // test that we can read config files with .yaml, .json, and .properties suffixes
        Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(settingsBuilder()
                .put("config.ignore_system_properties", true)
                .build(), true);

        assertThat(tuple.v1().get("yaml.config.exists"), equalTo("true"));
        assertThat(tuple.v1().get("json.config.exists"), equalTo("true"));
        assertThat(tuple.v1().get("properties.config.exists"), equalTo("true"));
    }

    @Test
    public void testReplacePromptPlaceholders() {
        final List<String> replacedSecretProperties = new ArrayList<>();
        final List<String> replacedTextProperties = new ArrayList<>();
        // Terminal stub that records which settings were prompted for and returns fixed values.
        final Terminal terminal = new CliToolTestCase.MockTerminal() {
            @Override
            public char[] readSecret(String message, Object... args) {
                for (Object arg : args) {
                    replacedSecretProperties.add((String) arg);
                }
                return "replaced".toCharArray();
            }

            @Override
            public String readText(String message, Object... args) {
                for (Object arg : args) {
                    replacedTextProperties.add((String) arg);
                }
                return "text";
            }
        };

        // Only the exact SECRET_PROMPT_VALUE / TEXT_PROMPT_VALUE markers should be replaced;
        // near-miss values (the "dont.replace*" entries) must pass through untouched.
        Settings.Builder builder = settingsBuilder()
                .put("password.replace", InternalSettingsPreparer.SECRET_PROMPT_VALUE)
                .put("dont.replace", "prompt:secret")
                .put("dont.replace2", "_prompt:secret_")
                .put("dont.replace3", "_prompt:text__")
                .put("dont.replace4", "__prompt:text_")
                .put("dont.replace5", "prompt:secret__")
                .put("replace_me", InternalSettingsPreparer.TEXT_PROMPT_VALUE);
        Settings settings = builder.build();
        settings = InternalSettingsPreparer.replacePromptPlaceholders(settings, terminal);

        assertThat(replacedSecretProperties.size(), is(1));
        assertThat(replacedTextProperties.size(), is(1));
        assertThat(settings.get("password.replace"), equalTo("replaced"));
        assertThat(settings.get("replace_me"), equalTo("text"));

        // verify other values unchanged
        assertThat(settings.get("dont.replace"), equalTo("prompt:secret"));
        assertThat(settings.get("dont.replace2"), equalTo("_prompt:secret_"));
        assertThat(settings.get("dont.replace3"), equalTo("_prompt:text__"));
        assertThat(settings.get("dont.replace4"), equalTo("__prompt:text_"));
        assertThat(settings.get("dont.replace5"), equalTo("prompt:secret__"));
    }

    @Test
    public void testReplaceSecretPromptPlaceholderWithNullTerminal() {
        Settings.Builder builder = settingsBuilder()
                .put("replace_me1", InternalSettingsPreparer.SECRET_PROMPT_VALUE);
        try {
            InternalSettingsPreparer.replacePromptPlaceholders(builder.build(), null);
            fail("an exception should have been thrown since no terminal was provided!");
        } catch (UnsupportedOperationException e) {
            assertThat(e.getMessage(), containsString("with value [" + InternalSettingsPreparer.SECRET_PROMPT_VALUE + "]"));
        }
    }

    @Test
    public void testReplaceTextPromptPlaceholderWithNullTerminal() {
        Settings.Builder builder = settingsBuilder()
                .put("replace_me1", InternalSettingsPreparer.TEXT_PROMPT_VALUE);
        try {
            InternalSettingsPreparer.replacePromptPlaceholders(builder.build(), null);
            fail("an exception should have been thrown since no terminal was provided!");
        } catch (UnsupportedOperationException e) {
            assertThat(e.getMessage(), containsString("with value [" + InternalSettingsPreparer.TEXT_PROMPT_VALUE + "]"));
        }
    }

    @Test
    public void testNameSettingsPreference() {
        // Test system property overrides node.name
        Settings settings = settingsBuilder()
                .put("node.name", "node-name")
                .put("path.home", newTempDir())
                .build();
        Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(settings, true);
        assertThat(tuple.v1().get("name"), equalTo("sys-prop-name"));

        // test name in settings overrides sys prop and node.name
        settings = settingsBuilder()
                .put("name", "name-in-settings")
                .put("node.name", "node-name")
                .put("path.home", newTempDir())
                .build();
        tuple = InternalSettingsPreparer.prepareSettings(settings, true);
        assertThat(tuple.v1().get("name"), equalTo("name-in-settings"));

        // test only node.name in settings
        System.clearProperty("name");
        settings = settingsBuilder()
                .put("node.name", "node-name")
                .put("path.home", newTempDir())
                .build();
        tuple = InternalSettingsPreparer.prepareSettings(settings, true);
        assertThat(tuple.v1().get("name"), equalTo("node-name"));

        // test no name at all results in name being set
        settings = settingsBuilder()
                .put("path.home", newTempDir())
                .build();
        tuple = InternalSettingsPreparer.prepareSettings(settings, true);
        assertThat(tuple.v1().get("name"), not("name-in-settings"));
        assertThat(tuple.v1().get("name"), not("sys-prop-name"));
        assertThat(tuple.v1().get("name"), not("node-name"));
        assertThat(tuple.v1().get("name"), notNullValue());
    }

    @Test
    public void testPromptForNodeNameOnlyPromptsOnce() {
        final AtomicInteger counter = new AtomicInteger();
        // Counting terminal: each readText call returns a distinct name so repeated
        // prompting would be visible in the resulting settings.
        final Terminal terminal = new CliToolTestCase.MockTerminal() {
            @Override
            public char[] readSecret(String message, Object... args) {
                fail("readSecret should never be called by this test");
                return null;
            }

            @Override
            public String readText(String message, Object... args) {
                int count = counter.getAndIncrement();
                return "prompted name " + count;
            }
        };

        System.clearProperty("name");
        Settings settings = ImmutableSettings.builder()
                .put("path.home", newTempDir())
                .put("node.name", InternalSettingsPreparer.TEXT_PROMPT_VALUE)
                .build();
        Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(settings, false, terminal);
        settings = tuple.v1();
        // Both "name" and "node.name" must come from the single prompt.
        assertThat(counter.intValue(), is(1));
        assertThat(settings.get("name"), is("prompted name 0"));
        assertThat(settings.get("node.name"), is("prompted name 0"));
    }
}
package de.silveryard.basesystem.sdk.sound; import de.silveryard.basesystem.sdk.kernel.KernelException; import de.silveryard.basesystem.sdk.kernel.ReturnCode; import de.silveryard.basesystem.sdk.kernel.Wrapper; import de.silveryard.basesystem.sdk.kernel.sound.FmodOutputType; import de.silveryard.basesystem.sdk.kernel.sound.FmodResult; import de.silveryard.basesystem.sdk.kernel.sound.FmodSpeakerMode; import de.silveryard.basesystem.sdk.kernel.sound.SoundReturnCode; import static de.silveryard.basesystem.sdk.kernel.sound.FmodSystem.*; /** * Created by Sebif on 15.04.2017. */ public abstract class FmodSystem { private static final Wrapper<ReturnCode> returnCodeWrapper = new Wrapper<>(); private static final Wrapper<SoundReturnCode> soundReturnCodeWrapper = new Wrapper<>(); private static final Wrapper<FmodResult> fmodResultWrapper = new Wrapper<>(); /** * FMOD::System::setOutput * @param outputType * @return */ public static synchronized FmodResult setOutput(FmodOutputType outputType){ systemCallSoundFmodSystemSetOutput(outputType, returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::getOutput * @param outputType * @return */ public static synchronized FmodResult getOutput(Wrapper<FmodOutputType> outputType){ systemCallSoundFmodSystemGetOutput(returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper, outputType); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::getNumDrivers * @param numDrivers * @return */ public static synchronized FmodResult 
getNumDrivers(Wrapper<Integer> numDrivers){ systemCallSoundFmodSystemGetNumDrivers(returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper, numDrivers); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::getDriverInfo * @param driver * @param name * @param guid * @param systemRate * @param speakerMode * @param speakerModeChannels * @return */ public static synchronized FmodResult getDriverInfo( int driver, Wrapper<String> name, Wrapper<String> guid, Wrapper<Integer> systemRate, Wrapper<FmodSpeakerMode> speakerMode, Wrapper<Integer> speakerModeChannels ){ systemCallSoundFmodSystemGetDriverInfo(driver, returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper, name, guid, systemRate, speakerMode, speakerModeChannels); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::setDriver * @param driver * @return */ public static synchronized FmodResult setDriver(int driver){ systemCallSoundFmodSystemSetDriver(driver, returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::getDriver * @param driver * @return */ public static synchronized FmodResult getDriver(Wrapper<Integer> driver){ systemCallSoundFmodSystemGetDriver(returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper, driver); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new 
SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::createSound * @param nameOrData * @param mode * @param exInfo * @param sound * @return */ public static synchronized FmodResult createSound( String nameOrData, int mode, FmodCreateSoundExInfo exInfo, FmodSound sound ){ systemCallSoundFmodSystemCreateSound(nameOrData, mode, exInfo == null ? -1 : exInfo.getId(), sound.getId(), returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::createStream * @param nameOrData * @param mode * @param exInfo * @param sound * @return */ public static synchronized FmodResult createStream( String nameOrData, int mode, FmodCreateSoundExInfo exInfo, FmodSound sound ){ systemCallSoundFmodSystemCreateStream(nameOrData, mode, exInfo == null ? 
- 1 : exInfo.getId(), sound.getId(), returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::playSound * @param sound * @param paused * @param channel * @return */ public static synchronized FmodResult playSound(FmodSound sound, boolean paused, FmodChannel channel){ systemCallSoundFmodSystemPlaySound(sound.getId(), paused, channel.getId(), returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::getRecordNumDrivers * @param numDrivers * @param numConnected * @return */ public static synchronized FmodResult getRecordNumDrivers(Wrapper<Integer> numDrivers, Wrapper<Integer> numConnected){ systemCallSoundFmodSystemGetRecordNumDrivers(returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper, numDrivers, numConnected); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::getRecordDriverInfo * @param driver * @param name * @param guid * @param systemRate * @param speakerMode * @param speakerModeChannels * @param state * @return */ public static synchronized FmodResult getRecordDriverInfo( int driver, Wrapper<String> name, Wrapper<String> guid, Wrapper<Integer> systemRate, Wrapper<FmodSpeakerMode> speakerMode, Wrapper<Integer> speakerModeChannels, Wrapper<Integer> state ){ 
systemCallSoundFmodSystemGetRecordDriverInfo(driver, returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper, name, guid, systemRate, speakerMode, speakerModeChannels, state); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::getRecordPosition * @param driver * @param position * @return */ public static synchronized FmodResult getRecordPosition(int driver, Wrapper<Integer> position){ systemCallSoundFmodSystemGetRecordPosition(driver, returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper, position); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::recordStart * @param driver * @param sound * @param loop * @return */ public static synchronized FmodResult recordStart(int driver, FmodSound sound, boolean loop){ systemCallSoundFmodSystemRecordStart(driver, sound.getId(), loop, returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::recordStop * @param driver * @return */ public static synchronized FmodResult recordStop(int driver){ systemCallSoundFmodSystemRecordStop(driver, returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new 
KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } /** * FMOD::System::isRecording * @param driver * @param isRecording * @return */ public static synchronized FmodResult isRecording(int driver, Wrapper<Boolean> isRecording){ systemCallSoundFmodSystemIsRecording(driver, returnCodeWrapper, soundReturnCodeWrapper, fmodResultWrapper, isRecording); if(soundReturnCodeWrapper.value != SoundReturnCode.OK){ throw new SoundKernelException(soundReturnCodeWrapper.value); } if(returnCodeWrapper.value != ReturnCode.OK){ throw new KernelException(returnCodeWrapper.value); } return fmodResultWrapper.value; } }
package org.a5calls.android.a5calls.net;

import android.content.Context;
import android.util.Log;

import com.android.volley.AuthFailureError;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.JsonObjectRequest;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;

import org.a5calls.android.a5calls.BuildConfig;
import org.a5calls.android.a5calls.model.Issue;
import org.a5calls.android.a5calls.model.Outcome;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.UnsupportedEncodingException;
import java.lang.reflect.Type;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Class to handle server gets and posts against the 5calls.org API.
 *
 * <p>Callers register {@link CallRequestListener}s and {@link IssuesRequestListener}s to be
 * notified of results; all callbacks are delivered on Volley's main-thread delivery mechanism.
 */
public class FiveCallsApi {
    private static final String TAG = "FiveCallsApi";

    // Set TESTING "true" to set a parameter to the count call request which marks it as a test
    // request on the server. This will only work on debug builds.
    private static final boolean TESTING = true;

    private static final String GET_ISSUES_REQUEST = "https://5calls.org/issues/?address=";
    private static final String GET_REPORT = "https://5calls.org/report";

    /** Callbacks for call-count queries and call-report posts. */
    public interface CallRequestListener {
        void onRequestError();
        void onJsonError();
        void onCallCount(int count);
        void onCallReported();
    }

    /** Callbacks for issue-list queries. */
    public interface IssuesRequestListener {
        void onRequestError();
        void onJsonError();
        void onAddressError();
        void onIssuesReceived(String locationName, boolean splitDistrict, List<Issue> issues);
    }

    // Nulled out in onDestroy(); any request made after that will NPE, so callers must not use
    // this object past its lifecycle. NOTE(review): consider a null guard — TODO confirm lifecycle.
    private RequestQueue mRequestQueue;
    private Gson mGson;
    private List<CallRequestListener> mCallRequestListeners = new ArrayList<>();
    private List<IssuesRequestListener> mIssuesRequestListeners = new ArrayList<>();

    public FiveCallsApi(Context context) {
        // TODO: Using OkHttpClient and OkHttpStack cause failures on multiple types of Samsung
        // Galaxy devices.
        //mRequestQueue = Volley.newRequestQueue(context, new OkHttpStack(new OkHttpClient()));
        mRequestQueue = Volley.newRequestQueue(context);
        mGson = new GsonBuilder()
                .serializeNulls()
                .registerTypeAdapter(Outcome.Status.class, new OutcomeStatusTypeAdapter())
                .create();
    }

    public void registerCallRequestListener(CallRequestListener callRequestListener) {
        mCallRequestListeners.add(callRequestListener);
    }

    public void unregisterCallRequestListener(CallRequestListener callRequestListener) {
        // List.remove is already a no-op when the listener is absent; no contains() check needed.
        mCallRequestListeners.remove(callRequestListener);
    }

    public void registerIssuesRequestListener(IssuesRequestListener issuesRequestListener) {
        mIssuesRequestListeners.add(issuesRequestListener);
    }

    public void unregisterIssuesRequestListener(IssuesRequestListener issuesRequestListener) {
        mIssuesRequestListeners.remove(issuesRequestListener);
    }

    /** Cancels all pending requests and releases the queue. The object is unusable afterwards. */
    public void onDestroy() {
        mRequestQueue.cancelAll(TAG);
        mRequestQueue.stop();
        mRequestQueue = null;
    }

    /** Fetches the active issues for {@code address} and notifies all registered listeners. */
    public void getIssuesForLocation(String address) {
        String url = GET_ISSUES_REQUEST + encodeUtf8(address) + "&all=true";
        buildIssuesRequest(url, mIssuesRequestListeners);
    }

    /** Fetches inactive issues for {@code address}, notifying only the given listener. */
    public void getInactiveIssuesForLocation(String address, IssuesRequestListener listener) {
        String url = GET_ISSUES_REQUEST + encodeUtf8(address) + "&inactive=true";
        List<IssuesRequestListener> list = Collections.singletonList(listener);
        buildIssuesRequest(url, list);
    }

    /**
     * URL-encodes {@code value} with an explicit UTF-8 charset. The previous one-arg
     * {@link URLEncoder#encode(String)} overload is deprecated and uses the platform default
     * charset, which is not guaranteed to be UTF-8 on all devices.
     */
    private static String encodeUtf8(String value) {
        try {
            return URLEncoder.encode(value, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is required to be supported by every Java/Android runtime.
            throw new AssertionError("UTF-8 not supported", e);
        }
    }

    /** Issues a GET for {@code url} and fans the parsed result out to {@code listeners}. */
    private void buildIssuesRequest(String url, final List<IssuesRequestListener> listeners) {
        // Request a JSON Object response from the provided URL.
        JsonObjectRequest statusRequest = new JsonObjectRequest(
                Request.Method.GET, url, null, new Response.Listener<JSONObject>() {
            @Override
            public void onResponse(JSONObject response) {
                if (response != null) {
                    String locationName = "";
                    boolean splitDistrict = false;
                    try {
                        if (response.getBoolean("invalidAddress")) {
                            for (IssuesRequestListener listener : listeners) {
                                listener.onAddressError();
                            }
                            return;
                        }
                        locationName = response.getString("normalizedLocation");
                        splitDistrict = response.getBoolean("splitDistrict");
                    } catch (JSONException e) {
                        // Deliberately best-effort: fall through with the defaults above and
                        // still try to deliver the issues list.
                        e.printStackTrace();
                    }
                    JSONArray jsonArray = response.optJSONArray("issues");
                    if (jsonArray == null) {
                        for (IssuesRequestListener listener : listeners) {
                            listener.onJsonError();
                        }
                        return;
                    }
                    Type listType = new TypeToken<ArrayList<Issue>>(){}.getType();
                    List<Issue> issues = mGson.fromJson(jsonArray.toString(), listType);
                    issues = Outcome.filterSkipOutcomes(issues);
                    // TODO: Sanitize contact IDs here
                    for (IssuesRequestListener listener : listeners) {
                        listener.onIssuesReceived(locationName, splitDistrict, issues);
                    }
                }
            }
        }, new Response.ErrorListener() {
            @Override
            public void onErrorResponse(VolleyError error) {
                for (IssuesRequestListener listener : listeners) {
                    listener.onRequestError();
                }
            }
        });
        statusRequest.setTag(TAG);
        // Add the request to the RequestQueue.
        mRequestQueue.add(statusRequest);
    }

    /** Fetches the total call count from the report endpoint. */
    public void getCallCount() {
        String getReport = GET_REPORT;
        JsonObjectRequest reportRequest = new JsonObjectRequest(
                Request.Method.GET, getReport, null, new Response.Listener<JSONObject>() {
            @Override
            public void onResponse(JSONObject response) {
                try {
                    int count = response.getInt("count");
                    for (CallRequestListener listener : mCallRequestListeners) {
                        listener.onCallCount(count);
                    }
                } catch (JSONException e) {
                    for (CallRequestListener listener : mCallRequestListeners) {
                        listener.onJsonError();
                    }
                    e.printStackTrace();
                }
            }
        }, new Response.ErrorListener() {
            @Override
            public void onErrorResponse(VolleyError error) {
                onRequestError(error);
            }
        });
        reportRequest.setTag(TAG); // TODO: same tag OK?
        // Add the request to the RequestQueue.
        mRequestQueue.add(reportRequest);
    }

    // Result is "VOICEMAIL", "unavailable", or "contacted"
    // https://github.com/5calls/5calls/blob/master/static/js/main.js#L221
    public void reportCall(final String issueId, final String contactId, final String result,
                           final String zip) {
        String getReport = GET_REPORT;
        StringRequest request = new StringRequest(Request.Method.POST, getReport,
                new Response.Listener<String>() {
            @Override
            public void onResponse(String response) {
                for (CallRequestListener listener : mCallRequestListeners) {
                    listener.onCallReported();
                }
            }
        }, new Response.ErrorListener() {
            @Override
            public void onErrorResponse(VolleyError error) {
                onRequestError(error);
            }
        }) {
            @Override
            protected Map<String, String> getParams() {
                Map<String, String> params = new HashMap<>();
                params.put("issueid", issueId);
                params.put("result", result);
                params.put("contactid", contactId);
                params.put("location", zip);
                // Debug builds with TESTING flag mark the report as a test on the server.
                params.put("via", (BuildConfig.DEBUG && TESTING) ? "test" : "android");
                return params;
            }

            @Override
            public Map<String, String> getHeaders() throws AuthFailureError {
                Map<String, String> params = new HashMap<>();
                params.put("Content-Type", "application/x-www-form-urlencoded");
                return params;
            }
        };
        request.setTag(TAG);
        // Add the request to the RequestQueue.
        mRequestQueue.add(request);
    }

    /** Notifies all call-request listeners of a network error and logs the message if present. */
    private void onRequestError(VolleyError error) {
        for (CallRequestListener listener : mCallRequestListeners) {
            listener.onRequestError();
        }
        if (error.getMessage() == null) {
            Log.d("Error", "no message");
        } else {
            Log.d("Error", error.getMessage());
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.query.filter.sql;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.BloomFilterExtensionModule;
import org.apache.druid.guice.BloomFilterSerializersModule;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.expression.LookupEnabledTestExprMacroTable;
import org.apache.druid.query.expression.LookupExprMacro;
import org.apache.druid.query.expressions.BloomFilterExprMacro;
import org.apache.druid.query.filter.BloomDimFilter;
import org.apache.druid.query.filter.BloomKFilter;
import org.apache.druid.query.filter.BloomKFilterHolder;
import org.apache.druid.query.filter.ExpressionDimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.lookup.LookupReferencesManager;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.QueryLogHook;
import org.junit.Rule;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * SQL-layer tests for the {@code bloom_filter_test} operator provided by the bloom-filter
 * extension. Each test serializes a {@link BloomKFilter} to base64, embeds it in a SQL query
 * against the standard test datasource, and checks both the planned native query and the result.
 */
public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
{
  // Injector wired with the bloom-filter extension module plus a test lookup ("a"->"xa",
  // "abc"->"xabc") so lookup-based expressions can be planned.
  private static final Injector injector = Guice.createInjector(
      binder -> {
        binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper());
        binder.bind(LookupReferencesManager.class).toInstance(
            LookupEnabledTestExprMacroTable.createTestLookupReferencesManager(
                ImmutableMap.of(
                    "a", "xa",
                    "abc", "xabc"
                )
            )
        );
      },
      new BloomFilterExtensionModule()
  );

  // Mapper with the bloom-filter serializers registered; used for query logging/serialization.
  private static ObjectMapper jsonMapper = injector
      .getInstance(Key.get(ObjectMapper.class, Json.class))
      .registerModules(Collections.singletonList(new BloomFilterSerializersModule()));

  /**
   * Builds an {@link ExprMacroTable} containing the standard expression macros plus the
   * bloom-filter and lookup macros, all instantiated through the test injector.
   */
  public static ExprMacroTable createExprMacroTable()
  {
    final List<ExprMacroTable.ExprMacro> exprMacros = new ArrayList<>();
    for (Class<? extends ExprMacroTable.ExprMacro> clazz : ExpressionModule.EXPR_MACROS) {
      exprMacros.add(injector.getInstance(clazz));
    }
    exprMacros.add(injector.getInstance(BloomFilterExprMacro.class));
    exprMacros.add(injector.getInstance(LookupExprMacro.class));
    return new ExprMacroTable(exprMacros);
  }

  // Overrides the base-class hook so logged queries use the mapper that understands
  // bloom-filter JSON.
  @Rule
  @Override
  public QueryLogHook getQueryLogHook()
  {
    return queryLogHook = QueryLogHook.create(jsonMapper);
  }

  // Simple column filter: bloom_filter_test(dim1, ...) should plan to a native BloomDimFilter.
  @Test
  public void testBloomFilter() throws Exception
  {
    BloomKFilter filter = new BloomKFilter(1500);
    filter.addString("def");
    byte[] bytes = BloomFilterSerializersModule.bloomKFilterToBytes(filter);
    String base64 = StringUtils.encodeBase64String(bytes);

    testQuery(
        StringUtils.format("SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(dim1, '%s')", base64),
        ImmutableList.of(
            Druids.newTimeseriesQueryBuilder()
                  .dataSource(CalciteTests.DATASOURCE1)
                  .intervals(QSS(Filtration.eternity()))
                  .granularity(Granularities.ALL)
                  .filters(
                      new BloomDimFilter("dim1", BloomKFilterHolder.fromBloomKFilter(filter), null)
                  )
                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
                  .context(TIMESERIES_CONTEXT_DEFAULT)
                  .build()
        ),
        ImmutableList.of(
            new Object[]{1L}
        )
    );
  }

  // Expression input: a non-column argument cannot use the native BloomDimFilter, so the plan
  // falls back to an ExpressionDimFilter wrapping bloom_filter_test.
  @Test
  public void testBloomFilterVirtualColumn() throws Exception
  {
    BloomKFilter filter = new BloomKFilter(1500);
    filter.addString("a-foo");
    filter.addString("-foo");
    // In SQL-compatible null handling, concat(null, '-foo') stays null, so the filter must also
    // match the null (empty-bytes) entry to keep the expected count.
    if (!NullHandling.replaceWithDefault()) {
      filter.addBytes(null, 0, 0);
    }
    byte[] bytes = BloomFilterSerializersModule.bloomKFilterToBytes(filter);
    String base64 = StringUtils.encodeBase64String(bytes);

    testQuery(
        StringUtils.format("SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(concat(dim2, '-foo'), '%s')", base64),
        ImmutableList.of(
            Druids.newTimeseriesQueryBuilder()
                  .dataSource(CalciteTests.DATASOURCE1)
                  .intervals(QSS(Filtration.eternity()))
                  .granularity(Granularities.ALL)
                  .virtualColumns()
                  .filters(
                      new ExpressionDimFilter(
                          StringUtils.format("bloom_filter_test(concat(\"dim2\",'-foo'),'%s')", base64),
                          createExprMacroTable()
                      )
                  )
                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
                  .context(TIMESERIES_CONTEXT_DEFAULT)
                  .build()
        ),
        ImmutableList.of(
            new Object[]{5L}
        )
    );
  }

  // Numeric expression input: same expression-filter fallback, with a double-valued filter entry.
  @Test
  public void testBloomFilterVirtualColumnNumber() throws Exception
  {
    BloomKFilter filter = new BloomKFilter(1500);
    filter.addDouble(20.2);
    byte[] bytes = BloomFilterSerializersModule.bloomKFilterToBytes(filter);
    String base64 = StringUtils.encodeBase64String(bytes);

    testQuery(
        StringUtils.format("SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(2 * CAST(dim1 AS float), '%s')", base64),
        ImmutableList.of(
            Druids.newTimeseriesQueryBuilder()
                  .dataSource(CalciteTests.DATASOURCE1)
                  .intervals(QSS(Filtration.eternity()))
                  .granularity(Granularities.ALL)
                  .virtualColumns()
                  .filters(
                      new ExpressionDimFilter(
                          StringUtils.format("bloom_filter_test((2 * CAST(\"dim1\", 'DOUBLE')),'%s')", base64),
                          createExprMacroTable()
                      )
                  )
                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
                  .context(TIMESERIES_CONTEXT_DEFAULT)
                  .build()
        ),
        ImmutableList.of(
            new Object[]{1L}
        )
    );
  }

  // Two bloom filters OR-ed together should plan to an OrDimFilter of two BloomDimFilters.
  @Test
  public void testBloomFilters() throws Exception
  {
    BloomKFilter filter = new BloomKFilter(1500);
    filter.addString("def");
    BloomKFilter filter2 = new BloomKFilter(1500);
    // NOTE(review): this adds "abc" to `filter`, not `filter2`, leaving `filter2` empty.
    // Looks like a typo for filter2.addString("abc") — but "fixing" it would change the
    // serialized filters AND the expected count (the "def"/"abc" rows overlap on dim1/dim2),
    // so verify against the fixture data before changing. TODO confirm intent.
    filter.addString("abc");
    byte[] bytes = BloomFilterSerializersModule.bloomKFilterToBytes(filter);
    byte[] bytes2 = BloomFilterSerializersModule.bloomKFilterToBytes(filter2);
    String base64 = StringUtils.encodeBase64String(bytes);
    String base642 = StringUtils.encodeBase64String(bytes2);

    testQuery(
        StringUtils.format("SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(dim1, '%s') OR bloom_filter_test(dim2, '%s')", base64, base642),
        ImmutableList.of(
            Druids.newTimeseriesQueryBuilder()
                  .dataSource(CalciteTests.DATASOURCE1)
                  .intervals(QSS(Filtration.eternity()))
                  .granularity(Granularities.ALL)
                  .filters(
                      new OrDimFilter(
                          new BloomDimFilter("dim1", BloomKFilterHolder.fromBloomKFilter(filter), null),
                          new BloomDimFilter("dim2", BloomKFilterHolder.fromBloomKFilter(filter2), null)
                      )
                  )
                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
                  .context(TIMESERIES_CONTEXT_DEFAULT)
                  .build()
        ),
        ImmutableList.of(
            new Object[]{2L}
        )
    );
  }

  /**
   * Runs the SQL through a planner whose operator table includes only the bloom-filter operator
   * conversion, so the extension operator is resolvable during planning.
   */
  @Override
  public List<Object[]> getResults(
      final PlannerConfig plannerConfig,
      final Map<String, Object> queryContext,
      final String sql,
      final AuthenticationResult authenticationResult
  ) throws Exception
  {
    final DruidOperatorTable operatorTable = new DruidOperatorTable(
        ImmutableSet.of(),
        ImmutableSet.of(injector.getInstance(BloomFilterOperatorConversion.class))
    );
    return getResults(
        plannerConfig,
        queryContext,
        sql,
        authenticationResult,
        operatorTable,
        createExprMacroTable(),
        CalciteTests.TEST_AUTHORIZER_MAPPER,
        jsonMapper
    );
  }
}
/* * Copyright 2014 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.javascript.jscomp.Es6ToEs3Converter.makeIterator; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableSet; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.JSDocInfoBuilder; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Converts ES6 generator functions to valid ES3 code. This pass runs after all ES6 features * except for yield and generators have been transpiled. * * @author mattloring@google.com (Matthew Loring) */ public final class Es6RewriteGenerators extends NodeTraversal.AbstractPostOrderCallback implements HotSwapCompilerPass { // Name of the variable that holds the state at which the generator // should resume execution after a call to yield or return. // The beginning state is 0 and the end state is -1. 
  // Holds the state-machine state variable name; see the class comment on L69 for semantics
  // (0 = start, -1 = done).
  private static final String GENERATOR_STATE = "$jscomp$generator$state";
  // Flag marking the first pass through a translated do-while loop.
  private static final String GENERATOR_DO_WHILE_INITIAL = "$jscomp$generator$first$do";
  // Name of the delegate iterator used when translating `yield*` expressions.
  private static final String GENERATOR_YIELD_ALL_NAME = "$jscomp$generator$yield$all";
  // Name of the {value, done} entry pulled from the delegate iterator on each step.
  private static final String GENERATOR_YIELD_ALL_ENTRY = "$jscomp$generator$yield$entry";
  // Captured copy of `arguments`, since the rewritten body runs in a different function.
  private static final String GENERATOR_ARGUMENTS = "$jscomp$generator$arguments";
  // Captured copy of `this`, for the same reason as GENERATOR_ARGUMENTS.
  private static final String GENERATOR_THIS = "$jscomp$generator$this";
  // Argument passed to the generator's next() call.
  private static final String GENERATOR_NEXT_ARG = "$jscomp$generator$next$arg";
  // Argument passed to the generator's throw() call.
  private static final String GENERATOR_THROW_ARG = "$jscomp$generator$throw$arg";
  // Tracks fall-through when a switch statement is rewritten as a chain of ifs.
  private static final String GENERATOR_SWITCH_ENTERED = "$jscomp$generator$switch$entered";
  // Temporary holding the evaluated switch discriminant.
  private static final String GENERATOR_SWITCH_VAL = "$jscomp$generator$switch$val";
  // State number to jump to after a finally block completes.
  private static final String GENERATOR_FINALLY_JUMP = "$jscomp$generator$finally";
  // Variable carrying the caught error across state-machine cases.
  private static final String GENERATOR_ERROR = "$jscomp$generator$global$error";
  // Temporaries used when rewriting for-in loops (key snapshot array, loop var, index).
  private static final String GENERATOR_FOR_IN_ARRAY = "$jscomp$generator$forin$array";
  private static final String GENERATOR_FOR_IN_VAR = "$jscomp$generator$forin$var";
  private static final String GENERATOR_FOR_IN_ITER = "$jscomp$generator$forin$iter";
  // Guard variable for translated loop conditions.
  private static final String GENERATOR_LOOP_GUARD = "$jscomp$generator$loop$guard";

  private final AbstractCompiler compiler;

  // Maintains a stack of numbers which identify the cases which mark the end of loops. These
  // are used to manage jump destinations for break and continue statements.
  private final List<LoopContext> currentLoopContext;

  // Stack of enclosing try/catch contexts, used to route thrown errors to the right catch case.
  private final List<ExceptionContext> currentExceptionContext;

  // Next free case number in the generated state-machine switch.
  // NOTE(review): static, so shared across instances — presumably the pass is single-threaded;
  // TODO confirm.
  private static int generatorCaseCount;

  // Supplies unique suffixes for generated variable names.
  private Supplier<String> generatorCounter;

  // Current case statement onto which translated statements from the
  // body of a generator will be appended.
  private Node enclosingBlock;

  // Destination for vars defined in the body of a generator.
private Node hoistRoot; // Body of the generator function currently being translated. private Node originalGeneratorBody; // Current statement being translated. private Node currentStatement; private boolean hasTranslatedTry; public Es6RewriteGenerators(AbstractCompiler compiler) { Preconditions.checkNotNull(compiler); this.compiler = compiler; this.currentLoopContext = new ArrayList<>(); this.currentExceptionContext = new ArrayList<>(); generatorCounter = compiler.getUniqueNameIdSupplier(); } @Override public void process(Node externs, Node root) { NodeTraversal.traverseEs6(compiler, root, new DecomposeYields(compiler)); NodeTraversal.traverseEs6(compiler, root, this); } @Override public void hotSwapScript(Node scriptRoot, Node originalRoot) { NodeTraversal.traverseEs6(compiler, scriptRoot, new DecomposeYields(compiler)); NodeTraversal.traverseEs6(compiler, scriptRoot, this); } @Override public void visit(NodeTraversal t, Node n, Node parent) { switch (n.getType()) { case Token.FUNCTION: if (n.isGeneratorFunction()) { generatorCaseCount = 0; visitGenerator(n, parent); } break; case Token.NAME: Node enclosing = NodeUtil.getEnclosingFunction(n); if (enclosing != null && enclosing.isGeneratorFunction() && n.matchesQualifiedName("arguments")) { n.setString(GENERATOR_ARGUMENTS); } break; case Token.THIS: enclosing = NodeUtil.getEnclosingFunction(n); if (enclosing != null && enclosing.isGeneratorFunction()) { n.getParent().replaceChild(n, IR.name(GENERATOR_THIS)); } break; case Token.YIELD: if (n.isYieldFor()) { visitYieldFor(t, n, parent); } else if (!parent.isExprResult()) { visitYieldExpr(n, parent); } else { visitYieldThrows(parent, parent.getParent()); } break; default: break; } } private void visitYieldThrows(Node n, Node parent) { Node ifThrows = IR.ifNode( IR.shne(IR.name(GENERATOR_THROW_ARG), IR.name("undefined")), IR.block(IR.throwNode(IR.name(GENERATOR_THROW_ARG)))); parent.addChildAfter(ifThrows, n); compiler.reportCodeChange(); } /** * Translates 
expressions using the new yield-for syntax. * * <p>Sample translation: * * <pre> * var i = yield * gen(); * </pre> * * <p>Is rewritten to: * * <pre> * var $jscomp$generator$yield$all = gen(); * var $jscomp$generator$yield$entry; * while (!($jscomp$generator$yield$entry = * $jscomp$generator$yield$all.next($jscomp$generator$next$arg)).done) { * yield $jscomp$generator$yield$entry.value; * } * var i = $jscomp$generator$yield$entry.value; * </pre> */ private void visitYieldFor(NodeTraversal t, Node n, Node parent) { Node enclosingStatement = NodeUtil.getEnclosingStatement(n); Node generator = IR.var( IR.name(GENERATOR_YIELD_ALL_NAME), makeIterator(t, compiler, n.removeFirstChild())); Node entryDecl = IR.var(IR.name(GENERATOR_YIELD_ALL_ENTRY)); Node assignIterResult = IR.assign( IR.name(GENERATOR_YIELD_ALL_ENTRY), IR.call( IR.getprop(IR.name(GENERATOR_YIELD_ALL_NAME), IR.string("next")), IR.name(GENERATOR_NEXT_ARG))); Node loopCondition = IR.not(IR.getprop(assignIterResult, IR.string("done"))); Node elemValue = IR.getprop(IR.name(GENERATOR_YIELD_ALL_ENTRY), IR.string("value")); Node yieldStatement = IR.exprResult(IR.yield(elemValue.cloneTree())); Node loop = IR.whileNode(loopCondition, IR.block(yieldStatement)); enclosingStatement.getParent().addChildBefore(generator, enclosingStatement); enclosingStatement.getParent().addChildBefore(entryDecl, enclosingStatement); enclosingStatement.getParent().addChildBefore(loop, enclosingStatement); if (parent.isExprResult()) { parent.detachFromParent(); } else { parent.replaceChild(n, elemValue); } visitYieldThrows(yieldStatement, yieldStatement.getParent()); compiler.reportCodeChange(); } private void visitYieldExpr(Node n, Node parent) { Node enclosingStatement = NodeUtil.getEnclosingStatement(n); Node yieldStatement = IR.exprResult(n.hasChildren() ? 
IR.yield(n.removeFirstChild()) : IR.yield()); Node yieldResult = IR.name(GENERATOR_NEXT_ARG + generatorCounter.get()); Node yieldResultDecl = IR.var(yieldResult.cloneTree(), IR.name(GENERATOR_NEXT_ARG)); parent.replaceChild(n, yieldResult); enclosingStatement.getParent().addChildBefore(yieldStatement, enclosingStatement); enclosingStatement.getParent().addChildBefore(yieldResultDecl, enclosingStatement); visitYieldThrows(yieldStatement, yieldStatement.getParent()); compiler.reportCodeChange(); } private void visitGenerator(Node n, Node parent) { compiler.needsEs6Runtime = true; hasTranslatedTry = false; Node genBlock = compiler .parseSyntheticCode(Joiner.on('\n').join( // TODO(dimvar): Remove annotation once Iterable is a @record and NTI can // handle @record. "/** @return {!Iterable<?>} */", "function generatorBody() {", " var " + GENERATOR_STATE + " = " + generatorCaseCount + ";", " function $jscomp$generator$impl(" + GENERATOR_NEXT_ARG + ", ", " " + GENERATOR_THROW_ARG + ") {", " while (1) switch (" + GENERATOR_STATE + ") {", " case " + generatorCaseCount + ":", " default:", " return {value: undefined, done: true};", " }", " }", " var iterator = {", " next: function(arg) { return $jscomp$generator$impl(arg, undefined); },", " throw: function(arg) { return $jscomp$generator$impl(undefined, arg); },", " };", " $jscomp.initSymbolIterator();", " iterator[Symbol.iterator] = function() { return this; };", " return /** @type {!Iterable<?>} */ (iterator);", "}")) .getFirstChild() .getLastChild() .detachFromParent(); generatorCaseCount++; originalGeneratorBody = n.getLastChild(); n.replaceChild(originalGeneratorBody, genBlock); n.setIsGeneratorFunction(false); // TODO(mattloring): remove this suppression once we can optimize the switch statement to // remove unused cases. JSDocInfoBuilder builder = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo()); // TODO(mattloring): copy existing suppressions. 
builder.recordSuppressions(ImmutableSet.of("uselessCode")); JSDocInfo info = builder.build(); n.setJSDocInfo(info); // Set state to the default after the body of the function has completed. originalGeneratorBody.addChildToBack( IR.exprResult(IR.assign(IR.name(GENERATOR_STATE), IR.number(-1)))); enclosingBlock = getUnique(genBlock, Token.CASE).getLastChild(); hoistRoot = genBlock.getFirstChild(); if (NodeUtil.isNameReferenced(originalGeneratorBody, GENERATOR_ARGUMENTS)) { hoistRoot .getParent() .addChildAfter(IR.var(IR.name(GENERATOR_ARGUMENTS), IR.name("arguments")), hoistRoot); } if (NodeUtil.isNameReferenced(originalGeneratorBody, GENERATOR_THIS)) { hoistRoot .getParent() .addChildAfter(IR.var(IR.name(GENERATOR_THIS), IR.thisNode()), hoistRoot); } while (originalGeneratorBody.hasChildren()) { currentStatement = originalGeneratorBody.removeFirstChild(); boolean advanceCase = translateStatementInOriginalBody(); if (advanceCase) { int caseNumber; if (currentStatement.isGeneratorMarker()) { caseNumber = (int) currentStatement.getFirstChild().getDouble(); } else { caseNumber = generatorCaseCount; generatorCaseCount++; } Node oldCase = enclosingBlock.getParent(); Node newCase = IR.caseNode(IR.number(caseNumber), IR.block()); enclosingBlock = newCase.getLastChild(); if (oldCase.isTry()) { oldCase = oldCase.getParent().getParent(); if (!currentExceptionContext.isEmpty()) { Node newTry = IR.tryCatch(IR.block(), currentExceptionContext.get(0).catchBlock.cloneTree()); newCase.getLastChild().addChildToBack(newTry); enclosingBlock = newCase.getLastChild().getLastChild().getFirstChild(); } } oldCase.getParent().addChildAfter(newCase, oldCase); } } parent.useSourceInfoIfMissingFromForTree(parent); compiler.reportCodeChange(); } /** Returns {@code true} if a new case node should be added */ private boolean translateStatementInOriginalBody() { if (currentStatement.isVar()) { visitVar(); return false; } else if (currentStatement.isGeneratorMarker()) { visitGeneratorMarker(); 
return true; } else if (currentStatement.isFunction()) { visitFunctionStatement(); return false; } else if (currentStatement.isBlock()) { visitBlock(); return false; } else if (controlCanExit(currentStatement)) { switch (currentStatement.getType()) { case Token.WHILE: case Token.DO: case Token.FOR: if (NodeUtil.isForIn(currentStatement)) { visitForIn(); return false; } visitLoop(null); return false; case Token.LABEL: visitLabel(); return false; case Token.SWITCH: visitSwitch(); return false; case Token.IF: if (!currentStatement.isGeneratorSafe()) { visitIf(); return false; } break; case Token.TRY: visitTry(); return false; case Token.EXPR_RESULT: if (currentStatement.getFirstChild().isYield()) { visitYieldExprResult(); return true; } break; case Token.RETURN: visitReturn(); return false; case Token.CONTINUE: visitContinue(); return false; case Token.BREAK: if (!currentStatement.isGeneratorSafe()) { visitBreak(); return false; } break; case Token.THROW: visitThrow(); return false; default: // We never want to copy over an untranslated statement for which control exits. throw new RuntimeException( "Untranslatable control-exiting statement in generator function: " + Token.name(currentStatement.getType())); } } // In the default case, add the statement to the current case block unchanged. 
enclosingBlock.addChildToBack(currentStatement); return false; } private void visitFunctionStatement() { hoistRoot.getParent().addChildAfter(currentStatement, hoistRoot); } private void visitTry() { Node tryBody = currentStatement.getFirstChild(); Node caughtError; Node catchBody; Node catchBlock = tryBody.getNext(); if (catchBlock.hasChildren()) { // There is a catch block caughtError = catchBlock.getFirstChild().removeFirstChild(); catchBody = catchBlock.getFirstChild().removeFirstChild(); } else { caughtError = IR.name(GENERATOR_ERROR + "temp"); catchBody = IR.block(IR.throwNode(caughtError.cloneTree())); catchBody.getFirstChild().setGeneratorSafe(true); } Node finallyBody = catchBlock.getNext(); int catchStartState = generatorCaseCount++; Node catchStart = makeGeneratorMarker(catchStartState); Node errorNameGenerated = IR.name("$jscomp$generator$" + caughtError.getString()); originalGeneratorBody.addChildToFront(catchStart); originalGeneratorBody.addChildAfter(catchBody, catchStart); Node assignError = IR.assign(IR.name(GENERATOR_ERROR), errorNameGenerated.cloneTree()); Node newCatchBody = IR.block(IR.exprResult(assignError), createStateUpdate(catchStartState), createSafeBreak()); Node newCatch = IR.catchNode(errorNameGenerated, newCatchBody); currentExceptionContext.add(0, new ExceptionContext(catchStartState, newCatch)); if (finallyBody != null) { Node finallyName = IR.name(GENERATOR_FINALLY_JUMP + generatorCounter.get()); int finallyStartState = generatorCaseCount++; Node finallyStart = makeGeneratorMarker(finallyStartState); int finallyEndState = generatorCaseCount++; Node finallyEnd = makeGeneratorMarker(finallyEndState); NodeTraversal.traverseEs6( compiler, tryBody, new ControlExitsCheck(finallyName, finallyStartState)); NodeTraversal.traverseEs6( compiler, catchBody, new ControlExitsCheck(finallyName, finallyStartState)); originalGeneratorBody.addChildToFront(tryBody.detachFromParent()); originalGeneratorBody.addChildAfter(finallyStart, catchBody); 
originalGeneratorBody.addChildAfter(finallyBody.detachFromParent(), finallyStart); originalGeneratorBody.addChildAfter(finallyEnd, finallyBody); originalGeneratorBody.addChildToFront(IR.var(finallyName.cloneTree())); finallyBody.addChildToBack( IR.exprResult(IR.assign(IR.name(GENERATOR_STATE), finallyName.cloneTree()))); finallyBody.addChildToBack(createSafeBreak()); tryBody.addChildToBack( IR.exprResult(IR.assign(finallyName.cloneTree(), IR.number(finallyEndState)))); tryBody.addChildToBack(createStateUpdate(finallyStartState)); tryBody.addChildToBack(createSafeBreak()); catchBody.addChildToBack( IR.exprResult(IR.assign(finallyName.cloneTree(), IR.number(finallyEndState)))); } else { int catchEndState = generatorCaseCount++; Node catchEnd = makeGeneratorMarker(catchEndState); originalGeneratorBody.addChildAfter(catchEnd, catchBody); tryBody.addChildToBack(createStateUpdate(catchEndState)); tryBody.addChildToBack(createSafeBreak()); originalGeneratorBody.addChildToFront(tryBody.detachFromParent()); } catchBody.addChildToFront(IR.var(caughtError, IR.name(GENERATOR_ERROR))); if (enclosingBlock.getParent().isTry()) { enclosingBlock = enclosingBlock.getParent().getParent(); } enclosingBlock.addChildToBack(IR.tryCatch(IR.block(), newCatch)); enclosingBlock = enclosingBlock.getLastChild().getFirstChild(); if (!hasTranslatedTry) { hasTranslatedTry = true; hoistRoot.getParent().addChildAfter(IR.var(IR.name(GENERATOR_ERROR)), hoistRoot); } } private void visitContinue() { Preconditions.checkState(currentLoopContext.get(0).continueCase != -1); int continueCase; if (currentStatement.hasChildren()) { continueCase = getLoopContext(currentStatement.removeFirstChild().getString()).continueCase; } else { continueCase = currentLoopContext.get(0).continueCase; } enclosingBlock.addChildToBack(createStateUpdate(continueCase)); enclosingBlock.addChildToBack(createSafeBreak()); } private void visitThrow() { enclosingBlock.addChildToBack(createStateUpdate(-1)); 
enclosingBlock.addChildToBack(currentStatement); } private void visitBreak() { int breakCase; if (currentStatement.hasChildren()) { LoopContext loop = getLoopContext(currentStatement.removeFirstChild().getString()); if (loop == null) { compiler.report( JSError.make( currentStatement, Es6ToEs3Converter.CANNOT_CONVERT_YET, "Breaking to a label that is not a loop")); return; } breakCase = loop.breakCase; } else { breakCase = currentLoopContext.get(0).breakCase; } enclosingBlock.addChildToBack(createStateUpdate(breakCase)); enclosingBlock.addChildToBack(createSafeBreak()); } private void visitLabel() { Node labelName = currentStatement.removeFirstChild(); Node child = currentStatement.removeFirstChild(); if (NodeUtil.isLoopStructure(child)) { currentStatement = child; visitLoop(labelName.getString()); } else { originalGeneratorBody.addChildToFront(child); } } /** * Pops the loop information off of our stack if we reach the marker cooresponding * to the end of the current loop. */ private void visitGeneratorMarker() { if (!currentLoopContext.isEmpty() && currentLoopContext.get(0).breakCase == currentStatement.getFirstChild().getDouble()) { currentLoopContext.remove(0); } if (!currentExceptionContext.isEmpty() && currentExceptionContext.get(0).catchStartCase == currentStatement.getFirstChild().getDouble()) { currentExceptionContext.remove(0); } } /** * Uses a case statement to jump over the body if the condition of the * if statement is false. Additionally, lift the body of the {@code if} * statement to the top level. 
*/
  private void visitIf() {
    Node condition = currentStatement.removeFirstChild();
    Node ifBody = currentStatement.removeFirstChild();
    boolean hasElse = currentStatement.hasChildren();

    // Reserve a state marking the end of the if body; jump there when the
    // inverted condition is true (i.e. the original condition is false).
    int ifEndState = generatorCaseCount++;

    Node invertedConditional =
        IR.ifNode(IR.not(condition), IR.block(createStateUpdate(ifEndState), createSafeBreak()));
    invertedConditional.setGeneratorSafe(true);
    Node endIf = makeGeneratorMarker(ifEndState);

    originalGeneratorBody.addChildToFront(invertedConditional);
    originalGeneratorBody.addChildAfter(ifBody, invertedConditional);
    originalGeneratorBody.addChildAfter(endIf, ifBody);

    if (hasElse) {
      // The else block sits after the end-of-if marker; the if body jumps over it
      // to the else-end marker.
      Node elseBlock = currentStatement.removeFirstChild();
      int elseEndState = generatorCaseCount++;
      Node endElse = makeGeneratorMarker(elseEndState);

      ifBody.addChildToBack(createStateUpdate(elseEndState));
      ifBody.addChildToBack(createSafeBreak());
      originalGeneratorBody.addChildAfter(elseBlock, endIf);
      originalGeneratorBody.addChildAfter(endElse, elseBlock);
    }
  }

  /**
   * Translates switch statements into a series of if statements.
   *
   * <p>Sample translation:
   * <pre>
   * switch (i) {
   *   case 1:
   *     s;
   *   case 2:
   *     t;
   *   ...
   * }
   * </pre>
   *
   * <p>Is eventually rewritten to:
   *
   * <pre>
   * $jscomp$generator$switch$entered0 = false;
   * if ($jscomp$generator$switch$entered0 || i == 1) {
   *   $jscomp$generator$switch$entered0 = true;
   *   s;
   * }
   * if ($jscomp$generator$switch$entered0 || i == 2) {
   *   $jscomp$generator$switch$entered0 = true;
   *   t;
   * }
   * ...
*
   * </pre>
   */
  private void visitSwitch() {
    // Sentinel tracking whether an earlier case already matched, to emulate fall-through.
    Node didEnter = IR.name(GENERATOR_SWITCH_ENTERED + generatorCounter.get());
    Node didEnterDecl = IR.var(didEnter.cloneTree(), IR.falseNode());
    // Evaluate the switch expression exactly once into a temporary.
    Node switchVal = IR.name(GENERATOR_SWITCH_VAL + generatorCounter.get());
    Node switchValDecl = IR.var(switchVal.cloneTree(), currentStatement.removeFirstChild());
    originalGeneratorBody.addChildToFront(didEnterDecl);
    originalGeneratorBody.addChildAfter(switchValDecl, didEnterDecl);
    Node insertionPoint = switchValDecl;

    while (currentStatement.hasChildren()) {
      Node currCase = currentStatement.removeFirstChild();
      Node equivBlock;
      // Each case body first records that the switch was entered, so that
      // subsequent cases fall through.
      currCase
          .getLastChild()
          .addChildToFront(IR.exprResult(IR.assign(didEnter.cloneTree(), IR.trueNode())));
      if (currCase.isDefaultCase()) {
        if (currentStatement.hasChildren()) {
          // A default case followed by further cases is not supported by this translation.
          compiler.report(
              JSError.make(
                  currentStatement,
                  Es6ToEs3Converter.CANNOT_CONVERT_YET,
                  "Default case as intermediate case"));
        }
        equivBlock = IR.block(currCase.removeFirstChild());
      } else {
        // if (entered || switchVal === caseVal) { caseBody }
        equivBlock =
            IR.ifNode(
                IR.or(
                    didEnter.cloneTree(),
                    IR.sheq(switchVal.cloneTree(), currCase.removeFirstChild())),
                currCase.removeFirstChild());
      }
      originalGeneratorBody.addChildAfter(equivBlock, insertionPoint);
      insertionPoint = equivBlock;
    }

    // Push a synthetic loop context so break statements inside the switch have a
    // jump target; continue still targets the enclosing loop (if any).
    int breakTarget = generatorCaseCount++;
    int cont = currentLoopContext.isEmpty() ? -1 : currentLoopContext.get(0).continueCase;
    currentLoopContext.add(0, new LoopContext(breakTarget, cont, null));
    Node breakCase = makeGeneratorMarker(breakTarget);
    originalGeneratorBody.addChildAfter(breakCase, insertionPoint);
  }

  /**
   * Lifts all children to the body of the original generator to flatten the block.
*/
  private void visitBlock() {
    if (currentStatement.getChildCount() == 0) {
      return;
    }
    // Move the first child to the front of the generator body, then append the
    // remaining children in their original order after it.
    Node insertionPoint = currentStatement.removeFirstChild();
    originalGeneratorBody.addChildToFront(insertionPoint);
    for (Node child = currentStatement.removeFirstChild();
        child != null;
        child = currentStatement.removeFirstChild()) {
      originalGeneratorBody.addChildAfter(child, insertionPoint);
      insertionPoint = child;
    }
  }

  /**
   * Translates for in loops to a for in loop which produces an array of
   * values iterated over followed by a plain for loop which performs the logic
   * contained in the body of the original for in.
   *
   * <p>Sample translation:
   * <pre>
   * for (i in j) {
   *   s;
   * }
   * </pre>
   *
   * <p>Is eventually rewritten to:
   *
   * <pre>
   * $jscomp$arr = [];
   * $jscomp$iter = j;
   * for (i in $jscomp$iter) {
   *   $jscomp$arr.push(i);
   * }
   * for ($jscomp$var = 0; $jscomp$var < $jscomp$arr.length; $jscomp$var++) {
   *   i = $jscomp$arr[$jscomp$var];
   *   if (!(i in $jscomp$iter)) {
   *     continue;
   *   }
   *   s;
   * }
   * </pre>
   */
  private void visitForIn() {
    Node variable = currentStatement.removeFirstChild();
    Node iterable = currentStatement.removeFirstChild();
    Node body = currentStatement.removeFirstChild();

    String loopId = generatorCounter.get();
    Node arrayName = IR.name(GENERATOR_FOR_IN_ARRAY + loopId);
    Node varName = IR.name(GENERATOR_FOR_IN_VAR + loopId);
    Node iterableName = IR.name(GENERATOR_FOR_IN_ITER + loopId);

    if (variable.isVar()) {
      variable = variable.removeFirstChild();
    }
    // Guard: skip keys that were deleted from the object after the key snapshot
    // was taken, matching native for-in semantics.
    body.addChildToFront(
        IR.ifNode(
            IR.not(IR.in(variable.cloneTree(), iterableName.cloneTree())),
            IR.block(IR.continueNode())));
    body.addChildToFront(
        IR.var(variable.cloneTree(), IR.getelem(arrayName.cloneTree(), varName.cloneTree())));
    // Hoist the helper vars so their values survive across next() calls.
    hoistRoot.getParent().addChildAfter(IR.var(arrayName.cloneTree()), hoistRoot);
    hoistRoot.getParent().addChildAfter(IR.var(varName.cloneTree()), hoistRoot);
    hoistRoot.getParent().addChildAfter(IR.var(iterableName.cloneTree()), hoistRoot);

    Node arrayDef = IR.exprResult(IR.assign(arrayName.cloneTree(),
IR.arraylit()));
    Node iterDef = IR.exprResult(IR.assign(iterableName.cloneTree(), iterable));
    // for (i in $jscomp$iter) { $jscomp$arr.push(i); }
    Node newForIn =
        IR.forIn(
            variable.cloneTree(),
            iterableName,
            IR.block(
                IR.exprResult(
                    IR.call(IR.getprop(arrayName.cloneTree(), IR.string("push")), variable))));
    // for ($jscomp$var = 0; $jscomp$var < $jscomp$arr.length; $jscomp$var++) { body }
    Node newFor =
        IR.forNode(
            IR.assign(varName.cloneTree(), IR.number(0)),
            IR.lt(varName.cloneTree(), IR.getprop(arrayName, IR.string("length"))),
            IR.inc(varName, true),
            body);
    enclosingBlock.addChildToBack(arrayDef);
    enclosingBlock.addChildToBack(iterDef);
    enclosingBlock.addChildToBack(newForIn);
    originalGeneratorBody.addChildToFront(newFor);
  }

  /**
   * Translates loops to a case statement followed by an if statement
   * containing the loop body. The if statement finishes by
   * jumping back to the initial case statement to enter the loop again.
   * In the case of for and do loops, initialization and post loop statements are inserted
   * before and after the if statement. Below is a sample translation for a while loop:
   *
   * <p>Sample translation:
   * <pre>
   * while (b) {
   *   s;
   * }
   * </pre>
   *
   * <p>Is eventually rewritten to:
   * <pre>
   * case n:
   *   if (b) {
   *     s;
   *     state = n;
   *     break;
   *   }
   * </pre>
   */
  private void visitLoop(String label) {
    Node initializer;
    Node guard;
    Node incr;
    Node body;

    if (currentStatement.isWhile()) {
      guard = currentStatement.removeFirstChild();
      body = currentStatement.removeFirstChild();
      initializer = IR.empty();
      incr = IR.empty();
    } else if (currentStatement.isFor()) {
      initializer = currentStatement.removeFirstChild();
      if (initializer.isAssign()) {
        initializer = IR.exprResult(initializer);
      }
      guard = currentStatement.removeFirstChild();
      incr = currentStatement.removeFirstChild();
      body = currentStatement.removeFirstChild();
    } else {
      Preconditions.checkState(currentStatement.isDo());
      initializer = IR.empty();
      // The first pass of a do-loop must run unconditionally; the sentinel is
      // flipped to false after each iteration.
      incr = IR.assign(IR.name(GENERATOR_DO_WHILE_INITIAL), IR.falseNode());
      body = currentStatement.removeFirstChild();
      guard = currentStatement.removeFirstChild();
    }

    // A block-shaped guard carries a prestatement produced by earlier decomposition.
    Node condition, prestatement;
    if (guard.isBlock()) {
      prestatement = guard.removeFirstChild();
      condition = guard.removeFirstChild();
    } else {
      prestatement = IR.block();
      condition = guard;
    }

    int loopBeginState = generatorCaseCount++;
    int continueState = loopBeginState;

    if (!incr.isEmpty()) {
      // continue jumps to a dedicated state placed just before the increment.
      continueState = generatorCaseCount++;
      Node continueCase = makeGeneratorMarker(continueState);
      body.addChildToBack(continueCase);
      body.addChildToBack(incr.isBlock() ? incr : IR.exprResult(incr));
    }

    // generatorCaseCount here is the (not yet emitted) break target of this loop.
    currentLoopContext.add(0, new LoopContext(generatorCaseCount, continueState, label));

    Node beginCase = makeGeneratorMarker(loopBeginState);
    Node conditionalBranch = IR.ifNode(condition.isEmpty() ? IR.trueNode() : condition, body);
    Node setStateLoopStart = createStateUpdate(loopBeginState);
    Node breakToStart = createSafeBreak();

    originalGeneratorBody.addChildToFront(conditionalBranch);
    if (!prestatement.isEmpty()) {
      originalGeneratorBody.addChildToFront(prestatement);
    }
    originalGeneratorBody.addChildToFront(beginCase);
    if (!initializer.isEmpty()) {
      originalGeneratorBody.addChildToFront(initializer);
    }
    // Jump back to the loop's begin state at the end of each iteration.
    body.addChildToBack(setStateLoopStart);
    body.addChildToBack(breakToStart);
  }

  /**
   * Hoists {@code var} statements into the closure containing the iterator
   * to preserve their state across
   * multiple calls to next().
   */
  private void visitVar() {
    Node name = currentStatement.removeFirstChild();
    while (name != null) {
      if (name.hasChildren()) {
        // Keep the initialization in place as a plain assignment...
        enclosingBlock.addChildToBack(IR.exprResult(IR.assign(name, name.removeFirstChild())));
      }
      // ...and hoist the declaration itself next to the generator closure root.
      hoistRoot.getParent().addChildAfter(IR.var(name.cloneTree()), hoistRoot);
      name = currentStatement.removeFirstChild();
    }
  }

  /**
   * Translates {@code yield} to set the state so that execution resumes at the next statement
   * when the function is next called and then returns an iterator result with
   * the desired value.
   */
  private void visitYieldExprResult() {
    enclosingBlock.addChildToBack(createStateUpdate());
    Node yield = currentStatement.getFirstChild();
    Node value = yield.hasChildren() ?
yield.removeFirstChild() : IR.name("undefined");
    enclosingBlock.addChildToBack(IR.returnNode(createIteratorResult(value, false)));
  }

  /**
   * Translates {@code return} statements to set the state to done before returning the
   * desired value.
   */
  private void visitReturn() {
    // State -1 marks the generator as finished.
    enclosingBlock.addChildToBack(createStateUpdate(-1));
    enclosingBlock.addChildToBack(
        IR.returnNode(
            createIteratorResult(
                currentStatement.hasChildren()
                    ? currentStatement.removeFirstChild()
                    : IR.name("undefined"),
                true)));
  }

  // Sets the state to the next case number to be allocated (the resume point
  // emitted after a yield).
  private static Node createStateUpdate() {
    return IR.exprResult(IR.assign(IR.name(GENERATOR_STATE), IR.number(generatorCaseCount)));
  }

  // Sets the state to an explicit case number.
  private static Node createStateUpdate(int state) {
    return IR.exprResult(IR.assign(IR.name(GENERATOR_STATE), IR.number(state)));
  }

  // Builds the {value: ..., done: ...} object literal returned from next().
  private static Node createIteratorResult(Node value, boolean done) {
    return IR.objectlit(
        IR.propdef(IR.stringKey("value"), value),
        IR.propdef(IR.stringKey("done"), done ? IR.trueNode() : IR.falseNode()));
  }

  // A break marked generator-safe so later passes know it intentionally exits
  // the state-machine switch rather than a user loop.
  private static Node createSafeBreak() {
    Node breakNode = IR.breakNode();
    breakNode.setGeneratorSafe(true);
    return breakNode;
  }

  /**
   * Builds the block that routes control flow through a finally section:
   * records the state to resume at afterwards in {@code finallyName}, jumps to
   * the start of the finally block, and emits the marker for the resume point.
   */
  private static Node createFinallyJumpBlock(Node finallyName, int finallyStartState) {
    int jumpPoint = generatorCaseCount++;
    Node setReturnState = IR.exprResult(IR.assign(finallyName.cloneTree(), IR.number(jumpPoint)));
    Node toFinally = createStateUpdate(finallyStartState);
    Node returnPoint = makeGeneratorMarker(jumpPoint);
    Node returnBlock = IR.block(setReturnState, toFinally, createSafeBreak());
    returnBlock.addChildToBack(returnPoint);
    return returnBlock;
  }

  // Returns the loop context registered under the given label, or null when no
  // translated loop carries that label.
  private LoopContext getLoopContext(String label) {
    for (LoopContext context : currentLoopContext) {
      if (label.equals(context.label)) {
        return context;
      }
    }
    return null;
  }

  // Whether control flow can exit the subtree rooted at n (break, continue,
  // throw, return, or yield not caught within the subtree itself).
  private boolean controlCanExit(Node n) {
    ControlExitsCheck exits = new ControlExitsCheck();
    NodeTraversal.traverseEs6(compiler, n, exits);
    return exits.didExit();
  }

  /**
   * Finds the only child of the {@code node} of the given type.
*/
  private Node getUnique(Node node, int type) {
    List<Node> matches = new ArrayList<>();
    insertAll(node, type, matches);
    // Exactly one match is expected; the matches list is included in the failure message.
    Preconditions.checkState(matches.size() == 1, matches);
    return matches.get(0);
  }

  /**
   * Adds all children of the {@code node} of the given type to given list.
   */
  private void insertAll(Node node, int type, List<Node> matchingNodes) {
    if (node.getType() == type) {
      matchingNodes.add(node);
    }
    for (Node c = node.getFirstChild(); c != null; c = c.getNext()) {
      insertAll(c, type, matchingNodes);
    }
  }

  /**
   * Decomposes expressions with yields inside of them to equivalent
   * sequence of expressions in which all non-statement yields are
   * of the form:
   *
   * <pre>
   *   var name = yield expr;
   * </pre>
   *
   * <p>For example, change the following code:
   * <pre>
   *   return x || yield y;
   * </pre>
   * <p>Into:
   * <pre>
   *  var temp$$0;
   *  if (temp$$0 = x); else temp$$0 = yield y;
   *  return temp$$0;
   * </pre>
   *
   * This uses the {@link ExpressionDecomposer} class
   */
  private final class DecomposeYields extends NodeTraversal.AbstractPreOrderCallback {

    private final AbstractCompiler compiler;

    private final ExpressionDecomposer decomposer;

    DecomposeYields(AbstractCompiler compiler) {
      this.compiler = compiler;
      Set<String> consts = new HashSet<>();
      decomposer =
          new ExpressionDecomposer(
              compiler,
              compiler.getUniqueNameIdSupplier(),
              consts,
              Scope.createGlobalScope(new Node(Token.SCRIPT)));
    }

    @Override
    public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
      switch (n.getType()) {
        case Token.YIELD:
          visitYieldExpression(n);
          break;
        case Token.DO:
        case Token.FOR:
        case Token.WHILE:
          visitLoop(n);
          break;
        case Token.CASE:
          // Yields inside a case guard expression cannot be decomposed here;
          // report and stop descending into this case.
          if (controlCanExit(n.getFirstChild())) {
            compiler.report(
                JSError.make(
                    n,
                    Es6ToEs3Converter.CANNOT_CONVERT_YET,
                    "Case statements that contain yields"));
            return false;
          }
          break;
        default:
          break;
      }
      return true;
    }

    private void visitYieldExpression(Node n) {
      if (n.getParent().isExprResult()) {
        // Already a statement-level yield; nothing to decompose.
        return;
      }
      if (decomposer.canExposeExpression(n) !=
          ExpressionDecomposer.DecompositionType.UNDECOMPOSABLE) {
        decomposer.exposeExpression(n);
        compiler.reportCodeChange();
      } else {
        compiler.report(
            JSError.make(n, Es6ToEs3Converter.CANNOT_CONVERT, "Undecomposable expression"));
      }
    }

    private void visitLoop(Node n) {
      // Only loops inside generator functions (excluding for-in) need their
      // guard/increment rewritten before translation.
      Node enclosingFunc = NodeUtil.getEnclosingFunction(n);
      if (enclosingFunc == null || !enclosingFunc.isGeneratorFunction() || NodeUtil.isForIn(n)) {
        return;
      }
      Node enclosingBlock = NodeUtil.getEnclosingType(n, Token.BLOCK);
      Node guard = null;
      Node incr = null;
      switch (n.getType()) {
        case Token.FOR:
          guard = n.getSecondChild();
          incr = guard.getNext();
          break;
        case Token.WHILE:
          guard = n.getFirstChild();
          incr = IR.empty();
          break;
        case Token.DO:
          guard = n.getLastChild();
          if (!guard.isEmpty()) {
            // do-loops get a "first pass" sentinel OR'd into the guard so the
            // body always runs once.
            Node firstEntry = IR.name(GENERATOR_DO_WHILE_INITIAL);
            enclosingBlock.addChildToFront(IR.var(firstEntry.cloneTree(), IR.trueNode()));
            guard = IR.or(firstEntry, n.getLastChild().detachFromParent());
            n.addChildToBack(guard);
          }
          incr = IR.empty();
          break;
        default:
          break;
      }
      if (!controlCanExit(guard) && !controlCanExit(incr)) {
        return;
      }
      // Move the guard into a named variable so yields inside it can later be
      // decomposed into statements.
      Node guardName = IR.name(GENERATOR_LOOP_GUARD + generatorCounter.get());
      if (!guard.isEmpty()) {
        Node container = new Node(Token.BLOCK);
        n.replaceChild(guard, container);
        container.addChildToFront(
            IR.block(IR.exprResult(IR.assign(guardName.cloneTree(), guard.cloneTree()))));
        container.addChildToBack(guardName.cloneTree());
      }
      if (!incr.isEmpty()) {
        n.addChildBefore(IR.block(IR.exprResult(incr.detachFromParent())), n.getLastChild());
      }
      enclosingBlock.addChildToFront(IR.var(guardName));
      compiler.reportCodeChange();
    }
  }

  // Emits the numbered marker that later becomes a case label in the generator's
  // state-machine switch.
  private static Node makeGeneratorMarker(int i) {
    Node n = IR.exprResult(IR.number(i));
    n.setGeneratorMarker(true);
    return n;
  }

  /**
   * Traversal that detects whether control can exit a subtree (break, continue,
   * throw, return, or yield) and, when constructed with a finally target,
   * additionally inserts the jump blocks that route such exits through the
   * finally section.
   */
  private static final class ControlExitsCheck implements NodeTraversal.Callback {
    int continueCatchers;
    int breakCatchers;
    int throwCatchers;
    List<String> labels = new ArrayList<>();
    boolean exited;
    boolean addJumps;
    private Node finallyName;
    private int
finallyStartState;

    // Jump-inserting mode: exits are rerouted through the finally section.
    ControlExitsCheck(Node finallyName, int finallyStartState) {
      this.finallyName = finallyName;
      this.finallyStartState = finallyStartState;
      addJumps = true;
    }

    // Detection-only mode: records whether control can exit, inserts nothing.
    ControlExitsCheck() {
      addJumps = false;
    }

    @Override
    public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      switch (n.getType()) {
        case Token.FUNCTION:
          // Exits inside nested functions do not exit this subtree.
          return false;
        case Token.LABEL:
          labels.add(0, n.getFirstChild().getString());
          break;
        case Token.DO:
        case Token.WHILE:
        case Token.FOR:
          continueCatchers++;
          breakCatchers++;
          break;
        case Token.SWITCH:
          breakCatchers++;
          break;
        case Token.BLOCK:
          // A try block with a non-empty catch catches throws occurring below it.
          parent = n.getParent();
          if (parent != null
              && parent.isTry()
              && parent.getFirstChild() == n
              && n.getNext().hasChildren()) {
            throwCatchers++;
          }
          break;
        case Token.BREAK:
          // Exits unless caught by an enclosing loop/switch or a visible label.
          if (!n.isGeneratorSafe()
              && ((breakCatchers == 0 && !n.hasChildren())
                  || (n.hasChildren() && !labels.contains(n.getFirstChild().getString())))) {
            exited = true;
            if (addJumps) {
              parent.addChildBefore(createFinallyJumpBlock(finallyName, finallyStartState), n);
            }
          }
          break;
        case Token.CONTINUE:
          if (continueCatchers == 0
              || (n.hasChildren() && !labels.contains(n.getFirstChild().getString()))) {
            exited = true;
            if (addJumps) {
              parent.addChildBefore(createFinallyJumpBlock(finallyName, finallyStartState), n);
            }
          }
          break;
        case Token.THROW:
          if (throwCatchers == 0) {
            exited = true;
            if (addJumps && !n.isGeneratorSafe()) {
              parent.addChildBefore(createFinallyJumpBlock(finallyName, finallyStartState), n);
            }
          }
          break;
        case Token.RETURN:
          exited = true;
          if (addJumps) {
            parent.addChildBefore(createFinallyJumpBlock(finallyName, finallyStartState), n);
          }
          break;
        case Token.YIELD:
          exited = true;
          break;
        default:
          break;
      }
      return true;
    }

    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      // Undo the bookkeeping performed in shouldTraverse on the way back up.
      switch (n.getType()) {
        case Token.LABEL:
          labels.remove(0);
          break;
        case Token.DO:
        case Token.WHILE:
        case Token.FOR:
          continueCatchers--;
          breakCatchers--;
          break;
        case Token.SWITCH:
          breakCatchers--;
          break;
        case Token.BLOCK:
          parent = n.getParent();
          if (parent != null
              && parent.isTry()
              && parent.getFirstChild() == n
              && n.getNext().hasChildren()) {
            throwCatchers--;
          }
          break;
        default:
          break;
      }
    }

    public boolean didExit() {
      return exited;
    }
  }

  // Records the break/continue jump targets (and optional label) of a translated loop.
  private static final class LoopContext {
    int breakCase;
    int continueCase;
    String label;

    LoopContext(int breakCase, int continueCase, String label) {
      this.breakCase = breakCase;
      this.continueCase = continueCase;
      this.label = label;
    }
  }

  // Records the entry case number and catch block of a translated try/catch.
  private static final class ExceptionContext {
    int catchStartCase;
    Node catchBlock;

    ExceptionContext(int catchStartCase, Node catchBlock) {
      this.catchStartCase = catchStartCase;
      this.catchBlock = catchBlock;
    }
  }
}
/* * Copyright 2021 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.domain; import com.thoughtworks.go.config.Agent; import com.thoughtworks.go.config.Agents; import com.thoughtworks.go.domain.AgentInstance; import com.thoughtworks.go.domain.AgentInstance.FilterBy; import com.thoughtworks.go.domain.AgentStatus; import com.thoughtworks.go.domain.NullAgentInstance; import com.thoughtworks.go.domain.exception.MaxPendingAgentsLimitReachedException; import com.thoughtworks.go.listener.AgentStatusChangeListener; import com.thoughtworks.go.server.service.AgentBuildingInfo; import com.thoughtworks.go.server.service.AgentRuntimeInfo; import com.thoughtworks.go.util.SystemEnvironment; import org.apache.commons.collections4.CollectionUtils; import org.springframework.util.LinkedMultiValueMap; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Stream; import static com.thoughtworks.go.domain.AgentInstance.createFromAgent; import static com.thoughtworks.go.util.SystemEnvironment.MAX_PENDING_AGENTS_ALLOWED; import static java.lang.String.format; import static java.util.Collections.emptyList; import static java.util.stream.Collectors.toList; import static java.util.stream.StreamSupport.stream; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.join; public class AgentInstances implements Iterable<AgentInstance> { private SystemEnvironment 
systemEnvironment; private Map<String, AgentInstance> uuidToAgentInstanceMap = new ConcurrentHashMap<>(); private AgentStatusChangeListener agentStatusChangeListener; public AgentInstances(AgentStatusChangeListener listener) { this.agentStatusChangeListener = listener; this.systemEnvironment = new SystemEnvironment(); } public AgentInstances(SystemEnvironment sysEnv, AgentStatusChangeListener listener, AgentInstance... agentInstances) { this(listener); this.systemEnvironment = sysEnv; if (agentInstances != null) { Stream.of(agentInstances).forEach(this::add); } } public void add(AgentInstance agent) { uuidToAgentInstanceMap.put(agent.getAgent().getUuid(), agent); } public void updateAgentAboutCancelledBuild(String uuid, boolean isCancelled) { AgentInstance agentInstance = findAgentAndRefreshStatus(uuid); if (isCancelled) { agentInstance.cancel(); } } public AgentInstance findAgentAndRefreshStatus(String uuid) { AgentInstance agentInstance = loadAgentInstance(uuid); agentInstance.refresh(); return agentInstance; } public AgentInstance findAgent(String uuid) { return loadAgentInstance(uuid); } public AgentInstance loadAgentInstance(String uuid) { if (isBlank(uuid)) { return new NullAgentInstance(uuid); } AgentInstance agentInstance = uuidToAgentInstanceMap.get(uuid); return agentInstance == null ? 
new NullAgentInstance(uuid) : agentInstance; } public void removeAgent(String uuid) { uuidToAgentInstanceMap.remove(uuid); } public void clearAll() { uuidToAgentInstanceMap.clear(); } public AgentInstances getAllAgents() { AgentInstances allAgentInstances = new AgentInstances(agentStatusChangeListener); currentInstances().forEach(allAgentInstances::add); return allAgentInstances; } public AgentInstances findRegisteredAgents() { this.refresh(); AgentInstances registeredInstances = new AgentInstances(agentStatusChangeListener); synchronized (uuidToAgentInstanceMap) { stream(this.spliterator(), false) .filter(agentInstance -> agentInstance.getStatus().isRegistered()) .forEach(registeredInstances::add); } return registeredInstances; } @Override public Iterator<AgentInstance> iterator() { return currentInstances().iterator(); } public boolean isEmpty() { return uuidToAgentInstanceMap.isEmpty(); } public Integer size() { return uuidToAgentInstanceMap.size(); } public void refresh() { currentInstances().forEach(AgentInstance::refresh); getRemovableAgents().forEach(agentInstance -> removeAgent(agentInstance.getAgent().getUuid())); } public List<AgentInstance> agentsStuckInCancel() { return currentInstances().stream().filter(AgentInstance::isStuckInCancel).collect(toList()); } public void syncAgentInstancesFrom(Agents agentsFromDB) { for (Agent agentFromDB : agentsFromDB) { String uuid = agentFromDB.getUuid(); if (uuidToAgentInstanceMap.containsKey(uuid)) { uuidToAgentInstanceMap.get(uuid).syncAgentFrom(agentFromDB); } else { AgentInstance newAgent = createFromAgent(agentFromDB, new SystemEnvironment(), agentStatusChangeListener); uuidToAgentInstanceMap.put(uuid, newAgent); } } synchronized (uuidToAgentInstanceMap) { List<String> uuids = new ArrayList<>(); for (String uuid : uuidToAgentInstanceMap.keySet()) { AgentInstance instance = uuidToAgentInstanceMap.get(uuid); if (!(instance.getStatus() == AgentStatus.Pending)) { if (!agentsFromDB.hasAgent(uuid)) { uuids.add(uuid); } 
} } uuids.forEach(uuidToAgentInstanceMap::remove); } } public boolean hasAgent(String uuid) { AgentInstance agentInstance = findAgentAndRefreshStatus(uuid); return !(agentInstance instanceof NullAgentInstance); } public AgentInstance register(AgentRuntimeInfo runtimeInfo) { AgentInstance agentInstance = findAgentAndRefreshStatus(runtimeInfo.getUUId()); if (!agentInstance.isRegistered()) { if (isMaxPendingAgentsLimitReached()) { throw new MaxPendingAgentsLimitReachedException(systemEnvironment.get(MAX_PENDING_AGENTS_ALLOWED)); } agentInstance = AgentInstance.createFromLiveAgent(runtimeInfo, systemEnvironment, agentStatusChangeListener); this.add(agentInstance); } agentInstance.update(runtimeInfo); return agentInstance; } public void updateAgentRuntimeInfo(AgentRuntimeInfo runtimeInfo) { AgentInstance agentInstance = this.findAgentAndRefreshStatus(runtimeInfo.getUUId()); agentInstance.update(runtimeInfo); } public void building(String uuid, AgentBuildingInfo agentBuildingInfo) { findAgentAndRefreshStatus(uuid).building(agentBuildingInfo); } public List<AgentInstance> filter(List<String> uuids) { if (CollectionUtils.isEmpty(uuids)) { return emptyList(); } return stream(this.spliterator(), false) .filter(agentInstance -> uuids.contains(agentInstance.getUuid())) .collect(toList()); } public LinkedMultiValueMap<String, ElasticAgentMetadata> getAllElasticAgentsGroupedByPluginId() { LinkedMultiValueMap<String, ElasticAgentMetadata> map = new LinkedMultiValueMap<>(); for (Map.Entry<String, AgentInstance> entry : uuidToAgentInstanceMap.entrySet()) { AgentInstance agentInstance = entry.getValue(); if (agentInstance.isElastic()) { ElasticAgentMetadata metadata = agentInstance.elasticAgentMetadata(); map.add(metadata.elasticPluginId(), metadata); } } return map; } public AgentInstance findElasticAgent(final String elasticAgentId, final String elasticPluginId) { Collection<AgentInstance> agentInstances = uuidToAgentInstanceMap.values(); List<AgentInstance> 
matchingElasticInstances = agentInstances.stream() .filter(agentInstance -> agentInstance.isElastic() && agentInstance.elasticAgentMetadata().elasticAgentId().equals(elasticAgentId) && agentInstance.elasticAgentMetadata().elasticPluginId().equals(elasticPluginId)) .collect(toList()); if (CollectionUtils.isEmpty(matchingElasticInstances)) { return null; } if (matchingElasticInstances.size() > 1) { Collection<String> uuids = matchingElasticInstances.stream().map(AgentInstance::getUuid).collect(toList()); throw new IllegalStateException(format("Found multiple agents with the same elastic agent id [%s]", join(uuids, ", "))); } return matchingElasticInstances.iterator().next(); } public List<Agent> filterPendingAgents(List<String> uuids) { return (CollectionUtils.isEmpty(uuids) ? new ArrayList<String>() : uuids) .stream() .map(this::findAgent) .filter(this::isPendingAndNotNullInstance) .map(agentInstance -> agentInstance.getAgent().deepClone()) .collect(toList()); } public List<String> filterBy(List<String> uuids, FilterBy filter) { return (CollectionUtils.isEmpty(uuids) ? new ArrayList<String>() : uuids) .stream() .map(this::findAgent) .filter(agentInstance -> agentInstance.matches(filter)) .map(AgentInstance::getUuid) .collect(toList()); } private boolean isPendingAndNotNullInstance(AgentInstance agentInstance) { return agentInstance.isPending() && !agentInstance.isNullAgent(); } private List<AgentInstance> getRemovableAgents() { return stream(this.spliterator(), false) .filter(AgentInstance::canRemove) .collect(toList()); } private Collection<AgentInstance> currentInstances() { return new TreeSet<>(uuidToAgentInstanceMap.values()); } private boolean isMaxPendingAgentsLimitReached() { Integer maxPendingAgentsAllowed = systemEnvironment.get(MAX_PENDING_AGENTS_ALLOWED); int pendingAgentsCount = this.size() - findRegisteredAgents().size(); return pendingAgentsCount >= maxPendingAgentsAllowed; } }
package com.capitalone.dashboard.service; import com.capitalone.dashboard.misc.HygieiaException; import com.capitalone.dashboard.model.Collector; import com.capitalone.dashboard.model.CollectorItem; import com.capitalone.dashboard.model.CollectorType; import com.capitalone.dashboard.model.Component; import com.capitalone.dashboard.model.DataResponse; import com.capitalone.dashboard.model.EnvironmentComponent; import com.capitalone.dashboard.model.EnvironmentStatus; import com.capitalone.dashboard.model.deploy.DeployableUnit; import com.capitalone.dashboard.model.deploy.Environment; import com.capitalone.dashboard.model.deploy.Server; import com.capitalone.dashboard.repository.CollectorItemRepository; import com.capitalone.dashboard.repository.CollectorRepository; import com.capitalone.dashboard.repository.ComponentRepository; import com.capitalone.dashboard.repository.EnvironmentComponentRepository; import com.capitalone.dashboard.repository.EnvironmentStatusRepository; import com.capitalone.dashboard.request.CollectorRequest; import com.capitalone.dashboard.request.DeployDataCreateRequest; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import org.bson.types.ObjectId; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.util.CollectionUtils; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @Service public class DeployServiceImpl implements DeployService { private final ComponentRepository componentRepository; private final EnvironmentComponentRepository environmentComponentRepository; private final EnvironmentStatusRepository environmentStatusRepository; private final CollectorRepository collectorRepository; private final CollectorItemRepository collectorItemRepository; private final 
CollectorService collectorService; @Autowired public DeployServiceImpl(ComponentRepository componentRepository, EnvironmentComponentRepository environmentComponentRepository, EnvironmentStatusRepository environmentStatusRepository, CollectorRepository collectorRepository, CollectorItemRepository collectorItemRepository, CollectorService collectorService) { this.componentRepository = componentRepository; this.environmentComponentRepository = environmentComponentRepository; this.environmentStatusRepository = environmentStatusRepository; this.collectorRepository = collectorRepository; this.collectorItemRepository = collectorItemRepository; this.collectorService = collectorService; } @Override public DataResponse<List<Environment>> getDeployStatus(ObjectId componentId) { Component component = componentRepository.findOne(componentId); CollectorItem item = component.getCollectorItems() .get(CollectorType.Deployment).get(0); ObjectId collectorItemId = item.getId(); List<EnvironmentComponent> components = environmentComponentRepository .findByCollectorItemId(collectorItemId); List<EnvironmentStatus> statuses = environmentStatusRepository .findByCollectorItemId(collectorItemId); List<Environment> environments = new ArrayList<>(); for (Map.Entry<Environment, List<EnvironmentComponent>> entry : groupByEnvironment( components).entrySet()) { Environment env = entry.getKey(); environments.add(env); for (EnvironmentComponent envComponent : entry.getValue()) { env.getUnits().add( new DeployableUnit(envComponent, servers(envComponent, statuses))); } } Collector collector = collectorRepository .findOne(item.getCollectorId()); return new DataResponse<>(environments, collector.getLastExecuted()); } private Map<Environment, List<EnvironmentComponent>> groupByEnvironment( List<EnvironmentComponent> components) { Map<Environment, List<EnvironmentComponent>> map = new LinkedHashMap<>(); for (EnvironmentComponent component : components) { Environment env = new 
Environment(component.getEnvironmentName(), component.getEnvironmentUrl()); if (!map.containsKey(env)) { map.put(env, new ArrayList<EnvironmentComponent>()); } // Following logic is to send only the latest deployment status - there may be better way to do this Iterator<EnvironmentComponent> alreadyAddedIter = map.get(env) .iterator(); boolean found = false; ArrayList<EnvironmentComponent> toRemove = new ArrayList<EnvironmentComponent>(); ArrayList<EnvironmentComponent> toAdd = new ArrayList<EnvironmentComponent>(); while (alreadyAddedIter.hasNext()) { EnvironmentComponent ec = (EnvironmentComponent) alreadyAddedIter .next(); if (component.getComponentName().equalsIgnoreCase( ec.getComponentName())) { found = true; if (component.getAsOfDate() > ec.getAsOfDate()) { toRemove.add(ec); toAdd.add(component); } } } if (!found) { toAdd.add(component); } map.get(env).removeAll(toRemove); map.get(env).addAll(toAdd); } return map; } private Iterable<Server> servers(final EnvironmentComponent component, List<EnvironmentStatus> statuses) { return Iterables.transform( Iterables.filter(statuses, new ComponentMatches(component)), new ToServer()); } private class ComponentMatches implements Predicate<EnvironmentStatus> { private EnvironmentComponent component; public ComponentMatches(EnvironmentComponent component) { this.component = component; } @Override public boolean apply(EnvironmentStatus environmentStatus) { return environmentStatus.getEnvironmentName().equals( component.getEnvironmentName()) && environmentStatus.getComponentName().equals( component.getComponentName()); } } private class ToServer implements Function<EnvironmentStatus, Server> { @Override public Server apply(EnvironmentStatus status) { return new Server(status.getResourceName(), status.isOnline()); } } @Override public String create(DeployDataCreateRequest request) throws HygieiaException { /** * Step 1: create Collector if not there * Step 2: create Collector item if not there * Step 3: Insert build data if 
new. If existing, update it. */ Collector collector = createCollector(); if (collector == null) { throw new HygieiaException("Failed creating Deploy collector.", HygieiaException.COLLECTOR_CREATE_ERROR); } CollectorItem collectorItem = createCollectorItem(collector, request); if (collectorItem == null) { throw new HygieiaException("Failed creating Deploy collector item.", HygieiaException.COLLECTOR_ITEM_CREATE_ERROR); } EnvironmentComponent deploy = createEnvComponent(collectorItem, request); if (deploy == null) { throw new HygieiaException("Failed inserting/updating Deployment information.", HygieiaException.ERROR_INSERTING_DATA); } return deploy.getId().toString(); } @Override public DataResponse<List<Environment>> getDeployStatus(String applicationName) { //FIXME: Remove hardcoding of Jenkins. List<Collector> collectorList = collectorRepository.findByCollectorTypeAndName(CollectorType.Deployment, "Jenkins"); if (CollectionUtils.isEmpty(collectorList)) return new DataResponse<>(null, 0); Collector collector = collectorList.get(0); CollectorItem item = collectorItemRepository.findByOptionsAndDeployedApplicationName(collector.getId(), applicationName); if (item == null) return new DataResponse<>(null, 0); ObjectId collectorItemId = item.getId(); List<EnvironmentComponent> components = environmentComponentRepository .findByCollectorItemId(collectorItemId); List<EnvironmentStatus> statuses = environmentStatusRepository .findByCollectorItemId(collectorItemId); List<Environment> environments = new ArrayList<>(); for (Map.Entry<Environment, List<EnvironmentComponent>> entry : groupByEnvironment( components).entrySet()) { Environment env = entry.getKey(); environments.add(env); for (EnvironmentComponent envComponent : entry.getValue()) { env.getUnits().add( new DeployableUnit(envComponent, servers(envComponent, statuses))); } } return new DataResponse<>(environments, collector.getLastExecuted()); } private Collector createCollector() { CollectorRequest collectorReq = new 
CollectorRequest(); collectorReq.setName("Jenkins"); //for now hardcode it. collectorReq.setCollectorType(CollectorType.Deployment); Collector col = collectorReq.toCollector(); col.setEnabled(true); col.setOnline(true); col.setLastExecuted(System.currentTimeMillis()); return collectorService.createCollector(col); } private CollectorItem createCollectorItem(Collector collector, DeployDataCreateRequest request) { CollectorItem tempCi = new CollectorItem(); tempCi.setCollectorId(collector.getId()); tempCi.setDescription(request.getAppName()); tempCi.setPushed(true); tempCi.setLastUpdated(System.currentTimeMillis()); tempCi.setNiceName(request.getNiceName()); Map<String, Object> option = new HashMap<>(); option.put("applicationName", request.getAppName()); option.put("instanceUrl", request.getInstanceUrl()); tempCi.getOptions().putAll(option); CollectorItem collectorItem = collectorService.createCollectorItem(tempCi); return collectorItem; } private EnvironmentComponent createEnvComponent(CollectorItem collectorItem, DeployDataCreateRequest request) { EnvironmentComponent deploy = environmentComponentRepository. findByUniqueKey(collectorItem.getId(), request.getArtifactName(), request.getArtifactName(), request.getEndTime()); if ( deploy == null) { deploy = new EnvironmentComponent(); } deploy.setAsOfDate(System.currentTimeMillis()); deploy.setCollectorItemId(collectorItem.getId()); deploy.setComponentID(request.getArtifactGroup()); deploy.setComponentName(request.getArtifactName()); deploy.setComponentVersion(request.getArtifactVersion()); deploy.setEnvironmentName(request.getEnvName()); deploy.setDeployTime(request.getEndTime()); deploy.setDeployed("SUCCESS".equalsIgnoreCase(request.getDeployStatus())); return environmentComponentRepository.save(deploy); // Save = Update (if ID present) or Insert (if ID not there) } }
/* Copyright 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mobiperf_library; import android.accounts.Account; import android.accounts.AccountManager; import android.accounts.AccountManagerCallback; import android.accounts.AccountManagerFuture; import android.accounts.AuthenticatorException; import android.accounts.OperationCanceledException; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.PreferenceManager; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.params.ClientPNames; import org.apache.http.cookie.Cookie; import org.apache.http.impl.client.DefaultHttpClient; import com.mobiperf_library.util.Logger; import com.mobiperf_library.util.PhoneUtils; import java.io.IOException; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; /** * Helper class for google account checkins */ public class AccountSelector { private static final String ACCOUNT_TYPE = "com.google"; private static final String ACCOUNT_NAME = "@google.com"; // The authentication period in milliseconds private static final long AUTHENTICATE_PERIOD_MSEC = 24 * 3600 * 1000; private Context context; private String authToken = null; private ExecutorService 
checkinExecutor = null; private Future<Cookie> checkinFuture = null; private long lastAuthTime = 0; private boolean authImmediately = false; private PhoneUtils phoneUtils; private boolean isAnonymous = true; public boolean isAnonymous() { return isAnonymous; } public AccountSelector(Context context) { this.context = context; this.checkinExecutor = Executors.newFixedThreadPool(1); this.phoneUtils = PhoneUtils.getPhoneUtils(); } /** Returns the Future to monitor the checkin progress */ public synchronized Future<Cookie> getCheckinFuture() { return this.checkinFuture; } /** After checkin finishes, the client of AccountSelector SHOULD reset checkinFuture */ public synchronized void resetCheckinFuture() { this.checkinFuture = null; } /** Shuts down the executor thread */ public void shutDown() { // shutdown() removes all previously submitted task and no new tasks are accepted this.checkinExecutor.shutdown(); // shutdownNow stops all currently executing tasks this.checkinExecutor.shutdownNow(); } /** Allows clients of AccountSelector to request an authentication upon the next call * to authenticate() */ public synchronized void setAuthImmediately(boolean val) { this.authImmediately = val; } private synchronized boolean shouldAuthImmediately() { return this.authImmediately; } private synchronized void setLastAuthTime(long lastTime) { this.lastAuthTime = lastTime; } private synchronized long getLastAuthTime() { return this.lastAuthTime; } /** * Return the list of account names for users to select */ public static String[] getAccountList(Context context) { AccountManager accountManager = AccountManager.get(context.getApplicationContext()); Account[] accounts = accountManager.getAccountsByType(ACCOUNT_TYPE); int numAccounts = accounts == null ? 
1 : accounts.length + 1; String[] accountNames = new String[numAccounts]; for (int i = 0 ; i < accounts.length ; i++) { accountNames[i] = accounts[i].name; } accountNames[numAccounts - 1] = Config.DEFAULT_USER; return accountNames; } /** Starts an authentication request */ public void authenticate() throws OperationCanceledException, AuthenticatorException, IOException { Logger.i("AccountSelector.authenticate() running"); /* We only need to authenticate every AUTHENTICATE_PERIOD_MILLI milliseconds, during * which we can reuse the cookie. If authentication fails due to expired * authToken, the client of AccountSelector can call authImmedately() to request * authenticate() upon the next checkin */ long authTimeLast = this.getLastAuthTime(); long timeSinceLastAuth = System.currentTimeMillis() - authTimeLast; if (!this.shouldAuthImmediately() && authTimeLast != 0 && (timeSinceLastAuth < AUTHENTICATE_PERIOD_MSEC)) { return; } Logger.i("Authenticating. Last authentication is " + timeSinceLastAuth / 1000 / 60 + " minutes ago. 
"); AccountManager accountManager = AccountManager.get(context.getApplicationContext()); if (this.authToken != null) { // There will be no effect on the token if it is still valid Logger.i("Invalidating token"); accountManager.invalidateAuthToken(ACCOUNT_TYPE, this.authToken); } Account[] accounts = accountManager.getAccountsByType(ACCOUNT_TYPE); Logger.i("Got " + accounts.length + " accounts"); SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this.context); String selectedAccount = prefs.getString(Config.PREF_KEY_SELECTED_ACCOUNT, null); final String defaultUserName = Config.DEFAULT_USER; isAnonymous = true; if (selectedAccount != null && selectedAccount.equals(defaultUserName)) { return; } if (accounts != null && accounts.length > 0) { // Default account should be the Anonymous account Account accountToUse = accounts[accounts.length-1]; if (!accounts[accounts.length-1].name.equals(defaultUserName)) { for (Account account : accounts) { if (account.name.equals(defaultUserName)) { accountToUse = account; break; } } } if (selectedAccount != null) { for (Account account : accounts) { if (account.name.equals(selectedAccount)) { accountToUse = account; break; } } } isAnonymous = accountToUse.name.equals(defaultUserName); if (isAnonymous) { Logger.d("Skipping authentication as account is " + defaultUserName); return; } Logger.i("Trying to get auth token for " + accountToUse); AccountManagerFuture<Bundle> future = accountManager.getAuthToken( accountToUse, "ah", false, new AccountManagerCallback<Bundle>() { @Override public void run(AccountManagerFuture<Bundle> result) { Logger.i("AccountManagerCallback invoked"); try { getAuthToken(result); } catch (RuntimeException e) { Logger.e("Failed to get authToken", e); /* TODO(Wenjie): May ask the user whether to quit the app nicely here if a number * of trials have been made and failed. 
Since Speedometer is basically useless * without checkin */ } }}, null); Logger.i("AccountManager.getAuthToken returned " + future); } else { throw new RuntimeException("No google account found"); } } private void getAuthToken(AccountManagerFuture<Bundle> result) { Logger.i("getAuthToken() called, result " + result); String errMsg = "Failed to get login cookie. "; Bundle bundle; try { bundle = result.getResult(); Intent intent = (Intent) bundle.get(AccountManager.KEY_INTENT); if (intent != null) { // User input required. (A UI will pop up for user's consent to allow // this app access account information.) Logger.i("Starting account manager activity"); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(intent); } else { Logger.i("Executing getCookie task"); synchronized (this) { this.authToken = bundle.getString(AccountManager.KEY_AUTHTOKEN); this.checkinFuture = checkinExecutor.submit(new GetCookieTask()); } } } catch (OperationCanceledException e) { Logger.e(errMsg, e); throw new RuntimeException("Can't get login cookie", e); } catch (AuthenticatorException e) { Logger.e(errMsg, e); throw new RuntimeException("Can't get login cookie", e); } catch (IOException e) { Logger.e(errMsg, e); throw new RuntimeException("Can't get login cookie", e); } } private class GetCookieTask implements Callable<Cookie> { @Override public Cookie call() { Logger.i("GetCookieTask running: " + authToken); DefaultHttpClient httpClient = new DefaultHttpClient(); boolean success = false; try { String loginUrlPrefix = phoneUtils.getServerUrl() + "/_ah/login?continue=" + phoneUtils.getServerUrl() + "&action=Login&auth="; // Don't follow redirects httpClient.getParams().setBooleanParameter( ClientPNames.HANDLE_REDIRECTS, false); HttpGet httpGet = new HttpGet(loginUrlPrefix + authToken); HttpResponse response; Logger.i("Accessing: " + loginUrlPrefix + authToken); response = httpClient.execute(httpGet); if (response.getStatusLine().getStatusCode() != 302) { // Response should 
be a redirect to the "continue" URL. Logger.e("Failed to get login cookie: " + loginUrlPrefix + " returned unexpected error code " + response.getStatusLine().getStatusCode()); throw new RuntimeException("Failed to get login cookie: " + loginUrlPrefix + " returned unexpected error code " + response.getStatusLine().getStatusCode()); } Logger.i("Got " + httpClient.getCookieStore().getCookies().size() + " cookies back"); for (Cookie cookie : httpClient.getCookieStore().getCookies()) { Logger.i("Checking cookie " + cookie); if (cookie.getName().equals("SACSID") || cookie.getName().equals("ACSID")) { Logger.i("Got cookie " + cookie); setLastAuthTime(System.currentTimeMillis()); success = true; return cookie; } } Logger.e("No (S)ASCID cookies returned"); throw new RuntimeException("Failed to get login cookie: " + loginUrlPrefix + " did not return any (S)ACSID cookie"); } catch (ClientProtocolException e) { Logger.e("Failed to get login cookie", e); throw new RuntimeException("Failed to get login cookie", e); } catch (IOException e) { Logger.e("Failed to get login cookie", e); throw new RuntimeException("Failed to get login cookie", e); } finally { httpClient.getParams().setBooleanParameter( ClientPNames.HANDLE_REDIRECTS, true); if (!success) { resetCheckinFuture(); } } } } }
package main.java.com.alexhennieroed.desolationserver.networking; import com.mongodb.MongoClient; import com.mongodb.ServerAddress; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; import static com.mongodb.client.model.Filters.*; import static com.mongodb.client.model.Projections.*; import static com.mongodb.client.model.Updates.*; import main.java.com.alexhennieroed.desolationserver.Server; import main.java.com.alexhennieroed.desolationserver.Settings; import main.java.com.alexhennieroed.desolationserver.game.model.Character; import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; import java.time.LocalDateTime; import java.util.List; import java.util.ArrayList; /** * Manages the connection to the MongoDB database * @author Alexander Hennie-Roed * @version 1.0.0 */ public class DatabaseConnector { private final Server myServer; private MongoDatabase database; private MongoCollection<Document> usercol; private MongoCollection<Document> charcol; private int numusers; private String lastSave; /** * Connects to or creates the database */ public DatabaseConnector(Server server) { this.myServer = server; numusers = 0; lastSave = "Init"; connect(); usercol = database.getCollection("users"); charcol = database.getCollection("characters"); MongoCursor<Document> cursor = usercol.find().iterator(); while (cursor.hasNext()) { numusers++; cursor.next(); } } /** * Connects to the database from Settings */ private void connect() { MongoClient mongoClient = new MongoClient(new ServerAddress(Settings.MONGODB_HOST, Settings.MONGODB_PORT)); database = mongoClient.getDatabase(Settings.DATABASE_NAME); } /** * Gets a list of all users in the database * @return the list of users */ public List<User> getAllUsers() { List userList = new ArrayList(); MongoCursor<Document> cursor = usercol.find().projection(excludeId()).iterator(); while (cursor.hasNext()) { List<Object> list = new 
ArrayList<>(cursor.next().values()); User user = new User((String) list.get(0), (String) list.get(1)); user.setCharacter(getCharacter((ObjectId) list.get(2))); userList.add(user); } return userList; } /** * Updates all of the users in the list * @param userList the list of users to update */ public void updateAllUsers(List<User> userList) { for (User u : userList) { updateUser(u); } myServer.getLogger().logDatabaseEvent("Updated all users in database."); lastSave = LocalDateTime.now().toString().split("\\.")[0].replace('T', '@'); } /** * Adds a new user to the database * @param user the user to add * @return a boolean representing success */ public boolean addUser(User user) { if (numusers >= Settings.MAX_USER_COUNT) { myServer.getLogger().logDatabaseEvent("Failed to create new user " + user.getUsername() + " because the maximum user count has been reached."); } MongoCursor<Document> cursor = usercol.find(eq("username", user.getUsername())).iterator(); if (cursor.hasNext()) { myServer.getLogger().logDatabaseEvent("Failed to create new user " + user.getUsername() + "\nbecause the username already exists."); return false; } Document doc = new Document("username", user.getUsername()) .append("password", user.getPassword()) .append("character_id", addCharacter(user.getCharacter())); usercol.insertOne(doc); numusers++; return true; } /** * Adds a new character to the database * @param character the character to add * @return the id of the character's document */ private ObjectId addCharacter(Character character) { ObjectId id = new ObjectId(); Document doc = new Document("_id", id) .append("name", character.getName()) .append("level", character.getLevel()) .append("max_health", character.getMaxHealth()) .append("current_health", character.getCurrentHealth()) .append("max_stamina", character.getMaxStamina()) .append("current_stamina", character.getCurrentStamina()) .append("exp_to_next", character.getExpToNext()) .append("current_exp", character.getCurrentExp()); 
charcol.insertOne(doc); return id; } /** * Removes the user from the database * @param user the user to remove */ public boolean removeUser(User user) { if (checkUser(user)) { MongoCursor<Document> cursor = usercol.find(eq("username", user.getUsername())) .projection(fields(include("character_id"))) .iterator(); ObjectId id = (ObjectId) new ArrayList<>(cursor.next().values()).get(0); removeCharacter(id); usercol.deleteOne(eq("username", user.getUsername())); numusers--; myServer.getLogger().logDatabaseEvent("Removed user " + user.getUsername() + "."); return true; } else { myServer.getLogger().logDatabaseEvent("Failed to remove user " + user.getUsername() + " because the credentials did not match."); } return false; } /** * Removes the character from the database * @param id the character_id in the user */ private void removeCharacter(ObjectId id) { charcol.deleteOne(eq("_id", id)); } /** * Checks the credentials of the user * @param user the user in question * @return a boolean representing the success */ public boolean checkUser(User user) { MongoCursor<Document> cursor = usercol.find(eq("username", user.getUsername())) .projection(fields(include("password"), excludeId())) .iterator(); if (cursor.hasNext()) { List list = new ArrayList(cursor.next().values()); return user.getPassword().equals(list.get(0)); } return false; } /** * Finds the user in the database based on the username * @param username the username of the user * @return the user in the database */ public User getUser(String username) { MongoCursor<Document> cursor = usercol.find(eq("username", username)) .projection(fields(include("username", "password", "character_id"), excludeId())) .iterator(); if (cursor.hasNext()) { List<Object> list = new ArrayList<>(cursor.next().values()); User user = new User((String) list.get(0), (String) list.get(1)); user.setCharacter(getCharacter((ObjectId) list.get(2))); return user; } myServer.getLogger().logDatabaseEvent("Failed to get user " + username + " because 
the user does not exist."); return null; } /** * Finds the character in the database based on the ObjectId * @param id the character_id in the user's document * @return the character in the database */ private Character getCharacter(ObjectId id) { MongoCursor<Document> cursor = charcol.find(eq("_id", id)) .projection(fields(excludeId())) .iterator(); if (cursor.hasNext()) { List<Object> list = new ArrayList<>(cursor.next().values()); return new Character(list); } myServer.getLogger().logDatabaseEvent("Failed to get character from _id " + id.toHexString()); return null; } /** * Updates a user in the database * @param user the user to update */ public void updateUser(User user) { if (checkUser(user)) { usercol.updateOne(eq("username", user.getUsername()), set("password", user.getPassword())); MongoCursor<Document> cursor = usercol.find(eq("username", user.getUsername())) .projection(fields(include("character_id"), excludeId())) .iterator(); ObjectId id = (ObjectId) new ArrayList<>(cursor.next().values()).get(0); updateCharacter(user.getCharacter(), id); } else { myServer.getLogger().logDatabaseEvent("Failed to update user" + user.getUsername() + " because the credentials did not match."); } } /** * Updates a character in the database * @param character the character to update * @param id the character_id in the user document */ private void updateCharacter(Character character, ObjectId id) { Bson update = combine(set("name", character.getName()), set("level", character.getLevel()), set("max_health", character.getMaxHealth()), set("current_health", character.getCurrentHealth()), set("max_stamina", character.getMaxStamina()), set("current_stamina", character.getCurrentStamina()), set("exp_to_next", character.getExpToNext()), set("current_exp", character.getCurrentExp())); charcol.updateOne(eq("_id", id), update); } /** * Returns the number of users on the database * @return the number of users */ public int getNumusers() { return numusers; } /** * Returns the last save 
of the database * @return the last save */ public String getLastSave() { return lastSave; } }
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.worker.netty;

import alluxio.Configuration;
import alluxio.Constants;
import alluxio.PropertyKey;
import alluxio.network.protocol.RPCProtoMessage;
import alluxio.network.protocol.databuffer.DataBuffer;
import alluxio.network.protocol.databuffer.DataFileChannel;
import alluxio.network.protocol.databuffer.DataNettyBufferV2;
import alluxio.proto.dataserver.Protocol;
import alluxio.proto.status.Status.PStatus;
import alluxio.util.CommonUtils;
import alluxio.util.WaitForOptions;
import alluxio.util.io.BufferUtils;
import alluxio.util.proto.ProtoMessage;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.FileRegion;
import io.netty.channel.embedded.EmbeddedChannel;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.Random;
import java.util.concurrent.TimeoutException;

/**
 * Shared test harness for netty read handlers. Subclasses supply the concrete
 * read-request builder and reader mock ({@link #buildReadRequest(long, long)},
 * {@link #mockReader(long)}); the tests here drive an {@link EmbeddedChannel}
 * with inbound read requests and verify the outbound responses against a
 * byte-sum checksum of the randomly generated input file.
 */
public abstract class ReadHandlerTest {
  protected static final long PACKET_SIZE =
      Configuration.getBytes(PropertyKey.USER_NETWORK_NETTY_READER_PACKET_SIZE_BYTES);
  private final Random mRandom = new Random();

  // Path of the generated input file; set by populateInputFile.
  protected String mFile;
  protected EmbeddedChannel mChannel;
  protected EmbeddedChannel mChannelNoException;

  @Rule
  public TemporaryFolder mTestFolder = new TemporaryFolder();

  /**
   * Reads all bytes of a file.
   */
  @Test
  public void readFullFile() throws Exception {
    long checksumExpected = populateInputFile(PACKET_SIZE * 10, 0, PACKET_SIZE * 10 - 1);
    mChannel.writeInbound(buildReadRequest(0, PACKET_SIZE * 10));
    checkAllReadResponses(mChannel, checksumExpected);
  }

  /**
   * Reads a sub-region of a file.
   */
  @Test
  public void readPartialFile() throws Exception {
    long start = 3;
    long end = PACKET_SIZE * 10 - 99;
    long checksumExpected = populateInputFile(PACKET_SIZE * 10, start, end);
    mChannel.writeInbound(buildReadRequest(start, end + 1 - start));
    checkAllReadResponses(mChannel, checksumExpected);
  }

  /**
   * Handles multiple read requests within a channel sequentially.
   */
  @Test
  public void reuseChannel() throws Exception {
    long fileSize = PACKET_SIZE * 5;
    long checksumExpected = populateInputFile(fileSize, 0, fileSize - 1);
    mChannel.writeInbound(buildReadRequest(0, fileSize));
    checkAllReadResponses(mChannel, checksumExpected);

    // Second request on the same channel, over a fresh (smaller) file.
    fileSize = fileSize / 2 + 1;
    long start = 3;
    long end = fileSize - 1;
    checksumExpected = populateInputFile(fileSize, start, end);
    mChannel.writeInbound(buildReadRequest(start, end - start + 1));
    checkAllReadResponses(mChannel, checksumExpected);
  }

  /**
   * Fails if the read request tries to read an empty file.
   */
  @Test
  public void readEmptyFile() throws Exception {
    populateInputFile(0, 0, 0);
    mChannelNoException.writeInbound(buildReadRequest(0, 0));
    Object response = waitForOneResponse(mChannelNoException);
    checkReadResponse(response, PStatus.INVALID_ARGUMENT);
  }

  /**
   * Cancels the read request immediately after the read request is sent.
   */
  @Test
  public void cancelRequest() throws Exception {
    long fileSize = PACKET_SIZE * 100 + 1;
    populateInputFile(fileSize, 0, fileSize - 1);
    RPCProtoMessage readRequest = buildReadRequest(0, fileSize);
    Protocol.ReadRequest request = readRequest.getMessage().asReadRequest();
    RPCProtoMessage cancelRequest =
        new RPCProtoMessage(new ProtoMessage(request.toBuilder().setCancel(true).build()), null);
    mChannel.writeInbound(readRequest);
    mChannel.writeInbound(cancelRequest);

    // Make sure we can still get EOF after cancelling though the read request is not necessarily
    // fulfilled.
    boolean eof = false;
    long maxIterations = 100;
    while (maxIterations > 0) {
      Object response = waitForOneResponse(mChannel);
      // There is small chance that we can still receive an OK response here because it is too
      // fast to read all the data. If that ever happens, either increase the file size or allow it
      // to be OK here.
      DataBuffer buffer = checkReadResponse(response, PStatus.CANCELED);
      if (buffer == null) {
        // A null payload signals EOF for the cancelled stream.
        eof = true;
        break;
      }
      buffer.release();
      maxIterations--;
      Assert.assertTrue(mChannel.isOpen());
    }
    Assert.assertTrue(eof);
  }

  /**
   * Smoke test: firing channelUnregistered on an idle pipeline must not throw.
   * NOTE(review): method name violates lowerCamelCase and the test has no explicit
   * assertions — it passes as long as no exception propagates.
   */
  @Test
  public void UnregisteredChannelFired() throws Exception {
    ChannelPipeline p = mChannel.pipeline();
    p.fireChannelUnregistered();
  }

  /**
   * Smoke test: firing channelUnregistered while a read request is in flight must not throw.
   * NOTE(review): same naming/assertion caveat as UnregisteredChannelFired.
   */
  @Test
  public void UnregisteredChannelFiredAfterRequest() throws Exception {
    populateInputFile(PACKET_SIZE * 10, 0, PACKET_SIZE * 10 - 1);
    mChannel.writeInbound(buildReadRequest(0, PACKET_SIZE * 10));
    ChannelPipeline p = mChannel.pipeline();
    p.fireChannelUnregistered();
  }

  /**
   * Populates the input file, also computes the checksum for part of the file.
   *
   * @param length the length of the file
   * @param start the start position to compute the checksum
   * @param end the last position to compute the checksum
   * @return the checksum
   */
  protected long populateInputFile(long length, long start, long end) throws Exception {
    long checksum = 0;
    File file = mTestFolder.newFile();
    long pos = 0;
    if (length > 0) {
      FileOutputStream fileOutputStream = new FileOutputStream(file);
      while (length > 0) {
        // Write in chunks of at most 1 MB of random bytes.
        byte[] buffer = new byte[(int) Math.min(length, Constants.MB)];
        mRandom.nextBytes(buffer);
        for (int i = 0; i < buffer.length; i++) {
          // Only bytes inside [start, end] contribute to the expected checksum.
          if (pos >= start && pos <= end) {
            checksum += BufferUtils.byteToInt(buffer[i]);
          }
          pos++;
        }
        fileOutputStream.write(buffer);
        length -= buffer.length;
      }
      fileOutputStream.close();
    }

    mFile = file.getPath();
    mockReader(start);
    return checksum;
  }

  /**
   * Checks all the read responses.
   *
   * Drains outbound responses until EOF (a response with a null payload), summing the
   * payload bytes and comparing against the expected checksum. Handles both payload
   * representations: DataNettyBufferV2 (ByteBuf) and DataFileChannel (FileRegion).
   */
  protected void checkAllReadResponses(EmbeddedChannel channel,
      long checksumExpected) throws Exception {
    boolean eof = false;
    long checksumActual = 0;
    while (!eof) {
      Object readResponse = waitForOneResponse(channel);
      if (readResponse == null) {
        Assert.fail();
        break;
      }
      DataBuffer buffer = checkReadResponse(readResponse, PStatus.OK);
      eof = buffer == null;
      if (buffer != null) {
        if (buffer instanceof DataNettyBufferV2) {
          ByteBuf buf = (ByteBuf) buffer.getNettyOutput();
          while (buf.readableBytes() > 0) {
            checksumActual += BufferUtils.byteToInt(buf.readByte());
          }
          buf.release();
        } else {
          Assert.assertTrue(buffer instanceof DataFileChannel);
          final ByteBuffer byteBuffer = ByteBuffer.allocate((int) buffer.getLength());
          // Minimal sink that copies the FileRegion's bytes into byteBuffer.
          WritableByteChannel writableByteChannel = new WritableByteChannel() {
            @Override
            public boolean isOpen() {
              return true;
            }

            @Override
            public void close() throws IOException {}

            @Override
            public int write(ByteBuffer src) throws IOException {
              int sz = src.remaining();
              byteBuffer.put(src);
              return sz;
            }
          };
          try {
            ((FileRegion) buffer.getNettyOutput()).transferTo(writableByteChannel, 0);
          } catch (IOException e) {
            Assert.fail();
          }
          byteBuffer.flip();
          while (byteBuffer.remaining() > 0) {
            checksumActual += BufferUtils.byteToInt(byteBuffer.get());
          }
        }
      }
    }
    Assert.assertEquals(checksumExpected, checksumActual);
    Assert.assertTrue(eof);
  }

  /**
   * Checks the read response message given the expected error code.
   *
   * @param readResponse the read response
   * @param statusExpected the expected error code
   * @return the data buffer extracted from the read response
   */
  protected DataBuffer checkReadResponse(Object readResponse, PStatus statusExpected) {
    Assert.assertTrue(readResponse instanceof RPCProtoMessage);

    ProtoMessage response = ((RPCProtoMessage) readResponse).getMessage();
    Assert.assertTrue(response.isResponse());

    // A response that carries a payload must be OK; only the terminal (payload-less)
    // response is expected to carry statusExpected.
    DataBuffer buffer = ((RPCProtoMessage) readResponse).getPayloadDataBuffer();
    if (buffer != null) {
      Assert.assertEquals(PStatus.OK, response.asResponse().getStatus());
    } else {
      Assert.assertEquals(statusExpected, response.asResponse().getStatus());
    }
    return buffer;
  }

  /**
   * Waits for one read response message.
   *
   * @return the read response
   */
  protected Object waitForOneResponse(final EmbeddedChannel channel)
      throws TimeoutException, InterruptedException {
    return CommonUtils.waitForResult("response from the channel",
        () -> channel.readOutbound(),
        WaitForOptions.defaults().setTimeoutMs(Constants.MINUTE_MS));
  }

  /**
   * Builds a read request.
   *
   * @param offset the offset
   * @param len the length to read
   * @return the proto message
   */
  protected abstract RPCProtoMessage buildReadRequest(long offset, long len);

  /**
   * Mocks the reader (block reader or UFS file reader).
   *
   * @param start the start pos of the reader
   */
  protected abstract void mockReader(long start) throws Exception;
}
package org.i3xx.step.clock.util;

/*
 * #%L
 * NordApp OfficeBase :: clock
 * %%
 * Copyright (C) 2014 - 2015 I.D.S. DialogSysteme GmbH
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.BitSet;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

/**
 * The BitMap provides a 2 dimensional index of bits to sort and select values.
 * <p>
 * The bits have the following meaning.
 * <ul>
 * <li>  0 - 59   0-59 Seconds
 * <li> 60 - 119  0-59 Minutes
 * <li>120 - 143  0-23 Hours
 * <li>144 - 174  1-31 Days
 * <li>175 - 186  0-11 Month
 * <li>187 - Sunday
 * <li>188 - Monday
 * <li>189 - Tuesday
 * <li>190 - Wednesday
 * <li>191 - Thursday
 * <li>192 - Friday
 * <li>193 - Saturday
 * <li>194 - First
 * <li>195 - Ultimo
 * <li>196 - Once
 * <li>197 - Immediate (at boot time)
 * <li>200 - 255  2015-2070
 * </ul>
 * <p>
 * The fields matches the value of the gregorian calendar
 * set with a 'cal.setFirstDayOfWeek(Calendar.SUNDAY);'
 * (Calendar.SUNDAY == 1 maps to bit 187; see {@link #setDayOfTheWeek(int)}).
 * <ul>
 * <li>Calendar.SECOND
 * <li>Calendar.MINUTE
 * <li>Calendar.HOUR_OF_DAY
 * <li>Calendar.DAY_OF_MONTH
 * <li>Calendar.MONTH
 * <li>Calendar.YEAR
 * <li>Calendar.DAY_OF_WEEK
 * </ul>
 *
 * @author Stefan
 *
 */
public class BitTime {

	/**
	 * The first year that is in the range of valid years
	 */
	public static final int FIRST_YEAR = 2015;

	/**
	 * The last year that is in the range of valid years
	 */
	public static final int LAST_YEAR = 2070;

	/**
	 * The day offset to match the field DAY_OF_MONTH of the Gregorian Calendar
	 */
	public static final int DAY_OFFSET = 1;

	/**
	 * The day offset to match the field DAY_OF_WEEK_OFFSET of the Gregorian Calendar
	 */
	public static final int DAY_OF_WEEK_OFFSET = 1;

	/** The time value*/
	private BitSet time;

	/**
	 * Creates a new BitTime object with no bits set.
	 */
	public BitTime() {
		time = new BitSet();
	}

	/**
	 * Creates a new BitTime object with the date.
	 *
	 * @param date The date of the BitTime
	 */
	public BitTime(Date date) {
		time = new BitSet();
		setTime(date);
	}

	/**
	 * Creates a new BitTime object with the date. The date uses the format
	 * described in SimpleDateFormat.
	 *
	 * @param date The date as a String
	 * @param format The format of the String (@see SimpleDateFormat)
	 * @throws ParseException If the date does not match the format
	 */
	public BitTime(String date, String format) throws ParseException {
		time = new BitSet();
		setTime(date, format);
	}

	/**
	 * Creates a new BitTime object with the date. The date uses the same
	 * time format as a cron job <tt>(minute, hour, day, month, day-of-week)</tt>.
	 *
	 * 'http://troubadix.dn.fh-koeln.de/unix/cronjobs_syntax.html'
	 *
	 * @param date The date as a String
	 */
	public BitTime(String date) {
		CrontabParser p = new CrontabParser();
		time = p.parseToBitTime(date).getTime();
	}

	/**
	 * Creates a new BitTime from single fields. A value of -1 leaves the
	 * corresponding field unset.
	 *
	 * @param year The value
	 * @param month The value
	 * @param day The value
	 * @param hour The value
	 * @param minute The value
	 * @param second The value
	 * @param dayOfTheWeek The value
	 */
	public BitTime(int year, int month, int day, int hour, int minute, int second, int dayOfTheWeek) {
		time = new BitSet();
		if(year>-1)
			setYear(year);
		if(month>-1)
			setMonth(month);
		if(day>-1)
			setDay(day);
		if(hour>-1)
			setHour(hour);
		if(minute>-1)
			setMinute(minute);
		if(second>-1)
			setSecond(second);
		if(dayOfTheWeek>-1)
			setDayOfTheWeek(dayOfTheWeek);
	}

	/**
	 * Gets the BitSet
	 *
	 * @return The BitSet
	 */
	public BitSet getTime() {
		return time;
	}

	/**
	 * Gets the BitSet as a Date.
	 * NOTE(review): if a field was never set, its getter returns an
	 * out-of-range value (e.g. -1 for seconds) and the resulting Date is
	 * undefined - callers must set all fields first.
	 *
	 * @return The Date
	 */
	public Date getDate() {
		int second = getSecond();
		int minute = getMinute();
		int hour = getHour();
		int day = getDay();
		int month = getMonth();
		int year = getYear();

		GregorianCalendar cal = new GregorianCalendar();
		cal.setFirstDayOfWeek(Calendar.SUNDAY);
		cal.set(year, month, day, hour, minute, second);
		//skip millis
		cal.set(Calendar.MILLISECOND, 0);

		return cal.getTime();
	}

	/**
	 * Sets the time and date
	 *
	 * @param date The date as a String
	 * @param format (dd.MM.yyyy HH:mm:ss)
	 * @throws ParseException If the date does not match the format
	 *
	 * @see SimpleDateFormat
	 */
	void setTime(String date, String format) throws ParseException {
		SimpleDateFormat sdf = new SimpleDateFormat(format);
		Date d = sdf.parse(date);
		setTime(d);
	}

	/**
	 * Sets the bits using a Date object.
	 * The first day of the week is the sunday.
	 *
	 * @param date The date to set the bits
	 */
	void setTime(Date date) {
		GregorianCalendar cal = new GregorianCalendar();
		cal.setFirstDayOfWeek(Calendar.SUNDAY);
		cal.setTime(date);

		int second = cal.get(Calendar.SECOND);
		int minute = cal.get(Calendar.MINUTE);
		int hour = cal.get(Calendar.HOUR_OF_DAY);
		int day = cal.get(Calendar.DAY_OF_MONTH);
		int month = cal.get(Calendar.MONTH);
		int year = cal.get(Calendar.YEAR);
		int dayOfWeek = cal.get(Calendar.DAY_OF_WEEK);

		setSecond(second);
		setMinute(minute);
		setHour(hour);
		setDay(day);
		setMonth(month);
		setYear(year);
		setDayOfTheWeek(dayOfWeek);
	}

	/**
	 * Clears the time
	 */
	public void clear() {
		time.clear();
	}

	/**
	 * Sets the bit of the second
	 *
	 * @param val The second 0-59
	 */
	public void setSecond(int val) {
		if(val<0 || val>59)
			throw new IllegalArgumentException("The value "+val+" is no valid second (0-59).");

		time.set(val);
	}

	/**
	 * Gets the second (-1 if no second bit is set)
	 *
	 * @return The value
	 */
	public int getSecond() {
		BitSet s = time.get(0, 60);
		return s.nextSetBit(0);
	}

	/**
	 * Sets the bit of the minute
	 *
	 * @param val The minute 0-59
	 */
	public void setMinute(int val) {
		if(val<0 || val>59)
			throw new IllegalArgumentException("The value "+val+" is no valid minute (0-59).");

		time.set(60+val);
	}

	/**
	 * Gets the minute (-1 if no minute bit is set)
	 *
	 * @return The value
	 */
	public int getMinute() {
		BitSet s = time.get(60, 120);
		return s.nextSetBit(0);
	}

	/**
	 * Sets the bit of the hour
	 *
	 * @param val The hour 0-23
	 */
	public void setHour(int val) {
		if(val<0 || val>23)
			throw new IllegalArgumentException("The value "+val+" is no valid hour (0-23).");

		time.set(120+val);
	}

	/**
	 * Gets the hour 0-23 (-1 if no hour bit is set)
	 *
	 * @return The value
	 */
	public int getHour() {
		BitSet s = time.get(120, 144);
		return s.nextSetBit(0);
	}

	/**
	 * Sets the bit of the day. Note: The field is 1 less than the field
	 * day-of-month of the gregorian calendar.
	 *
	 * @param val The day 1-31
	 */
	public void setDay(int val) {
		// BUGFIX: the lower bound was 0; val==0 set bit 144+(0-1)==143, which
		// lies inside the hours region (120-143) and silently corrupted the hour.
		if(val<1 || val>31)
			throw new IllegalArgumentException("The value "+val+" is no valid day (1-31).");

		time.set(144+(val-DAY_OFFSET));
	}

	/**
	 * Gets the day 1-31 (0 if no day bit is set)
	 *
	 * @return The value
	 */
	public int getDay() {
		BitSet s = time.get(144, 175);
		return s.nextSetBit(0)+DAY_OFFSET;
	}

	/**
	 * Sets the bit of the month
	 *
	 * @param val The month 0-11
	 */
	public void setMonth(int val) {
		if(val<0 || val>11)
			throw new IllegalArgumentException("The value "+val+" is no valid month (0-11).");

		time.set(175+val);
	}

	/**
	 * Gets the month 0-11 (-1 if no month bit is set)
	 *
	 * @return The value
	 */
	public int getMonth() {
		BitSet s = time.get(175, 187);
		return s.nextSetBit(0);
	}

	/**
	 * Sets the bit of the day of the week
	 *
	 * 1 - Sunday
	 * 2 - Monday
	 * 3 - Tuesday
	 * 4 - Wednesday
	 * 5 - Thursday
	 * 6 - Friday
	 * 7 - Saturday
	 *
	 * @param val The day of the week 1-7
	 */
	public void setDayOfTheWeek(int val) {
		if(val<1 || val>7)
			throw new IllegalArgumentException("The value "+val+" is no valid day of the week (1-7).");

		time.set(187+(val-DAY_OF_WEEK_OFFSET));
	}

	/**
	 * Gets the day of the week (0 if no day-of-week bit is set)
	 *
	 * @return The value
	 */
	public int getDayOfTheWeek() {
		BitSet s = time.get(187, 194);
		return s.nextSetBit(0)+DAY_OF_WEEK_OFFSET;
	}

	/**
	 * Sets the bit of first
	 */
	public void setFirst(boolean flag) {
		time.set(194, flag);
	}

	/**
	 * Gets the bit of first
	 * @return The bit
	 */
	public boolean isFirst() {
		return time.get(194);
	}

	/**
	 * Sets the bit of ultimo
	 */
	public void setUltimo(boolean flag) {
		time.set(195, flag);
	}

	/**
	 * Gets the bit of ultimo
	 * @return The bit
	 */
	public boolean isUltimo() {
		return time.get(195);
	}

	/**
	 * Sets the bit of once
	 */
	public void setOnce(boolean flag) {
		time.set(196, flag);
	}

	/**
	 * Gets the bit of once
	 * @return The bit
	 */
	public boolean isOnce() {
		return time.get(196);
	}

	/**
	 * Sets the bit of immediate (boot time)
	 */
	public void setImmediate(boolean flag) {
		time.set(197, flag);
	}

	/**
	 * Gets the bit of immediate (boot time)
	 * @return The bit
	 */
	public boolean isImmediate() {
		return time.get(197);
	}

	/**
	 * Sets the bit of the year
	 *
	 * @param val The year FIRST_YEAR-LAST_YEAR
	 */
	public void setYear(int val) {
		if(val<FIRST_YEAR || val>LAST_YEAR)
			throw new IllegalArgumentException("The value "+val+" is no valid year (2015-2070).");

		time.set(200+(val-FIRST_YEAR));
	}

	/**
	 * Gets the year (FIRST_YEAR-1 if no year bit is set)
	 *
	 * @return The value
	 */
	public int getYear() {
		// BUGFIX: the upper bound was time.length()+1, which is smaller than the
		// lower bound 200 when no bit >= 199 is set, making BitSet.get(from, to)
		// throw IndexOutOfBoundsException. Use the fixed year range instead.
		BitSet s = time.get(200, 200 + (LAST_YEAR - FIRST_YEAR) + 1);
		return s.nextSetBit(0)+FIRST_YEAR;
	}
}
// ========================================================================
// Copyright 2007-2008 NEXCOM Systems
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.cipango.server.bio;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertTrue;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.util.Iterator;

import javax.servlet.ServletException;
import javax.servlet.sip.Address;
import javax.servlet.sip.SipServletMessage;
import javax.servlet.sip.SipURI;

import org.cipango.server.SipHandler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for {@link UdpConnector}: lifecycle, SIP URI parameters, keep-alive
 * pings and message parsing over a real datagram socket on port 5040.
 */
public class UdpConnectorTest
{
	UdpConnector _connector;
	// Written by the connector's handler thread and read by the test thread;
	// volatile guarantees cross-thread visibility of the latest message.
	volatile SipServletMessage _message;

	@Before
	public void setUp() throws Exception
	{
		_connector = new UdpConnector();
		_connector.setHost("localhost");
		_connector.setPort(5040);
		_connector.setThreadPool(new QueuedThreadPool());
		_connector.setHandler(new TestHandler());
		_connector.start();
		_message = null;
	}

	@After
	public void tearDown() throws Exception
	{
		// Give in-flight datagrams a moment to drain before stopping.
		Thread.sleep(40);
		_connector.stop();
		Thread.sleep(10);
	}

	@Test
	public void testLifeCycle() throws Exception
	{
		UdpConnector connector = new UdpConnector();
		connector.setHost("localhost");
		connector.setPort(5070);
		connector.setThreadPool(new QueuedThreadPool());
		for (int i = 0; i < 10; i++)
		{
			connector.start();
			assertTrue(connector.isRunning());
			connector.stop();
			assertFalse(connector.isRunning());
			Thread.sleep(10);
		}
	}

	@Test
	public void testParam() throws Exception
	{
		assertEquals("sip:localhost:5040", _connector.getSipUri().toString());
		_connector.stop();
		_connector.setTransportParam(true);
		_connector.start();
		assertEquals("sip:localhost:5040;transport=udp", _connector.getSipUri().toString());
	}

	@Test
	public void testPing() throws Exception
	{
		for (int i = 0; i < 100; i++)
		{
			send(_pingEol);
			send(_pingEolEol);
		}
	}

	@Test
	public void testMessage() throws Exception
	{
		send(_msg);
		SipServletMessage message = getMessage(1000);
		send(_msg2);
		Thread.sleep(300);
		assertNotNull(message);
		assertEquals("REGISTER", message.getMethod());
		assertEquals("c117fdfda2ffd6f4a859a2d504aedb25@127.0.0.1", message.getCallId());
	}

	/**
	 * Polls for a handled message until it arrives or the timeout elapses.
	 *
	 * @param timeout maximum wait in milliseconds
	 * @return the handled message, or null on timeout
	 */
	private SipServletMessage getMessage(long timeout) throws InterruptedException
	{
		if (_message != null)
			return _message;
		long absTimeout = System.currentTimeMillis() + timeout;
		while (absTimeout - System.currentTimeMillis() > 0)
		{
			Thread.sleep(50);
			if (_message != null)
				return _message;
		}
		return null;
	}

	@Test
	public void testRoute() throws Exception
	{
		send(_test);
		SipServletMessage message = getMessage(1000);
		send(_msg2);
		send(_msg2);
		send(_msg2);
		send(_msg2);
		Thread.sleep(100);
		assertNotNull(_message);
		Iterator<Address> it = message.getAddressHeaders("route");
		assertEquals("proxy-gen2xx", ((SipURI) it.next().getURI()).getUser());
		assertTrue(it.hasNext());
		assertEquals("com.bea.sipservlet.tck.apps.spectestapp.uas", message.getHeader("application-name"));
	}

	private void send(String message) throws Exception
	{
		// try-with-resources: the socket was previously never closed, leaking a
		// file descriptor per call (testPing alone opens 200 sockets).
		try (DatagramSocket ds = new DatagramSocket())
		{
			byte[] b = message.getBytes("UTF-8");
			DatagramPacket packet = new DatagramPacket(b, 0, b.length,
					InetAddress.getByName("localhost"), 5040);
			ds.send(packet);
		}
	}

	/** Records the last handled message for the test thread to inspect. */
	class TestHandler implements SipHandler
	{
		public void handle(SipServletMessage message) throws IOException, ServletException
		{
			_message = message;
		}

		public Server getServer()
		{
			// TODO Auto-generated method stub
			return null;
		}

		public void setServer(Server server)
		{
			// TODO Auto-generated method stub
		}
	}

	String _pingEolEol = "\r\n\r\n";
	String _pingEol = "\r\n";

	String _msg =
		"REGISTER sip:127.0.0.1:5070 SIP/2.0\r\n"
		+ "Call-ID: c117fdfda2ffd6f4a859a2d504aedb25@127.0.0.1\r\n"
		+ "CSeq: 2 REGISTER\r\n"
		+ "From: <sip:cipango@cipango.org>;tag=9Aaz+gQAAA\r\n"
		+ "To: <sip:cipango@cipango.org>\r\n"
		+ "Via: SIP/2.0/UDP 127.0.0.1:6010\r\n"
		+ "Max-Forwards: 70\r\n"
		+ "User-Agent: Test Script\r\n"
		+ "Contact: \"Cipango\" <sip:127.0.0.1:6010;transport=udp>\r\n"
		+ "Allow: INVITE, ACK, BYE, CANCEL, PRACK, REFER, MESSAGE, SUBSCRIBE\r\n"
		+ "MyHeader: toto\r\n"
		+ "Content-Length: 0\r\n\r\n";

	String _msg2 =
		"REGISTER sip:127.0.0.1:5070 SIP/2.0\r\n"
		+ "Call-ID: foo@bar\r\n"
		+ "CSeq: 2 REGISTER\r\n"
		+ "From: <sip:cipango@cipango.org>;tag=9Aaz+gQAAA\r\n"
		+ "To: <sip:cipango@cipango.org>\r\n"
		+ "Via: SIP/2.0/UDP 127.0.0.1:6010\r\n"
		+ "Max-Forwards: 70\r\n"
		+ "User-Agent: Test Script\r\n"
		+ "Contact: \"Cipango\" <sip:127.0.0.1:6010;transport=udp>\r\n"
		+ "Allow: INVITE, ACK, BYE, CANCEL, PRACK, REFER, MESSAGE, SUBSCRIBE\r\n"
		+ "MyHeader: toto\r\n"
		+ "Content-Length: 0\r\n\r\n";

	String _test =
		"MESSAGE sip:proxy-gen2xx@127.0.0.1:5060 SIP/2.0\r\n"
		+ "Call-ID: 13a769769217a57d911314c67df8c729@192.168.1.205\r\n"
		+ "CSeq: 1 MESSAGE\r\n"
		+ "From: \"Alice\" <sip:alice@192.168.1.205:5071>;tag=1727584951\r\n"
		+ "To: \"JSR289_TCK\" <sip:JSR289_TCK@127.0.0.1:5060>\r\n"
		+ "Via: SIP/2.0/UDP 192.168.1.205:5071;branch=z9hG4bKaf9d7cee5d176c7edf2fbf9b1e33fc3a\r\n"
		+ "Max-Forwards: 5\r\n"
		+ "Route: \"JSR289_TCK\" <sip:proxy-gen2xx@127.0.0.1:5060;lr>,<sip:127.0.0.1:5060;transport=udp;lr>\r\n"
		+ "Application-Name: com.bea.sipservlet.tck.apps.spectestapp.uas\r\n"
		+ "Servlet-Name: Addressing\r\n"
		+ "Content-Type: text/plain\r\n"
		+ "Content-Length: 0\r\n\r\n";
}
/**
 * Copyright 2016 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.conductor.dao.dynomite;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import javax.inject.Inject;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.inject.Singleton;
import com.netflix.conductor.core.config.Configuration;
import com.netflix.discovery.DiscoveryClient;
import com.netflix.dyno.connectionpool.exception.DynoException;
import com.netflix.dyno.jedis.DynoJedisClient;

import redis.clients.jedis.JedisCommands;
import redis.clients.jedis.ScanParams;
import redis.clients.jedis.ScanResult;
import redis.clients.jedis.Tuple;
import redis.clients.jedis.params.sortedset.ZAddParams;

/**
 *
 * @author Viren Proxy for the Dynomite client
 */
@Singleton
public class DynoProxy {

	private static final Logger logger = LoggerFactory.getLogger(DynoProxy.class);

	protected DiscoveryClient dc;

	protected JedisCommands dynoClient;

	@Inject
	public DynoProxy(DiscoveryClient dc, Configuration config) throws DynoException, InterruptedException, ExecutionException {
		this.dc = dc;
		String cluster = config.getProperty("workflow.dynomite.cluster", null);
		String applicationName = config.getAppId();
		this.dynoClient = new DynoJedisClient.Builder()
				.withApplicationName(applicationName)
				.withDynomiteClusterName(cluster)
				.withDiscoveryClient(dc)
				.build();
	}

	public DynoProxy(JedisCommands dynoClient) {
		this.dynoClient = dynoClient;
	}

	public Set<String> zrange(String key, long start, long end) {
		return dynoClient.zrange(key, start, end);
	}

	public Set<Tuple> zrangeByScoreWithScores(String key, double maxScore, int count) {
		return dynoClient.zrangeByScoreWithScores(key, 0, maxScore, 0, count);
	}

	public Set<String> zrangeByScore(String key, double maxScore, int count) {
		return dynoClient.zrangeByScore(key, 0, maxScore, 0, count);
	}

	public Set<String> zrangeByScore(String key, double minScore, double maxScore, int count) {
		return dynoClient.zrangeByScore(key, minScore, maxScore, 0, count);
	}

	public ScanResult<Tuple> zscan(String key, int cursor) {
		return dynoClient.zscan(key, "" + cursor);
	}

	public String get(String key) {
		return dynoClient.get(key);
	}

	public Long zcard(String key) {
		return dynoClient.zcard(key);
	}

	public Long del(String key) {
		return dynoClient.del(key);
	}

	public Long zrem(String key, String member) {
		return dynoClient.zrem(key, member);
	}

	public String set(String key, String value) {
		return dynoClient.set(key, value);
	}

	public Long setnx(String key, String value) {
		return dynoClient.setnx(key, value);
	}

	public Long zadd(String key, double score, String member) {
		return dynoClient.zadd(key, score, member);
	}

	/** Adds the member only if it is not already present (NX semantics). */
	public Long zaddnx(String key, double score, String member) {
		ZAddParams params = ZAddParams.zAddParams().nx();
		return dynoClient.zadd(key, score, member, params);
	}

	public Long hset(String key, String field, String value) {
		return dynoClient.hset(key, field, value);
	}

	public Long hsetnx(String key, String field, String value) {
		return dynoClient.hsetnx(key, field, value);
	}

	public Long hlen(String key) {
		return dynoClient.hlen(key);
	}

	public String hget(String key, String field) {
		return dynoClient.hget(key, field);
	}

	/**
	 * Scans the hash incrementally and stops once more than {@code count}
	 * entries have been collected (so the result may exceed count by one
	 * scan page) or the cursor wraps to 0.
	 */
	public Map<String, String> hscan(String key, int count) {
		Map<String, String> m = new HashMap<>();
		int cursor = 0;
		do {
			ScanResult<Entry<String, String>> sr = dynoClient.hscan(key, "" + cursor);
			cursor = Integer.parseInt(sr.getStringCursor());
			for (Entry<String, String> r : sr.getResult()) {
				m.put(r.getKey(), r.getValue());
			}
			if(m.size() > count) {
				break;
			}
		} while (cursor > 0);
		return m;
	}

	/** Reads the complete hash via repeated HSCAN (cursor 0 terminates). */
	public Map<String, String> hgetAll(String key) {
		Map<String, String> m = new HashMap<>();
		JedisCommands dyno = dynoClient;
		int cursor = 0;
		do {
			ScanResult<Entry<String, String>> sr = dyno.hscan(key, "" + cursor);
			cursor = Integer.parseInt(sr.getStringCursor());
			for (Entry<String, String> r : sr.getResult()) {
				m.put(r.getKey(), r.getValue());
			}
		} while (cursor > 0);
		return m;
	}

	public List<String> hvals(String key) {
		logger.trace("hvals {}", key);
		return dynoClient.hvals(key);
	}

	/** Collects all field names of the hash via repeated HSCAN. */
	public Set<String> hkeys(String key) {
		logger.trace("hkeys {}", key);
		JedisCommands client = dynoClient;
		Set<String> keys = new HashSet<>();
		int cursor = 0;
		do {
			ScanResult<Entry<String, String>> sr = client.hscan(key, "" + cursor);
			cursor = Integer.parseInt(sr.getStringCursor());
			List<Entry<String, String>> result = sr.getResult();
			for (Entry<String, String> e : result) {
				keys.add(e.getKey());
			}
		} while (cursor > 0);
		return keys;
	}

	public Long hdel(String key, String... fields) {
		// BUGFIX: logging fields[0] threw ArrayIndexOutOfBoundsException when the
		// varargs array was empty; log the whole array instead.
		logger.trace("hdel {} {}", key, Arrays.toString(fields));
		return dynoClient.hdel(key, fields);
	}

	public Long expire(String key, int seconds) {
		return dynoClient.expire(key, seconds);
	}

	public Boolean hexists(String key, String field) {
		return dynoClient.hexists(key, field);
	}

	public Long sadd(String key, String value) {
		logger.trace("sadd {} {}", key, value);
		return dynoClient.sadd(key, value);
	}

	public Long srem(String key, String member) {
		logger.trace("srem {} {}", key, member);
		return dynoClient.srem(key, member);
	}

	public boolean sismember(String key, String member) {
		return dynoClient.sismember(key, member);
	}

	/** Reads the complete set via repeated SSCAN with a page size of 50. */
	public Set<String> smembers(String key) {
		logger.trace("smembers {}", key);
		JedisCommands client = dynoClient;
		Set<String> r = new HashSet<>();
		int cursor = 0;
		ScanParams sp = new ScanParams();
		sp.count(50);
		do {
			ScanResult<String> sr = client.sscan(key, "" + cursor, sp);
			cursor = Integer.parseInt(sr.getStringCursor());
			r.addAll(sr.getResult());
		} while (cursor > 0);
		return r;
	}

	public Long scard(String key) {
		return dynoClient.scard(key);
	}
}
package com.kbremote.main.model;

import java.io.File;
import java.io.FileWriter;
import java.util.List;

import ModbusClient.ModbusClient;

import com.kbremote.main.service.AddressService;
import com.kbremote.main.service.UserService;
import com.kbremote.main.util.AddressUtils;
import com.kbremote.main.util.PropertyUtils;

/**
 * Manual smoke-test harness for the paint-machine Modbus/PLC integration.
 * Each {@code testXxx} method exercises one subsystem (address reads/writes,
 * configuration, users, raw Modbus dumps); enable exactly one call in
 * {@link #main(String[])} and run it from the IDE.
 */
public class Trial {

    // PLC address map. M-type (coil) addresses are small ints; D-type (register)
    // addresses are in the 6000+ range.
    // NOTE(review): several entries share a value (SAG_GOLGE == SOL_GOLGE == 6041,
    // BOYA_ATIMI_S_OFF == BOYA_ATIMI_Y_OFF == 170, and the last six constants are
    // all 6056) -- these look like copy-paste placeholders; verify against the
    // PLC address map before relying on them.
    public static int MAKINE_ON = 0;
    public static int RECETE = 6057;
    public static int KONVEYOR_HIZI = 6036;
    public static int KONVEYOR_ON = 160;
    public static int SALINIM_KOL_HIZI = 220;
    public static int SALINIM_KOLU_ON = 162;
    public static int POMPA_BASINCI = 6065;
    public static int BOYA_ATIMI_S_ON = 164;
    public static int BOYA_ATIMI_S_OFF = 170;
    public static int BOYA_ATIMI_Y_ON = 171;
    public static int BOYA_ATIMI_Y_OFF = 170;
    public static int KAZIMA_UNITESI_ON = 163;
    public static int HAVALANDIRMA_DEBISI = 1302;
    public static int HAVALANDIRMA_ON = 161;
    public static int SAG_GOLGE = 6041;
    public static int SOL_GOLGE = 6041;
    public static int ON_ARKA_GOLGE = 6042;
    public static int KOL_TABANCA_A_ON = 560;
    public static int KOL_TABANCA_B_ON = 561;
    public static int KOL_TABANCA_C_ON = 562;
    public static int TOZ_ALMA_UNITESI_ON = 172;
    public static int POMPA_ON = 296;
    public static int KOL_TABANCA_SISTEM_SECIMI_ON = 565;
    public static int TABANCA_1_A_YUKSEKLIK = 6000;
    public static int TABANCA_1_A_BETA = 6001;
    public static int TABANCA_1_A_ALFA = 6002;
    public static int TABANCA_1_B_YUKSEKLIK = 6012;
    public static int TABANCA_1_B_BETA = 6013;
    public static int TABANCA_1_B_ALFA = 6014;
    public static int TABANCA_1_C_YUKSEKLIK = 6024;
    public static int TABANCA_1_C_BETA = 6025;
    public static int TABANCA_1_C_ALFA = 6026;
    public static int TABANCA_2_A_YUKSEKLIK = 6003;
    public static int TABANCA_2_A_BETA = 6004;
    public static int TABANCA_2_A_ALFA = 6005;
    public static int TABANCA_2_B_YUKSEKLIK = 6015;
    public static int TABANCA_2_B_BETA = 6016;
    public static int TABANCA_2_B_ALFA = 6017;
    public static int TABANCA_2_C_YUKSEKLIK = 6027;
    public static int TABANCA_2_C_BETA = 6028;
    public static int TABANCA_2_C_ALFA = 6029;
    public static int TABANCA_3_A_YUKSEKLIK = 6006;
    public static int TABANCA_3_A_BETA = 6007;
    public static int TABANCA_3_A_ALFA = 6008;
    public static int TABANCA_3_B_YUKSEKLIK = 6018;
    public static int TABANCA_3_B_BETA = 6019;
    public static int TABANCA_3_B_ALFA = 6020;
    public static int TABANCA_3_C_YUKSEKLIK = 6030;
    public static int TABANCA_3_C_BETA = 6031;
    public static int TABANCA_3_C_ALFA = 6032;
    public static int TABANCA_4_A_YUKSEKLIK = 6009;
    public static int TABANCA_4_A_BETA = 6010;
    public static int TABANCA_4_A_ALFA = 6011;
    public static int TABANCA_4_B_YUKSEKLIK = 6021;
    public static int TABANCA_4_B_BETA = 6022;
    public static int TABANCA_4_B_ALFA = 6023;
    public static int TABANCA_4_C_YUKSEKLIK = 6033;
    public static int TABANCA_4_C_BETA = 6034;
    public static int TABANCA_4_C_ALFA = 6035;
    public static int TABANCA_SAG_DUZELTME = 6044;
    public static int TABANCA_SOL_DUZELTME = 6045;
    public static int MAX_BOYAMA_ALANI = 6046;
    public static int MIN_BOYAMA_ALANI = 6047;
    public static int MAX_FOTOSEL_CALISMA_ALANI = 6048;
    public static int MIN_FOTOSEL_CALISMA_ALANI = 6049;
    public static int SENSOR_ILE_1_TABANCA_ARASI = 6050;
    public static int SENSOR_ILE_2_TABANCA_ARASI = 6052;
    public static int SERVO_HIZLANMA_RAMPASI = 6054;
    public static int SERVO_YAVASLAMA_RAMPASI = 6055;
    public static int SENSOR_DUZELTME = 6056;
    public static int POMPA_BEKLEME_SURESI = 6056;
    public static int POMPA_CALISMA_SURESI = 6056;
    public static int SENSOR_ZAMANLAYICI_T_ON = 6056;
    public static int SENSOR_ZAMANLAYICI_T_OFF = 6056;
    public static int POMPA_HAVA_BASINCI_LIMITI = 6056;

    /** Entry point: uncomment exactly one manual check and run. */
    public static void main(String[] args) {
        //testModbusClient();
        testReadAddresses();
        //testWriteValues();
        //testAddressService();
        //testAddressUtils();
        //testProperties();
        //testUserService();
    }

    /**
     * Reads every configured address from the PLC and prints its current value.
     * M-type addresses are read as coils, D-type as holding registers.
     */
    public static void testReadAddresses() {
        AddressService service = new AddressService();
        List<Address> addressList = service.getAddressList();
        try {
            System.out.println("READ START");
            ModbusClient client = new ModbusClient(AddressUtils.getClientIP(), 502);
            client.Connect();
            try {
                for (Address add : addressList) {
                    if (AddressUtils.M_TYPE.equals(add.getType())) {
                        System.out.println(add.getName() + " - " + add.getBit() + " - "
                                + client.ReadCoils(add.getValue(), 1)[0]);
                    } else if (AddressUtils.D_TYPE.equals(add.getType())) {
                        System.out.println(add.getName() + " - " + add.getBit() + " - "
                                + client.ReadHoldingRegisters(add.getValue(), 1)[0]);
                    }
                }
            } finally {
                // Always release the Modbus connection, even if a read throws.
                client.Disconnect();
            }
            System.out.println("READ END");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Writes a fixed test value to every configured address: coils are set to
     * {@code true}, holding registers to {@code 50}. Destructive -- only run
     * against a test PLC.
     */
    public static void testWriteValues() {
        AddressService service = new AddressService();
        List<Address> addressList = service.getAddressList();
        try {
            System.out.println("WRITE START");
            ModbusClient client = new ModbusClient(AddressUtils.getClientIP(), 502);
            client.Connect();
            try {
                for (Address add : addressList) {
                    if (AddressUtils.M_TYPE.equals(add.getType())) {
                        client.WriteSingleCoil(add.getValue(), true);
                    } else if (AddressUtils.D_TYPE.equals(add.getType())) {
                        client.WriteSingleRegister(add.getValue(), 50);
                    }
                }
            } finally {
                // Always release the Modbus connection, even if a write throws.
                client.Disconnect();
            }
            System.out.println("WRITE END");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Smoke-tests the {@link AddressService} by loading the address list.
     * The result is intentionally unused -- invoking the service without an
     * exception is the whole check. (Dead commented-out code-generation loop
     * removed; regenerate the constant table above from the list if needed.)
     */
    public static void testAddressService() {
        AddressService addressService = new AddressService();
        List<Address> addList = addressService.getAddressList();
    }

    /** Prints every address constant resolved via {@link AddressUtils}, in declaration order. */
    public static void testAddressUtils() {
        // Same values and order as the original one-println-per-constant version;
        // the array + loop merely removes 75 lines of repetition.
        int[] addresses = {
                AddressUtils.MAKINE_ON, AddressUtils.RECETE, AddressUtils.KONVEYOR_HIZI,
                AddressUtils.KONVEYOR_ON, AddressUtils.SALINIM_KOL_HIZI, AddressUtils.SALINIM_KOLU_ON,
                AddressUtils.POMPA_BASINCI, AddressUtils.BOYA_ATIMI_S_ON, AddressUtils.BOYA_ATIMI_S_OFF,
                AddressUtils.BOYA_ATIMI_Y_ON, AddressUtils.BOYA_ATIMI_Y_OFF, AddressUtils.KAZIMA_UNITESI_ON,
                AddressUtils.HAVALANDIRMA_DEBISI, AddressUtils.HAVALANDIRMA_ON, AddressUtils.SAG_GOLGE,
                AddressUtils.SOL_GOLGE, AddressUtils.ON_ARKA_GOLGE, AddressUtils.KOL_TABANCA_A_ON,
                AddressUtils.KOL_TABANCA_B_ON, AddressUtils.KOL_TABANCA_C_ON, AddressUtils.TOZ_ALMA_UNITESI_ON,
                AddressUtils.POMPA_ON, AddressUtils.KOL_TABANCA_SISTEM_SECIMI_ON,
                AddressUtils.TABANCA_1_A_YUKSEKLIK, AddressUtils.TABANCA_1_A_BETA, AddressUtils.TABANCA_1_A_ALFA,
                AddressUtils.TABANCA_1_B_YUKSEKLIK, AddressUtils.TABANCA_1_B_BETA, AddressUtils.TABANCA_1_B_ALFA,
                AddressUtils.TABANCA_1_C_YUKSEKLIK, AddressUtils.TABANCA_1_C_BETA, AddressUtils.TABANCA_1_C_ALFA,
                AddressUtils.TABANCA_2_A_YUKSEKLIK, AddressUtils.TABANCA_2_A_BETA, AddressUtils.TABANCA_2_A_ALFA,
                AddressUtils.TABANCA_2_B_YUKSEKLIK, AddressUtils.TABANCA_2_B_BETA, AddressUtils.TABANCA_2_B_ALFA,
                AddressUtils.TABANCA_2_C_YUKSEKLIK, AddressUtils.TABANCA_2_C_BETA, AddressUtils.TABANCA_2_C_ALFA,
                AddressUtils.TABANCA_3_A_YUKSEKLIK, AddressUtils.TABANCA_3_A_BETA, AddressUtils.TABANCA_3_A_ALFA,
                AddressUtils.TABANCA_3_B_YUKSEKLIK, AddressUtils.TABANCA_3_B_BETA, AddressUtils.TABANCA_3_B_ALFA,
                AddressUtils.TABANCA_3_C_YUKSEKLIK, AddressUtils.TABANCA_3_C_BETA, AddressUtils.TABANCA_3_C_ALFA,
                AddressUtils.TABANCA_4_A_YUKSEKLIK, AddressUtils.TABANCA_4_A_BETA, AddressUtils.TABANCA_4_A_ALFA,
                AddressUtils.TABANCA_4_B_YUKSEKLIK, AddressUtils.TABANCA_4_B_BETA, AddressUtils.TABANCA_4_B_ALFA,
                AddressUtils.TABANCA_4_C_YUKSEKLIK, AddressUtils.TABANCA_4_C_BETA, AddressUtils.TABANCA_4_C_ALFA,
                AddressUtils.TABANCA_SAG_DUZELTME, AddressUtils.TABANCA_SOL_DUZELTME,
                AddressUtils.MAX_BOYAMA_ALANI, AddressUtils.MIN_BOYAMA_ALANI,
                AddressUtils.MAX_FOTOSEL_CALISMA_ALANI, AddressUtils.MIN_FOTOSEL_CALISMA_ALANI,
                AddressUtils.SENSOR_ILE_1_TABANCA_ARASI, AddressUtils.SENSOR_ILE_2_TABANCA_ARASI,
                AddressUtils.SERVO_HIZLANMA_RAMPASI, AddressUtils.SERVO_YAVASLAMA_RAMPASI,
                AddressUtils.SENSOR_DUZELTME, AddressUtils.POMPA_BEKLEME_SURESI,
                AddressUtils.POMPA_CALISMA_SURESI, AddressUtils.SENSOR_ZAMANLAYICI_T_ON,
                AddressUtils.SENSOR_ZAMANLAYICI_T_OFF, AddressUtils.POMPA_HAVA_BASINCI_LIMITI
        };
        for (int address : addresses) {
            System.out.println(address);
        }
    }

    /** Prints the statically loaded configuration properties. */
    public static void testProperties() {
        // Accessing the static fields triggers PropertyUtils class initialisation;
        // the unused instance the original created has been removed.
        System.out.println(PropertyUtils.UPDATE_FLAG);
        System.out.println(PropertyUtils.CLIENT_IP);
    }

    /** Prints every configured user with password and roles. */
    public static void testUserService() {
        UserService userService = new UserService();
        List<User> userList = userService.getUserList();
        for (User u : userList) {
            System.out.print("USER : " + u.getUsername()
                    + " - PASSWORD : " + u.getPassword() + " - ROLES : ");
            for (Role r : u.getAuthorities()) {
                System.out.print(r.getName() + " | ");
            }
            System.out.println();
        }
    }

    /**
     * Dumps the first 10,000 coil values from the PLC to {@code C:\MODBUS\1-Coils2.txt}.
     * <p>
     * Modbus numbering reference:
     * coils span 00001-09999, discrete inputs 10001-19999,
     * input registers 30001-39999, holding registers 40001-49999.
     * (Dead commented-out dumps for the other three register classes were removed.)
     */
    public static void testModbusClient() {
        try {
            ModbusClient client = new ModbusClient(AddressUtils.getClientIP(), 502);
            client.Connect();
            try {
                System.out.println("############ START READING ############");
                System.out.println("############ COILS ############");
                // StringBuilder avoids the O(n^2) cost of repeated String += in a
                // 10,000-iteration loop.
                StringBuilder content = new StringBuilder();
                for (int i = 0; i < 10000; i++) {
                    boolean[] coils = client.ReadCoils(i, 1);
                    content.append(i).append(" : ");
                    if (coils.length > 1) {
                        for (int j = 0; j < coils.length; j++) {
                            content.append(coils[j]).append(" - ");
                        }
                    } else {
                        content.append(coils[0]);
                    }
                    content.append("\n");
                }
                File coilsFile = new File("C:\\MODBUS\\1-Coils2.txt");
                // try-with-resources guarantees the writer is closed even on failure
                // (the original leaked it if write() threw).
                try (FileWriter writer = new FileWriter(coilsFile)) {
                    writer.write(content.toString());
                }
                System.out.println("ENDED");
            } finally {
                client.Disconnect();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
// Copyright 2016 Yahoo Inc.
// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
package com.yahoo.bard.webservice.util;

import com.yahoo.bard.webservice.data.time.Granularity;
import com.yahoo.bard.webservice.data.time.TimeGrain;
import com.yahoo.bard.webservice.data.time.ZonedTimeGrain;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

/**
 * Util functions to perform operations on JodaTime objects.
 */
public class DateTimeUtils {

    private static final Logger LOG = LoggerFactory.getLogger(DateTimeUtils.class);

    /**
     * Private constructor: this is a static utility class and must not be instantiated.
     */
    private DateTimeUtils() {
    }

    /**
     * Adds timeGrain to a given dateTime.
     *
     * @param dateTime  dateTime to which timeGrain is to be added
     * @param timeGrain  timeGrain to be added
     *
     * @return new dateTime i.e. old dateTime + timeGrain
     */
    public static DateTime addTimeGrain(DateTime dateTime, TimeGrain timeGrain) {
        return dateTime.plus(timeGrain.getPeriod());
    }

    /**
     * Merge all contiguous and overlapping intervals in a set together and return the set with the merged intervals.
     *
     * @param unmergedIntervals  A set of intervals that may abut or overlap
     *
     * @return The set of merged intervals
     */
    public static Set<Interval> mergeIntervalSet(Set<Interval> unmergedIntervals) {
        // Create a self sorting set of intervals
        TreeSet<Interval> sortedIntervals = new TreeSet<>(IntervalStartComparator.INSTANCE);

        for (Interval mergingInterval : unmergedIntervals) {
            Iterator<Interval> it = sortedIntervals.iterator();
            while (it.hasNext()) {
                Interval sortedInterval = it.next();
                if (mergingInterval.overlaps(sortedInterval) || mergingInterval.abuts(sortedInterval)) {
                    // Remove the interval being merged with
                    it.remove();
                    // find start and end of new interval
                    DateTime start = (mergingInterval.getStart().isBefore(sortedInterval.getStart()))
                            ? mergingInterval.getStart()
                            : sortedInterval.getStart();
                    DateTime end = (mergingInterval.getEnd().isAfter(sortedInterval.getEnd()))
                            ? mergingInterval.getEnd()
                            : sortedInterval.getEnd();
                    mergingInterval = new Interval(start, end);
                }
            }
            sortedIntervals.add(mergingInterval);
        }
        return sortedIntervals;
    }

    /**
     * Merge an interval into the given interval set.
     *
     * @param intervals  set of intervals to which an interval is to be added/merged
     * @param intervalToMerge  interval to be merged
     *
     * @return set of intervals
     */
    public static Set<Interval> mergeIntervalToSet(Set<Interval> intervals, Interval intervalToMerge) {
        LinkedHashSet<Interval> copyOfOriginalSet = new LinkedHashSet<>(intervals);
        copyOfOriginalSet.add(intervalToMerge);
        return mergeIntervalSet(copyOfOriginalSet);
    }

    /**
     * Finds the gaps in available vs needed interval sets.
     *
     * @param availableIntervals  availability intervals
     * @param neededIntervals  needed intervals
     *
     * @return set of intervals that are needed, but not fully available.
     */
    public static SortedSet<Interval> findFullAvailabilityGaps(
            Set<Interval> availableIntervals,
            Set<Interval> neededIntervals
    ) {
        // Use just one comparator (the shared singleton, matching mergeIntervalSet)
        Comparator<Interval> intervalStartComparator = IntervalStartComparator.INSTANCE;

        // Sort the intervals by start time, earliest to latest so we iterate over them in order
        SortedSet<Interval> sortedAvailableIntervals = new TreeSet<>(intervalStartComparator);
        sortedAvailableIntervals.addAll(availableIntervals);
        SortedSet<Interval> sortedNeededIntervals = new TreeSet<>(intervalStartComparator);
        sortedNeededIntervals.addAll(neededIntervals);

        // TODO: Consolidate available intervals to remove false misses

        // Get the 1st available interval
        Iterator<Interval> availableIntervalsIterator = sortedAvailableIntervals.iterator();
        if (!availableIntervalsIterator.hasNext()) {
            // We have no available intervals so all needed intervals are missing
            return sortedNeededIntervals;
        }
        Interval available = availableIntervalsIterator.next();

        // Walk through the needed intervals, adding missing ones
        SortedSet<Interval> missingIntervals = new TreeSet<>(intervalStartComparator);
        for (Interval needed : sortedNeededIntervals) {
            // Get the next available interval that can determine availability of the needed interval
            while (!canDetermineAvailability(available, needed) && availableIntervalsIterator.hasNext()) {
                available = availableIntervalsIterator.next();
            }

            // If current available interval contains the needed interval, it's not missing. Next!
            if (available.contains(needed)) {
                continue;
            }

            // Either the needed interval starts before the available interval, or we have no other available intervals.
            missingIntervals.add(needed);
        }
        return missingIntervals;
    }

    /**
     * Check to see if we can determine availability from the given available and needed intervals.
     *
     * @param available  Available interval
     * @param needed  Needed interval
     *
     * @return True if we can determine availability, false if not
     */
    private static boolean canDetermineAvailability(Interval available, Interval needed) {
        if (available != null && needed != null) {
            if (available.contains(needed) || available.getStart().isAfter(needed.getStart())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Converts an interval to a specified string format.
     *
     * @param interval  interval to be formatted
     * @param formatter  date time formatter used to render the interval's start and end instants
     * @param separator  string to separate interval start and end
     *
     * @return formatted interval string
     */
    public static String intervalToString(Interval interval, DateTimeFormatter formatter, String separator) {
        return interval.getStart().toString(formatter) + separator + interval.getEnd().toString(formatter);
    }

    /**
     * Slices the intervals into smaller intervals of the timeGrain duration.
     *
     * @param interval  interval to be sliced
     * @param timeGrain  size of the slice
     *
     * @return list of intervals obtained by slicing the larger interval
     *
     * @throws java.lang.IllegalArgumentException if the interval is not an even multiple of the time grain
     */
    public static List<Interval> sliceIntervals(Interval interval, TimeGrain timeGrain) {
        // TODO: Refactor me to use a Period
        DateTime intervalEnd = interval.getEnd();
        DateTime sliceStart = interval.getStart();
        DateTime periodStart = timeGrain.roundFloor(sliceStart);
        if (!sliceStart.equals(periodStart)) {
            LOG.info("Interval {} is not aligned to TimeGrain {} starting {}", interval, timeGrain, periodStart);
            throw new IllegalArgumentException("Interval must be aligned to the TimeGrain starting " + periodStart);
        }

        List<Interval> intervalSlices = new ArrayList<>();
        while (sliceStart.isBefore(intervalEnd)) {
            // Find the end of the next slice
            DateTime sliceEnd = DateTimeUtils.addTimeGrain(sliceStart, timeGrain);

            // Make the next slice
            Interval slicedInterval = new Interval(sliceStart, sliceEnd);

            // Make sure that our slice is fully contained within our interval
            if (!interval.contains(slicedInterval)) {
                LOG.info("Interval {} is not a multiple of TimeGrain {}", interval, timeGrain);
                throw new IllegalArgumentException("Interval must be a multiple of the TimeGrain");
            }

            // Add the slice
            intervalSlices.add(slicedInterval);

            // Move the slicer forward
            sliceStart = sliceEnd;
        }
        LOG.debug("Sliced interval {} into {} slices of {} grain", interval, intervalSlices.size(), timeGrain);
        return intervalSlices;
    }

    /**
     * Round the date time back to the beginning of the nearest (inclusive) month of January, April, July, October.
     *
     * @param from  the date being rounded
     *
     * @return The nearest previous start of month for one of the three quarter months
     */
    public static DateTime quarterlyRound(DateTime from) {
        DateTime.Property property = from.monthOfYear();
        // Shift the month from a one to a zero basis (Jan == 0), then adjust backwards to one of the months that are
        // an integer multiple of three months from the start of the year, then round to the start of that month.
        return property.addToCopy(-1 * ((property.get() - 1) % 3)).monthOfYear().roundFloorCopy();
    }

    /**
     * Given a granularity, produce a time zone.
     *
     * @param granularity  The granularity's time zone, or if there isn't one, the default time zone
     *
     * @return A time zone
     */
    public static DateTimeZone getTimeZone(Granularity granularity) {
        return (granularity instanceof ZonedTimeGrain)
                ? ((ZonedTimeGrain) granularity).getTimeZone()
                : DateTimeZone.getDefault();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.metron.pcap.query;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.metron.common.Constants;
import org.apache.metron.common.system.Clock;
import org.apache.metron.common.utils.timestamp.TimestampConverters;
import org.apache.metron.pcap.filter.fixed.FixedPcapFilter;
import org.apache.metron.pcap.filter.query.QueryPcapFilter;
import org.apache.metron.pcap.mr.PcapJob;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;

import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.EnumMap;
import java.util.List;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code PcapCli}: verifies argument parsing, delegation to the
 * mocked {@link PcapJob}, and the generated output file name for both the
 * "fixed" and "query" filter sub-commands.
 */
public class PcapCliTest {

  @Mock
  private PcapJob jobRunner;
  @Mock
  private ResultsWriter resultsWriter;
  @Mock
  private Clock clock;

  @Before
  public void setup() {
    MockitoAnnotations.initMocks(this);
  }

  @Test
  public void runs_fixed_pcap_filter_job_with_default_argument_list() throws Exception {
    String[] args = {
            "fixed",
            "-start_time", "500",
            "-ip_src_addr", "192.168.1.1",
            "-ip_dst_addr", "192.168.1.2",
            "-ip_src_port", "8081",
            "-ip_dst_port", "8082",
            "-protocol", "6"
    };
    List<byte[]> pcaps = Arrays.asList(new byte[][]{asBytes("abc"), asBytes("def"), asBytes("ghi")});

    Path base_path = new Path(CliConfig.BASE_PATH_DEFAULT);
    Path base_output_path = new Path(CliConfig.BASE_OUTPUT_PATH_DEFAULT);
    EnumMap<Constants.Fields, String> query = new EnumMap<Constants.Fields, String>(Constants.Fields.class) {{
      put(Constants.Fields.SRC_ADDR, "192.168.1.1");
      put(Constants.Fields.DST_ADDR, "192.168.1.2");
      put(Constants.Fields.SRC_PORT, "8081");
      put(Constants.Fields.DST_PORT, "8082");
      put(Constants.Fields.PROTOCOL, "6");
      put(Constants.Fields.INCLUDES_REVERSE_TRAFFIC, "false");
    }};

    when(jobRunner.query(eq(base_path), eq(base_output_path), anyLong(), anyLong(), eq(query),
            isA(Configuration.class), isA(FileSystem.class),
            isA(FixedPcapFilter.Configurator.class))).thenReturn(pcaps);
    when(clock.currentTimeFormatted("yyyyMMddHHmmssSSSZ")).thenReturn("20160615183527162+0000");

    PcapCli cli = new PcapCli(jobRunner, resultsWriter, clock);
    assertThat("Expect no errors on run", cli.run(args), equalTo(0));
    Mockito.verify(resultsWriter).write(pcaps, "pcap-data-20160615183527162+0000.pcap");
  }

  @Test
  public void runs_fixed_pcap_filter_job_with_full_argument_list_and_default_dateformat() throws Exception {
    String[] args = {
            "fixed",
            "-start_time", "500",
            "-end_time", "1000",
            "-base_path", "/base/path",
            "-base_output_path", "/base/output/path",
            "-ip_src_addr", "192.168.1.1",
            "-ip_dst_addr", "192.168.1.2",
            "-ip_src_port", "8081",
            "-ip_dst_port", "8082",
            "-protocol", "6",
            "-include_reverse"
    };
    List<byte[]> pcaps = Arrays.asList(new byte[][]{asBytes("abc"), asBytes("def"), asBytes("ghi")});

    Path base_path = new Path("/base/path");
    Path base_output_path = new Path("/base/output/path");
    EnumMap<Constants.Fields, String> query = new EnumMap<Constants.Fields, String>(Constants.Fields.class) {{
      put(Constants.Fields.SRC_ADDR, "192.168.1.1");
      put(Constants.Fields.DST_ADDR, "192.168.1.2");
      put(Constants.Fields.SRC_PORT, "8081");
      put(Constants.Fields.DST_PORT, "8082");
      put(Constants.Fields.PROTOCOL, "6");
      put(Constants.Fields.INCLUDES_REVERSE_TRAFFIC, "true");
    }};

    when(jobRunner.query(eq(base_path), eq(base_output_path), anyLong(), anyLong(), eq(query),
            isA(Configuration.class), isA(FileSystem.class),
            isA(FixedPcapFilter.Configurator.class))).thenReturn(pcaps);
    when(clock.currentTimeFormatted("yyyyMMddHHmmssSSSZ")).thenReturn("20160615183527162+0000");

    PcapCli cli = new PcapCli(jobRunner, resultsWriter, clock);
    assertThat("Expect no errors on run", cli.run(args), equalTo(0));
    Mockito.verify(resultsWriter).write(pcaps, "pcap-data-20160615183527162+0000.pcap");
  }

  @Test
  public void runs_fixed_pcap_filter_job_with_full_argument_list() throws Exception {
    String[] args = {
            "fixed",
            "-start_time", "2016-06-13-18:35.00",
            "-end_time", "2016-06-15-18:35.00",
            "-date_format", "yyyy-MM-dd-HH:mm.ss",
            "-base_path", "/base/path",
            "-base_output_path", "/base/output/path",
            "-ip_src_addr", "192.168.1.1",
            "-ip_dst_addr", "192.168.1.2",
            "-ip_src_port", "8081",
            "-ip_dst_port", "8082",
            "-protocol", "6",
            "-include_reverse"
    };
    List<byte[]> pcaps = Arrays.asList(new byte[][]{asBytes("abc"), asBytes("def"), asBytes("ghi")});

    Path base_path = new Path("/base/path");
    Path base_output_path = new Path("/base/output/path");
    EnumMap<Constants.Fields, String> query = new EnumMap<Constants.Fields, String>(Constants.Fields.class) {{
      put(Constants.Fields.SRC_ADDR, "192.168.1.1");
      put(Constants.Fields.DST_ADDR, "192.168.1.2");
      put(Constants.Fields.SRC_PORT, "8081");
      put(Constants.Fields.DST_PORT, "8082");
      put(Constants.Fields.PROTOCOL, "6");
      put(Constants.Fields.INCLUDES_REVERSE_TRAFFIC, "true");
    }};

    // With an explicit date format, the CLI must convert both bounds to nanoseconds.
    long startAsNanos = asNanos("2016-06-13-18:35.00", "yyyy-MM-dd-HH:mm.ss");
    long endAsNanos = asNanos("2016-06-15-18:35.00", "yyyy-MM-dd-HH:mm.ss");

    when(jobRunner.query(eq(base_path), eq(base_output_path), eq(startAsNanos), eq(endAsNanos), eq(query),
            isA(Configuration.class), isA(FileSystem.class),
            isA(FixedPcapFilter.Configurator.class))).thenReturn(pcaps);
    when(clock.currentTimeFormatted("yyyyMMddHHmmssSSSZ")).thenReturn("20160615183527162+0000");

    PcapCli cli = new PcapCli(jobRunner, resultsWriter, clock);
    assertThat("Expect no errors on run", cli.run(args), equalTo(0));
    Mockito.verify(resultsWriter).write(pcaps, "pcap-data-20160615183527162+0000.pcap");
  }

  @Test
  public void runs_query_pcap_filter_job_with_default_argument_list() throws Exception {
    String[] args = {
            "query",
            "-start_time", "500",
            "-query", "some query string"
    };
    List<byte[]> pcaps = Arrays.asList(new byte[][]{asBytes("abc"), asBytes("def"), asBytes("ghi")});

    Path base_path = new Path(CliConfig.BASE_PATH_DEFAULT);
    Path base_output_path = new Path(CliConfig.BASE_OUTPUT_PATH_DEFAULT);
    String query = "some query string";

    when(jobRunner.query(eq(base_path), eq(base_output_path), anyLong(), anyLong(), eq(query),
            isA(Configuration.class), isA(FileSystem.class),
            isA(QueryPcapFilter.Configurator.class))).thenReturn(pcaps);
    when(clock.currentTimeFormatted("yyyyMMddHHmmssSSSZ")).thenReturn("20160615183527162+0000");

    PcapCli cli = new PcapCli(jobRunner, resultsWriter, clock);
    assertThat("Expect no errors on run", cli.run(args), equalTo(0));
    Mockito.verify(resultsWriter).write(pcaps, "pcap-data-20160615183527162+0000.pcap");
  }

  @Test
  public void runs_query_pcap_filter_job_with_full_argument_list() throws Exception {
    String[] args = {
            "query",
            "-start_time", "500",
            "-end_time", "1000",
            "-base_path", "/base/path",
            "-base_output_path", "/base/output/path",
            "-query", "some query string"
    };
    List<byte[]> pcaps = Arrays.asList(new byte[][]{asBytes("abc"), asBytes("def"), asBytes("ghi")});

    Path base_path = new Path("/base/path");
    Path base_output_path = new Path("/base/output/path");
    String query = "some query string";

    when(jobRunner.query(eq(base_path), eq(base_output_path), anyLong(), anyLong(), eq(query),
            isA(Configuration.class), isA(FileSystem.class),
            isA(QueryPcapFilter.Configurator.class))).thenReturn(pcaps);
    when(clock.currentTimeFormatted("yyyyMMddHHmmssSSSZ")).thenReturn("20160615183527162+0000");

    PcapCli cli = new PcapCli(jobRunner, resultsWriter, clock);
    assertThat("Expect no errors on run", cli.run(args), equalTo(0));
    Mockito.verify(resultsWriter).write(pcaps, "pcap-data-20160615183527162+0000.pcap");
  }

  @Test
  public void invalid_fixed_filter_arg_prints_help() throws Exception {
    PrintStream originalOutStream = System.out;
    try {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      PrintStream testStream = new PrintStream(new BufferedOutputStream(bos));
      System.setOut(testStream);
      String[] args = {
              "fixed",
              "-start_time", "500",
              "-end_time", "1000",
              "-base_path", "/base/path",
              "-base_output_path", "/base/output/path",
              "-query", "THIS IS AN ERROR"
      };

      PcapCli cli = new PcapCli(jobRunner, resultsWriter, clock);
      assertThat("Expect errors on run", cli.run(args), equalTo(-1));
      // Flush the buffering PrintStream so all captured help output reaches bos
      // before we inspect it (without this the BufferedOutputStream may still
      // hold the text and the assertion would read an incomplete buffer).
      testStream.flush();
      assertThat(bos.toString(), bos.toString().contains("usage: Fixed filter options"), equalTo(true));
    } finally {
      System.setOut(originalOutStream);
    }
  }

  @Test
  public void invalid_query_filter_arg_prints_help() throws Exception {
    PrintStream originalOutStream = System.out;
    try {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      PrintStream outStream = new PrintStream(new BufferedOutputStream(bos));
      System.setOut(outStream);
      String[] args = {
              "query",
              "-start_time", "500",
              "-end_time", "1000",
              "-base_path", "/base/path",
              "-base_output_path", "/base/output/path",
              "-ip_src_addr", "THIS IS AN ERROR"
      };

      PcapCli cli = new PcapCli(jobRunner, resultsWriter, clock);
      assertThat("Expect errors on run", cli.run(args), equalTo(-1));
      // Flush the buffering PrintStream so all captured help output reaches bos
      // before we inspect it.
      outStream.flush();
      assertThat(bos.toString(), bos.toString().contains("usage: Query filter options"), equalTo(true));
    } finally {
      System.setOut(originalOutStream);
    }
  }

  /** Parses {@code inDate} with the given pattern and converts the result to nanoseconds. */
  private long asNanos(String inDate, String format) throws ParseException {
    SimpleDateFormat sdf = new SimpleDateFormat(format);
    Date date = sdf.parse(inDate);
    return TimestampConverters.MILLISECONDS.toNanoseconds(date.getTime());
  }

  /** Returns the UTF-8 bytes of the given string (fixture helper). */
  private byte[] asBytes(String val) {
    return val.getBytes(StandardCharsets.UTF_8);
  }
}
/*
 * Copyright 2015 Adaptris Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.adaptris.core.jms.activemq;

import javax.validation.Valid;
import javax.validation.constraints.NotNull;

import org.apache.activemq.ActiveMQConnectionFactory;

import com.adaptris.annotation.AdvancedConfig;
import com.adaptris.annotation.AutoPopulated;
import com.adaptris.annotation.DisplayOrder;
import com.adaptris.util.KeyValuePair;
import com.adaptris.util.KeyValuePairSet;
import com.adaptris.util.SimpleBeanUtil;
import com.thoughtworks.xstream.annotations.XStreamAlias;

/**
 * <p>
 * ActiveMQ implementation of <code>VendorImplementation</code>.
 * </p>
 * <p>
 * This vendor implementation class directly exposes almost all the getter and setters that are available in the ConnectionFactory
 * for maximum flexibility in configuration.
 * </p>
 * <p>
 * The key from the <code>connection-factory-properties</code> element should match the name of the underlying ActiveMQ
 * ConnectionFactory property.
 *
 * <pre>
 * {@code
 *   <connection-factory-properties>
 *     <key-value-pair>
 *        <key>AlwaysSessionAsync</key>
 *        <value>true</value>
 *     </key-value-pair>
 *   </connection-factory-properties>
 * }
 * </pre>
 * will invoke {@link ActiveMQConnectionFactory#setAlwaysSessionAsync(boolean)}, setting the AlwaysSessionAsync property to true.
 * </p>
 * <p>
 * <b>This was built against ActiveMQ 5.9.0</b>
 * </p>
 * <p>
 *
 * @config advanced-active-mq-implementation
 *
 */
@XStreamAlias("advanced-active-mq-implementation")
@DisplayOrder(order = {"brokerUrl", "connectionFactoryProperties"})
public class AdvancedActiveMqImplementation extends BasicActiveMqImplementation {

  /**
   * Non-Exhaustive list that matches various ActiveMQConnectionFactory methods.
   * <p>
   * Each constant knows how to parse its String value (from configuration) into the primitive
   * type the corresponding {@link ActiveMQConnectionFactory} setter expects and to apply it.
   * Keys are matched case-insensitively against these constant names in {@link #create(String)}.
   * </p>
   */
  public enum ConnectionFactoryProperty {
    /**
     * @see ActiveMQConnectionFactory#setAlwaysSessionAsync(boolean)
     */
    AlwaysSessionAsync {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setAlwaysSessionAsync(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setAlwaysSyncSend(boolean)
     */
    AlwaysSyncSend {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setAlwaysSyncSend(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setAuditDepth(int)
     *
     */
    AuditDepth {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setAuditDepth(Integer.parseInt(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setAuditMaximumProducerNumber(int)
     */
    AuditMaximumProducerNumber {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setAuditMaximumProducerNumber(Integer.parseInt(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setCheckForDuplicates(boolean)
     */
    CheckForDuplicates {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setCheckForDuplicates(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setClientID(String)
     */
    ClientID {
      @Override
      void applyProperty(ActiveMQConnectionFactory f, String o) {
        f.setClientID(o);
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setClientIDPrefix(String)
     */
    ClientIDPrefix {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setClientIDPrefix(o);
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setCloseTimeout(int)
     */
    CloseTimeout {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setCloseTimeout(Integer.parseInt(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setConnectionIDPrefix(String)
     *
     */
    ConnectionIDPrefix {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setConnectionIDPrefix(o);
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setConsumerFailoverRedeliveryWaitPeriod(long)
     */
    ConsumerFailoverRedeliveryWaitPeriod {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setConsumerFailoverRedeliveryWaitPeriod(Long.parseLong(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setCopyMessageOnSend(boolean)
     */
    CopyMessageOnSend {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setCopyMessageOnSend(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setDisableTimeStampsByDefault(boolean)
     */
    DisableTimeStampsByDefault {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setDisableTimeStampsByDefault(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setDispatchAsync(boolean)
     */
    DispatchAsync {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setDispatchAsync(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setExclusiveConsumer(boolean)
     */
    ExclusiveConsumer {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setExclusiveConsumer(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setMaxThreadPoolSize(int)
     *
     */
    MaxThreadPoolSize {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setMaxThreadPoolSize(Integer.parseInt(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setMessagePrioritySupported(boolean)
     *
     */
    MessagePrioritySupported {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setMessagePrioritySupported(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setNestedMapAndListEnabled(boolean)
     */
    NestedMapAndListEnabled {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setNestedMapAndListEnabled(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setNonBlockingRedelivery(boolean)
     *
     */
    NonBlockingRedelivery {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setNonBlockingRedelivery(Boolean.parseBoolean(o));
      }
    },
    /**
     * This is just the correct spelling for
     * {@link ActiveMQConnectionFactory#setObjectMessageSerializationDefered(boolean)}
     *
     * @see ActiveMQConnectionFactory#setObjectMessageSerializationDefered(boolean)
     */
    ObjectMessageSerializationDeferred {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setObjectMessageSerializationDefered(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setOptimizeAcknowledge(boolean)
     */
    OptimizeAcknowledge {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setOptimizeAcknowledge(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setOptimizeAcknowledgeTimeOut(long)
     */
    OptimizeAcknowledgeTimeout {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setOptimizeAcknowledgeTimeOut(Long.parseLong(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setOptimizedAckScheduledAckInterval(long)
     */
    OptimizedAckScheduledAckInterval {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setOptimizedAckScheduledAckInterval(Long.parseLong(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setOptimizedMessageDispatch(boolean)
     */
    OptimizedMessageDispatch {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setOptimizedMessageDispatch(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setProducerWindowSize(int)
     */
    ProducerWindowSize {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setProducerWindowSize(Integer.parseInt(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setSendAcksAsync(boolean)
     */
    SendAcksAsync {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setSendAcksAsync(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setSendTimeout(int)
     */
    SendTimeout {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setSendTimeout(Integer.parseInt(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setStatsEnabled(boolean)
     */
    StatsEnabled {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setStatsEnabled(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setTransactedIndividualAck(boolean)
     */
    TransactedIndividualAck {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setTransactedIndividualAck(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setUseAsyncSend(boolean)
     */
    UseAsyncSend {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setUseAsyncSend(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setUseCompression(boolean)
     */
    UseCompression {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setUseCompression(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setUseDedicatedTaskRunner(boolean)
     */
    UseDedicatedTaskRunner {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setUseDedicatedTaskRunner(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setUseRetroactiveConsumer(boolean)
     */
    UseRetroactiveConsumer {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setUseRetroactiveConsumer(Boolean.parseBoolean(o));
      }
    },
    /**
     * @see ActiveMQConnectionFactory#setWarnAboutUnstartedConnectionTimeout(long)
     */
    WarnAboutUnstartedConnectionTimeout {
      @Override
      void applyProperty(ActiveMQConnectionFactory cf, String o) {
        cf.setWarnAboutUnstartedConnectionTimeout(Long.parseLong(o));
      }
    };

    /**
     * Apply the configured String value to the given factory, converting it to the type the
     * underlying setter expects.
     *
     * @param cf the factory to mutate
     * @param s the raw configured value (parsed with {@code Boolean/Integer/Long.parseXXX} as appropriate)
     */
    abstract void applyProperty(ActiveMQConnectionFactory cf, String s);
  };

  @AdvancedConfig
  private BlobTransferPolicyFactory blobTransferPolicy;
  @AdvancedConfig
  private PrefetchPolicyFactory prefetchPolicy;
  @AdvancedConfig
  private RedeliveryPolicyFactory redeliveryPolicy;
  @NotNull
  @AutoPopulated
  @Valid
  private KeyValuePairSet connectionFactoryProperties;

  public AdvancedActiveMqImplementation() {
    super();
    setConnectionFactoryProperties(new KeyValuePairSet());
  }

  /**
   * Creates the factory via the parent class, then layers on the optional policies and each
   * configured connection-factory property.
   * <p>
   * Keys are first matched case-insensitively against {@link ConnectionFactoryProperty}; anything
   * unmatched falls back to a reflective {@code setXXX(...)} lookup via {@code SimpleBeanUtil},
   * and is logged (at trace) and ignored if no setter exists either.
   * </p>
   */
  @Override
  protected ActiveMQConnectionFactory create(String url) {
    ActiveMQConnectionFactory cf = super.create(url);
    if (blobTransferPolicy != null) {
      cf.setBlobTransferPolicy(blobTransferPolicy.create());
    }
    if (prefetchPolicy != null) {
      cf.setPrefetchPolicy(prefetchPolicy.create());
    }
    if (redeliveryPolicy != null) {
      cf.setRedeliveryPolicy(redeliveryPolicy.create());
    }
    for (KeyValuePair kvp : getConnectionFactoryProperties().getKeyValuePairs()) {
      // ConnectionFactoryProperty.valueOf(...) would be simpler, but it is case-sensitive;
      // match case-insensitively so configured keys need not exactly match the enum name.
      boolean matched = false;
      for (ConnectionFactoryProperty sp : ConnectionFactoryProperty.values()) {
        if (kvp.getKey().equalsIgnoreCase(sp.toString())) {
          sp.applyProperty(cf, kvp.getValue());
          matched = true;
          break;
        }
      }
      if (!matched) {
        // Not one of the enumerated properties; fall back to a reflective setter lookup.
        if (!SimpleBeanUtil.callSetter(cf, "set" + kvp.getKey(), kvp.getValue())) {
          log.trace("Ignoring unsupported Property {}", kvp.getKey());
        }
      }
    }
    return cf;
  }

  /**
   * @return The additional connection factory properties.
   */
  public KeyValuePairSet getConnectionFactoryProperties() {
    return connectionFactoryProperties;
  }

  /**
   * Set any additional ActiveMQConnectionFactory properties that are required.
   * <p>
   * The key from the <code>connection-factory-properties</code> element should match the name of the underlying ActiveMQ
   * ConnectionFactory property.
   *
   * <pre>
   * {@code
   *   <connection-factory-properties>
   *     <key-value-pair>
   *        <key>AlwaysSessionAsync</key>
   *        <value>true</value>
   *     </key-value-pair>
   *   </connection-factory-properties>
   * }
   * </pre>
   * will invoke {@link ActiveMQConnectionFactory#setAlwaysSessionAsync(boolean)}, setting the AlwaysSessionAsync property to true.
   * Only explicitly configured properties will invoke the associated setter method.
   * </p>
   *
   * @param kvps the additional connectionFactoryProperties to set
   */
  public void setConnectionFactoryProperties(KeyValuePairSet kvps) {
    this.connectionFactoryProperties = kvps;
  }

  /**
   * Get the Blob Transfer Policy.
   *
   * @return the Blob Transfer Policy.
   * @see org.apache.activemq.blob.BlobTransferPolicy
   * @see BlobTransferPolicyFactory
   */
  public BlobTransferPolicyFactory getBlobTransferPolicy() {
    return blobTransferPolicy;
  }

  /**
   * Set the Blob Transfer Policy.
   * <p>
   * If not explicitly configured then the associated setter
   * {@link ActiveMQConnectionFactory#setBlobTransferPolicy(org.apache.activemq.blob.BlobTransferPolicy)}
   * is never invoked.
   * <p>
   *
   * @param f the Blob Transfer Policy.
   * @see org.apache.activemq.blob.BlobTransferPolicy
   * @see BlobTransferPolicyFactory
   */
  public void setBlobTransferPolicy(BlobTransferPolicyFactory f) {
    this.blobTransferPolicy = f;
  }

  /**
   * Get the Prefetch Policy.
   *
   * @return the PrefetchPolicy.
   * @see org.apache.activemq.ActiveMQPrefetchPolicy
   * @see PrefetchPolicyFactory
   */
  public PrefetchPolicyFactory getPrefetchPolicy() {
    return prefetchPolicy;
  }

  /**
   * Set the Prefetch Policy.
   * <p>
   * If not explicitly configured then the associated setter
   * {@link ActiveMQConnectionFactory#setPrefetchPolicy(org.apache.activemq.ActiveMQPrefetchPolicy)}
   * is never invoked.
   * <p>
   *
   * @param f the PrefetchPolicy.
   * @see org.apache.activemq.ActiveMQPrefetchPolicy
   * @see PrefetchPolicyFactory
   */
  public void setPrefetchPolicy(PrefetchPolicyFactory f) {
    this.prefetchPolicy = f;
  }

  /**
   * Get the Redelivery Policy.
   *
   * @return the redelivery Policy.
   * @see org.apache.activemq.RedeliveryPolicy
   * @see RedeliveryPolicyFactory
   */
  public RedeliveryPolicyFactory getRedeliveryPolicy() {
    return redeliveryPolicy;
  }

  /**
   * Set the Redelivery Policy.
   * <p>
   * If not explicitly configured then the associated setter
   * {@link ActiveMQConnectionFactory#setRedeliveryPolicy(org.apache.activemq.RedeliveryPolicy)} is
   * never invoked.
   * <p>
   *
   * @param f the redelivery Policy.
   * @see org.apache.activemq.RedeliveryPolicy
   * @see RedeliveryPolicyFactory
   */
  public void setRedeliveryPolicy(RedeliveryPolicyFactory f) {
    this.redeliveryPolicy = f;
  }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/devtools/artifactregistry/v1beta2/version.proto
// NOTE(review): this file is protoc output; any hand edit will be lost on the next
// regeneration — change version.proto (and regenerate) instead of editing here.

package com.google.devtools.artifactregistry.v1beta2;

/**
 *
 *
 * <pre>
 * The request to delete a version.
 * </pre>
 *
 * Protobuf type {@code google.devtools.artifactregistry.v1beta2.DeleteVersionRequest}
 */
public final class DeleteVersionRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1beta2.DeleteVersionRequest)
    DeleteVersionRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DeleteVersionRequest.newBuilder() to construct.
  private DeleteVersionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private DeleteVersionRequest() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeleteVersionRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  private DeleteVersionRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();

              name_ = s;
              break;
            }
          case 16:
            {
              force_ = input.readBool();
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.devtools.artifactregistry.v1beta2.VersionProto
        .internal_static_google_devtools_artifactregistry_v1beta2_DeleteVersionRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.artifactregistry.v1beta2.VersionProto
        .internal_static_google_devtools_artifactregistry_v1beta2_DeleteVersionRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest.class,
            com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  private volatile java.lang.Object name_;
  /**
   *
   *
   * <pre>
   * The name of the version to delete.
   * </pre>
   *
   * <code>string name = 1;</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The name of the version to delete.
   * </pre>
   *
   * <code>string name = 1;</code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FORCE_FIELD_NUMBER = 2;
  private boolean force_;
  /**
   *
   *
   * <pre>
   * By default, a version that is tagged may not be deleted. If force=true, the
   * version and any tags pointing to the version are deleted.
   * </pre>
   *
   * <code>bool force = 2;</code>
   *
   * @return The force.
   */
  @java.lang.Override
  public boolean getForce() {
    return force_;
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (force_ != false) {
      output.writeBool(2, force_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (force_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, force_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest)) {
      return super.equals(obj);
    }
    com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest other =
        (com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (getForce() != other.getForce()) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + FORCE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request to delete a version.
   * </pre>
   *
   * Protobuf type {@code google.devtools.artifactregistry.v1beta2.DeleteVersionRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1beta2.DeleteVersionRequest)
      com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.devtools.artifactregistry.v1beta2.VersionProto
          .internal_static_google_devtools_artifactregistry_v1beta2_DeleteVersionRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.devtools.artifactregistry.v1beta2.VersionProto
          .internal_static_google_devtools_artifactregistry_v1beta2_DeleteVersionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest.class,
              com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest.Builder.class);
    }

    // Construct using
    // com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";

      force_ = false;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.devtools.artifactregistry.v1beta2.VersionProto
          .internal_static_google_devtools_artifactregistry_v1beta2_DeleteVersionRequest_descriptor;
    }

    @java.lang.Override
    public com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest
        getDefaultInstanceForType() {
      return com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest build() {
      com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest buildPartial() {
      com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest result =
          new com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest(this);
      result.name_ = name_;
      result.force_ = force_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest) {
        return mergeFrom((com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest other) {
      if (other
          == com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      if (other.getForce() != false) {
        setForce(other.getForce());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * The name of the version to delete.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the version to delete.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the version to delete.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the version to delete.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the version to delete.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    private boolean force_;
    /**
     *
     *
     * <pre>
     * By default, a version that is tagged may not be deleted. If force=true, the
     * version and any tags pointing to the version are deleted.
     * </pre>
     *
     * <code>bool force = 2;</code>
     *
     * @return The force.
     */
    @java.lang.Override
    public boolean getForce() {
      return force_;
    }
    /**
     *
     *
     * <pre>
     * By default, a version that is tagged may not be deleted. If force=true, the
     * version and any tags pointing to the version are deleted.
     * </pre>
     *
     * <code>bool force = 2;</code>
     *
     * @param value The force to set.
     * @return This builder for chaining.
     */
    public Builder setForce(boolean value) {

      force_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * By default, a version that is tagged may not be deleted. If force=true, the
     * version and any tags pointing to the version are deleted.
     * </pre>
     *
     * <code>bool force = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearForce() {

      force_ = false;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1beta2.DeleteVersionRequest)
  }

  // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1beta2.DeleteVersionRequest)
  private static final com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest();
  }

  public static com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<DeleteVersionRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteVersionRequest>() {
        @java.lang.Override
        public DeleteVersionRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DeleteVersionRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<DeleteVersionRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeleteVersionRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1beta2.DeleteVersionRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.model.impl.preferences;

import org.jkiss.dbeaver.model.preferences.DBPPreferenceListener;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.utils.CommonUtils;

import java.util.HashMap;
import java.util.Map;

/**
 * Preference store which can be stored/loaded in any way.
 * Also it can use a parent store to obtain values from it if this store does not contain the key.
 * However, save will always use THIS store, not the parent.
 * Originally copied from the standard PreferenceStore class.
 */
public abstract class SimplePreferenceStore extends AbstractPreferenceStore {
    // NOTE(review): parentStore may be null when the no-arg constructor is used;
    // any method that dereferences it should guard against null.
    private DBPPreferenceStore parentStore;
    // Explicitly-set values; always consulted before defaults or the parent store.
    private Map<String, String> properties;
    // Default values, used when no explicit value is present.
    private Map<String, String> defaultProperties;
    // Tracks unsaved modifications; reported via needsSaving().
    private boolean dirty = false;

    public SimplePreferenceStore() {
        defaultProperties = new HashMap<>();
        properties = new HashMap<>();
    }

    protected SimplePreferenceStore(DBPPreferenceStore parentStore) {
        this();
        this.parentStore = parentStore;
        if (parentStore != null) {
            // FIXME: ? adding self as parent change listener produces too many events. And this seems to be senseless.
            // FIXME: but i'm not 100% sure.
            // FIXME: In any case we have to remove listener at dispose to avoid leaks and dead links.
            //parentStore.addPropertyChangeListener(this);
        }
    }

    public DBPPreferenceStore getParentStore() {
        return parentStore;
    }

    public Map<String, String> getProperties() {
        return properties;
    }

    public void setProperties(Map<String, String> properties) {
        this.properties = properties;
    }

    public Map<String, String> getDefaultProperties() {
        return defaultProperties;
    }

    public void setDefaultProperties(Map<String, String> defaultProperties) {
        this.defaultProperties = defaultProperties;
    }

    /** Removes all explicitly-set values; defaults are left untouched. */
    public void clear() {
        properties.clear();
    }

    @Override
    public void addPropertyChangeListener(DBPPreferenceListener listener) {
        addListenerObject(listener);
    }

    @Override
    public void removePropertyChangeListener(DBPPreferenceListener listener) {
        removeListenerObject(listener);
    }

    /** True when this store (values or defaults) holds the key; the parent is NOT consulted. */
    @Override
    public boolean contains(String name) {
        return (properties.containsKey(name) || defaultProperties.containsKey(name));
    }

    @Override
    public boolean getBoolean(String name) {
        return toBoolean(getString(name));
    }

    @Override
    public boolean getDefaultBoolean(String name) {
        return toBoolean(getDefaultString(name));
    }

    // Only the exact TRUE constant counts as true; anything else (including null) is false.
    private boolean toBoolean(String value) {
        return value != null && value.equals(AbstractPreferenceStore.TRUE);
    }

    @Override
    public double getDouble(String name) {
        return toDouble(getString(name));
    }

    @Override
    public double getDefaultDouble(String name) {
        return toDouble(getDefaultString(name));
    }

    // Parses value as double; empty/null/unparseable input yields DOUBLE_DEFAULT_DEFAULT.
    private double toDouble(String value) {
        double ival = DOUBLE_DEFAULT_DEFAULT;
        if (!CommonUtils.isEmpty(value)) {
            try {
                ival = Double.parseDouble(value);
            } catch (NumberFormatException e) {
                // do nothing
            }
        }
        return ival;
    }

    @Override
    public float getFloat(String name) {
        return toFloat(getString(name));
    }

    @Override
    public float getDefaultFloat(String name) {
        return toFloat(getDefaultString(name));
    }

    // Parses value as float; empty/null/unparseable input yields FLOAT_DEFAULT_DEFAULT.
    private float toFloat(String value) {
        float ival = FLOAT_DEFAULT_DEFAULT;
        if (!CommonUtils.isEmpty(value)) {
            try {
                ival = Float.parseFloat(value);
            } catch (NumberFormatException e) {
                // do nothing
            }
        }
        return ival;
    }

    @Override
public int getInt(String name) { return toInt(getString(name)); } @Override public int getDefaultInt(String name) { return toInt(getDefaultString(name)); } private int toInt(String value) { int ival = INT_DEFAULT_DEFAULT; if (!CommonUtils.isEmpty(value)) { try { ival = Integer.parseInt(value); } catch (NumberFormatException e) { // do nothing } } return ival; } @Override public long getLong(String name) { return toLong(getString(name)); } @Override public long getDefaultLong(String name) { return toLong(getDefaultString(name)); } private long toLong(String value) { long ival = LONG_DEFAULT_DEFAULT; if (!CommonUtils.isEmpty(value)) { try { ival = Long.parseLong(value); } catch (NumberFormatException e) { // do nothing } } return ival; } @Override public String getString(String name) { String value = properties.get(name); if (value == null) { if (parentStore.isDefault(name)) { value = defaultProperties.get(name); } if (value == null) { value = parentStore.getString(name); } } return value; } @Override public String getDefaultString(String name) { String value = defaultProperties.get(name); if (value == null) { if (parentStore.isDefault(name)) { return parentStore.getDefaultString(name); } else { return ""; } } return value; } @Override public boolean isDefault(String name) { return (!properties.containsKey(name) && (defaultProperties.containsKey(name) || parentStore.isDefault(name))); } public boolean isSet(String name) { return properties.containsKey(name); } @Override public boolean needsSaving() { return dirty; } public String[] preferenceNames() { return properties.keySet().toArray(new String[properties.size()]); } @Override public void setDefault(String name, double value) { defaultProperties.put(name, String.valueOf(value)); } @Override public void setDefault(String name, float value) { defaultProperties.put(name, String.valueOf(value)); } @Override public void setDefault(String name, int value) { defaultProperties.put(name, String.valueOf(value)); } @Override 
public void setDefault(String name, long value) { defaultProperties.put(name, String.valueOf(value)); } @Override public void setDefault(String name, String value) { defaultProperties.put(name, String.valueOf(value)); } @Override public void setDefault(String name, boolean value) { defaultProperties.put(name, String.valueOf(value)); } @Override public void setToDefault(String name) { Object oldValue = properties.get(name); properties.remove(name); dirty = true; Object newValue = null; if (defaultProperties != null) { newValue = defaultProperties.get(name); } firePropertyChangeEvent(name, oldValue, newValue); } @Override public void setValue(String name, double value) { double oldValue = getDouble(name); if (oldValue != value || !isSet(name)) { properties.put(name, String.valueOf(value)); dirty = true; firePropertyChangeEvent(name, oldValue, value); } } @Override public void setValue(String name, float value) { float oldValue = getFloat(name); if (oldValue != value || !isSet(name)) { properties.put(name, String.valueOf(value)); dirty = true; firePropertyChangeEvent(name, oldValue, value); } } @Override public void setValue(String name, int value) { int oldValue = getInt(name); if (oldValue != value || !isSet(name)) { properties.put(name, String.valueOf(value)); dirty = true; firePropertyChangeEvent(name, oldValue, value); } } @Override public void setValue(String name, long value) { long oldValue = getLong(name); if (oldValue != value || !isSet(name)) { properties.put(name, String.valueOf(value)); dirty = true; firePropertyChangeEvent(name, oldValue, value); } } @Override public void setValue(String name, String value) { String oldValue = getString(name); if (oldValue == null || !oldValue.equals(value) || !isSet(name)) { properties.put(name, value); dirty = true; firePropertyChangeEvent(name, oldValue, value); } } @Override public void setValue(String name, boolean value) { boolean oldValue = getBoolean(name); if (oldValue != value || !isSet(name)) { 
properties.put(name, String.valueOf(value)); dirty = true; firePropertyChangeEvent(name, oldValue ? Boolean.TRUE : Boolean.FALSE, value ? Boolean.TRUE : Boolean.FALSE); } } }
/* * Copyright The OpenTelemetry Authors * SPDX-License-Identifier: Apache-2.0 */ package io.opentelemetry.sdk.metrics.internal.view; import static org.assertj.core.api.Assertions.assertThat; import io.opentelemetry.sdk.common.InstrumentationLibraryInfo; import io.opentelemetry.sdk.metrics.common.InstrumentType; import io.opentelemetry.sdk.metrics.common.InstrumentValueType; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.view.Aggregation; import io.opentelemetry.sdk.metrics.view.InstrumentSelector; import io.opentelemetry.sdk.metrics.view.View; import org.junit.jupiter.api.Test; class ViewRegistryTest { private static final InstrumentationLibraryInfo INSTRUMENTATION_LIBRARY_INFO = InstrumentationLibraryInfo.create("name", "version", "schema_url"); @Test void selection_onType() { View view = View.builder().build(); ViewRegistry viewRegistry = ViewRegistry.builder() .addView(InstrumentSelector.builder().setType(InstrumentType.COUNTER).build(), view) .build(); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "", "", "", InstrumentType.COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isEqualTo(view); // this one hasn't been configured, so it gets the default still. 
assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "", "", "", InstrumentType.UP_DOWN_COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(ViewRegistry.DEFAULT_VIEW); } @Test void selection_onName() { View view = View.builder().build(); ViewRegistry viewRegistry = ViewRegistry.builder() .addView(InstrumentSelector.builder().setName("overridden").build(), view) .build(); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "overridden", "", "", InstrumentType.COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(view); // this one hasn't been configured, so it gets the default still. assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "default", "", "", InstrumentType.COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(ViewRegistry.DEFAULT_VIEW); } @Test void selection_FirstAddedViewWins() { View view1 = View.builder().setAggregation(Aggregation.lastValue()).build(); View view2 = View.builder().setAggregation(Aggregation.explicitBucketHistogram()).build(); ViewRegistry viewRegistry = ViewRegistry.builder() .addView( InstrumentSelector.builder().setName(name -> name.equals("overridden")).build(), view2) .addView(InstrumentSelector.builder().setName(name -> true).build(), view1) .build(); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "overridden", "", "", InstrumentType.COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(2) .element(0) .isEqualTo(view2); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "default", "", "", InstrumentType.COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isEqualTo(view1); } @Test void selection_typeAndName() { View view = View.builder().setAggregation(Aggregation.lastValue()).build(); ViewRegistry viewRegistry = ViewRegistry.builder() .addView( 
InstrumentSelector.builder() .setType(InstrumentType.COUNTER) .setName("overrides") .build(), view) .build(); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "overrides", "", "", InstrumentType.COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isEqualTo(view); // this one hasn't been configured, so it gets the default still.. assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "overrides", "", "", InstrumentType.UP_DOWN_COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isEqualTo(ViewRegistry.DEFAULT_VIEW); // this one hasn't been configured, so it gets the default still.. assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "default", "", "", InstrumentType.COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isEqualTo(ViewRegistry.DEFAULT_VIEW); } @Test void defaults() { ViewRegistry viewRegistry = ViewRegistry.builder().build(); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "", "", "", InstrumentType.COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(ViewRegistry.DEFAULT_VIEW); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "", "", "", InstrumentType.UP_DOWN_COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(ViewRegistry.DEFAULT_VIEW); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "", "", "", InstrumentType.HISTOGRAM, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(ViewRegistry.DEFAULT_VIEW); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "", "", "", InstrumentType.OBSERVABLE_COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(ViewRegistry.DEFAULT_VIEW); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "", "", "", 
InstrumentType.OBSERVABLE_GAUGE, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(ViewRegistry.DEFAULT_VIEW); assertThat( viewRegistry.findViews( InstrumentDescriptor.create( "", "", "", InstrumentType.OBSERVABLE_UP_DOWN_COUNTER, InstrumentValueType.LONG), INSTRUMENTATION_LIBRARY_INFO)) .hasSize(1) .element(0) .isSameAs(ViewRegistry.DEFAULT_VIEW); } }
/******************************************************************************* * Copyright (c) 2011, Daniel Murphy * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
******************************************************************************/ package org.jbox2d.dynamics.contacts; import org.jbox2d.collision.Manifold; import org.jbox2d.collision.ManifoldPoint; import org.jbox2d.collision.WorldManifold; import org.jbox2d.collision.shapes.Shape; import org.jbox2d.common.Mat22; import org.jbox2d.common.MathUtils; import org.jbox2d.common.Rot; import org.jbox2d.common.Settings; import org.jbox2d.common.Transform; import org.jbox2d.common.Vec2; import org.jbox2d.dynamics.Body; import org.jbox2d.dynamics.Fixture; import org.jbox2d.dynamics.TimeStep; import org.jbox2d.dynamics.contacts.ContactVelocityConstraint.VelocityConstraintPoint; /** @author Daniel */ public class ContactSolver { public static final boolean DEBUG_SOLVER = false; public static final float k_errorTol = 1e-3f; /** For each solver, this is the initial number of constraints in the array, which expands as needed. */ public static final int INITIAL_NUM_CONSTRAINTS = 256; /** Ensure a reasonable condition number. 
for the block solver */ public static final float k_maxConditionNumber = 100.0f; public TimeStep m_step; public Position[] m_positions; public Velocity[] m_velocities; public ContactPositionConstraint[] m_positionConstraints; public ContactVelocityConstraint[] m_velocityConstraints; public Contact[] m_contacts; public int m_count; public ContactSolver () { m_positionConstraints = new ContactPositionConstraint[INITIAL_NUM_CONSTRAINTS]; m_velocityConstraints = new ContactVelocityConstraint[INITIAL_NUM_CONSTRAINTS]; for (int i = 0; i < INITIAL_NUM_CONSTRAINTS; i++) { m_positionConstraints[i] = new ContactPositionConstraint(); m_velocityConstraints[i] = new ContactVelocityConstraint(); } } // djm pooling private final Vec2 tangent = new Vec2(); private final Vec2 temp1 = new Vec2(); private final Vec2 temp2 = new Vec2(); public final void init (ContactSolverDef def) { // System.out.println("Initializing contact solver"); m_step = def.step; m_count = def.count; if (m_positionConstraints.length < m_count) { ContactPositionConstraint[] old = m_positionConstraints; m_positionConstraints = new ContactPositionConstraint[MathUtils.max(old.length * 2, m_count)]; System.arraycopy(old, 0, m_positionConstraints, 0, old.length); for (int i = old.length; i < m_positionConstraints.length; i++) { m_positionConstraints[i] = new ContactPositionConstraint(); } } if (m_velocityConstraints.length < m_count) { ContactVelocityConstraint[] old = m_velocityConstraints; m_velocityConstraints = new ContactVelocityConstraint[MathUtils.max(old.length * 2, m_count)]; System.arraycopy(old, 0, m_velocityConstraints, 0, old.length); for (int i = old.length; i < m_velocityConstraints.length; i++) { m_velocityConstraints[i] = new ContactVelocityConstraint(); } } m_positions = def.positions; m_velocities = def.velocities; m_contacts = def.contacts; for (int i = 0; i < m_count; ++i) { // System.out.println("contacts: " + m_count); final Contact contact = m_contacts[i]; final Fixture fixtureA = 
contact.m_fixtureA; final Fixture fixtureB = contact.m_fixtureB; final Shape shapeA = fixtureA.getShape(); final Shape shapeB = fixtureB.getShape(); final float radiusA = shapeA.m_radius; final float radiusB = shapeB.m_radius; final Body bodyA = fixtureA.getBody(); final Body bodyB = fixtureB.getBody(); final Manifold manifold = contact.getManifold(); int pointCount = manifold.pointCount; assert (pointCount > 0); ContactVelocityConstraint vc = m_velocityConstraints[i]; vc.friction = contact.m_friction; vc.restitution = contact.m_restitution; vc.tangentSpeed = contact.m_tangentSpeed; vc.indexA = bodyA.m_islandIndex; vc.indexB = bodyB.m_islandIndex; vc.invMassA = bodyA.m_invMass; vc.invMassB = bodyB.m_invMass; vc.invIA = bodyA.m_invI; vc.invIB = bodyB.m_invI; vc.contactIndex = i; vc.pointCount = pointCount; vc.K.setZero(); vc.normalMass.setZero(); ContactPositionConstraint pc = m_positionConstraints[i]; pc.indexA = bodyA.m_islandIndex; pc.indexB = bodyB.m_islandIndex; pc.invMassA = bodyA.m_invMass; pc.invMassB = bodyB.m_invMass; pc.localCenterA.set(bodyA.m_sweep.localCenter); pc.localCenterB.set(bodyB.m_sweep.localCenter); pc.invIA = bodyA.m_invI; pc.invIB = bodyB.m_invI; pc.localNormal.set(manifold.localNormal); pc.localPoint.set(manifold.localPoint); pc.pointCount = pointCount; pc.radiusA = radiusA; pc.radiusB = radiusB; pc.type = manifold.type; // System.out.println("contact point count: " + pointCount); for (int j = 0; j < pointCount; j++) { ManifoldPoint cp = manifold.points[j]; VelocityConstraintPoint vcp = vc.points[j]; if (m_step.warmStarting) { // assert(cp.normalImpulse == 0); // System.out.println("contact normal impulse: " + cp.normalImpulse); vcp.normalImpulse = m_step.dtRatio * cp.normalImpulse; vcp.tangentImpulse = m_step.dtRatio * cp.tangentImpulse; } else { vcp.normalImpulse = 0; vcp.tangentImpulse = 0; } vcp.rA.setZero(); vcp.rB.setZero(); vcp.normalMass = 0; vcp.tangentMass = 0; vcp.velocityBias = 0; pc.localPoints[j].set(cp.localPoint); } } } // 
djm pooling, and from above private final Vec2 P = new Vec2(); private final Vec2 temp = new Vec2(); public void warmStart () { // Warm start. for (int i = 0; i < m_count; ++i) { final ContactVelocityConstraint vc = m_velocityConstraints[i]; int indexA = vc.indexA; int indexB = vc.indexB; float mA = vc.invMassA; float iA = vc.invIA; float mB = vc.invMassB; float iB = vc.invIB; int pointCount = vc.pointCount; Vec2 vA = m_velocities[indexA].v; float wA = m_velocities[indexA].w; Vec2 vB = m_velocities[indexB].v; float wB = m_velocities[indexB].w; Vec2 normal = vc.normal; Vec2.crossToOutUnsafe(normal, 1.0f, tangent); for (int j = 0; j < pointCount; ++j) { VelocityConstraintPoint vcp = vc.points[j]; // System.out.println("vcp normal impulse is " + vcp.normalImpulse); temp.set(normal).mulLocal(vcp.normalImpulse); P.set(tangent).mulLocal(vcp.tangentImpulse).addLocal(temp); wA -= iA * Vec2.cross(vcp.rA, P); vA.subLocal(temp.set(P).mulLocal(mA)); // assert(vA.x == 0); // assert(wA == 0); wB += iB * Vec2.cross(vcp.rB, P); vB.addLocal(temp.set(P).mulLocal(mB)); // assert(vB.x == 0); // assert(wB == 0); } m_velocities[indexA].w = wA; m_velocities[indexB].w = wB; // System.out.println("Ending velocity for " + indexA + " is " + vA.x + "," + vA.y + " - " + wA); // System.out.println("Ending velocity for " + indexB + " is " + vB.x + "," + vB.y + " - " + wB); } } // djm pooling, and from above private final Transform xfA = new Transform(); private final Transform xfB = new Transform(); private final WorldManifold worldManifold = new WorldManifold(); public final void initializeVelocityConstraints () { // System.out.println("Initializing velocity constraints for " + m_count + " contacts"); // Warm start. 
for (int i = 0; i < m_count; ++i) { ContactVelocityConstraint vc = m_velocityConstraints[i]; ContactPositionConstraint pc = m_positionConstraints[i]; float radiusA = pc.radiusA; float radiusB = pc.radiusB; Manifold manifold = m_contacts[vc.contactIndex].getManifold(); int indexA = vc.indexA; int indexB = vc.indexB; float mA = vc.invMassA; float mB = vc.invMassB; float iA = vc.invIA; float iB = vc.invIB; Vec2 localCenterA = pc.localCenterA; Vec2 localCenterB = pc.localCenterB; Vec2 cA = m_positions[indexA].c; float aA = m_positions[indexA].a; Vec2 vA = m_velocities[indexA].v; float wA = m_velocities[indexA].w; Vec2 cB = m_positions[indexB].c; float aB = m_positions[indexB].a; Vec2 vB = m_velocities[indexB].v; float wB = m_velocities[indexB].w; assert (manifold.pointCount > 0); xfA.q.set(aA); xfB.q.set(aB); Rot.mulToOutUnsafe(xfA.q, localCenterA, temp); xfA.p.set(cA).subLocal(temp); Rot.mulToOutUnsafe(xfB.q, localCenterB, temp); xfB.p.set(cB).subLocal(temp); worldManifold.initialize(manifold, xfA, radiusA, xfB, radiusB); vc.normal.set(worldManifold.normal); int pointCount = vc.pointCount; for (int j = 0; j < pointCount; ++j) { VelocityConstraintPoint vcp = vc.points[j]; vcp.rA.set(worldManifold.points[j]).subLocal(cA); vcp.rB.set(worldManifold.points[j]).subLocal(cB); float rnA = Vec2.cross(vcp.rA, vc.normal); float rnB = Vec2.cross(vcp.rB, vc.normal); float kNormal = mA + mB + iA * rnA * rnA + iB * rnB * rnB; vcp.normalMass = kNormal > 0.0f ? 1.0f / kNormal : 0.0f; Vec2.crossToOutUnsafe(vc.normal, 1.0f, tangent); float rtA = Vec2.cross(vcp.rA, tangent); float rtB = Vec2.cross(vcp.rB, tangent); float kTangent = mA + mB + iA * rtA * rtA + iB * rtB * rtB; vcp.tangentMass = kTangent > 0.0f ? 1.0f / kTangent : 0.0f; // Setup a velocity bias for restitution. 
vcp.velocityBias = 0.0f; Vec2.crossToOutUnsafe(wB, vcp.rB, temp1); Vec2.crossToOutUnsafe(wA, vcp.rA, temp2); temp.set(vB).addLocal(temp1).subLocal(vA).subLocal(temp2); float vRel = Vec2.dot(vc.normal, temp); if (vRel < -Settings.velocityThreshold) { vcp.velocityBias = -vc.restitution * vRel; } } // If we have two points, then prepare the block solver. if (vc.pointCount == 2) { VelocityConstraintPoint vcp1 = vc.points[0]; VelocityConstraintPoint vcp2 = vc.points[1]; float rn1A = Vec2.cross(vcp1.rA, vc.normal); float rn1B = Vec2.cross(vcp1.rB, vc.normal); float rn2A = Vec2.cross(vcp2.rA, vc.normal); float rn2B = Vec2.cross(vcp2.rB, vc.normal); float k11 = mA + mB + iA * rn1A * rn1A + iB * rn1B * rn1B; float k22 = mA + mB + iA * rn2A * rn2A + iB * rn2B * rn2B; float k12 = mA + mB + iA * rn1A * rn2A + iB * rn1B * rn2B; if (k11 * k11 < k_maxConditionNumber * (k11 * k22 - k12 * k12)) { // K is safe to invert. vc.K.ex.set(k11, k12); vc.K.ey.set(k12, k22); vc.K.invertToOut(vc.normalMass); } else { // The constraints are redundant, just use one. // TODO_ERIN use deepest? 
vc.pointCount = 1; } } } } // djm pooling from above private final Vec2 dv = new Vec2(); private final Vec2 a = new Vec2(); private final Vec2 b = new Vec2(); private final Vec2 dv1 = new Vec2(); private final Vec2 dv2 = new Vec2(); private final Vec2 x = new Vec2(); private final Vec2 d = new Vec2(); private final Vec2 P1 = new Vec2(); private final Vec2 P2 = new Vec2(); public final void solveVelocityConstraints () { for (int i = 0; i < m_count; ++i) { final ContactVelocityConstraint vc = m_velocityConstraints[i]; int indexA = vc.indexA; int indexB = vc.indexB; float mA = vc.invMassA; float mB = vc.invMassB; float iA = vc.invIA; float iB = vc.invIB; int pointCount = vc.pointCount; Vec2 vA = m_velocities[indexA].v; float wA = m_velocities[indexA].w; Vec2 vB = m_velocities[indexB].v; float wB = m_velocities[indexB].w; // assert(wA == 0); // assert(wB == 0); Vec2 normal = vc.normal; // Vec2.crossToOutUnsafe(normal, 1f, tangent); tangent.x = 1.0f * vc.normal.y; tangent.y = -1.0f * vc.normal.x; final float friction = vc.friction; assert (pointCount == 1 || pointCount == 2); // Solve tangent constraints for (int j = 0; j < pointCount; ++j) { final VelocityConstraintPoint vcp = vc.points[j]; // Vec2.crossToOutUnsafe(wA, vcp.rA, temp); // Vec2.crossToOutUnsafe(wB, vcp.rB, dv); // dv.addLocal(vB).subLocal(vA).subLocal(temp); final Vec2 a = vcp.rA; dv.x = -wB * vcp.rB.y + vB.x - vA.x + wA * a.y; dv.y = wB * vcp.rB.x + vB.y - vA.y - wA * a.x; // Compute tangent force final float vt = dv.x * tangent.x + dv.y * tangent.y - vc.tangentSpeed; float lambda = vcp.tangentMass * (-vt); // Clamp the accumulated force final float maxFriction = friction * vcp.normalImpulse; final float newImpulse = MathUtils.clamp(vcp.tangentImpulse + lambda, -maxFriction, maxFriction); lambda = newImpulse - vcp.tangentImpulse; vcp.tangentImpulse = newImpulse; // Apply contact impulse // Vec2 P = lambda * tangent; final float Px = tangent.x * lambda; final float Py = tangent.y * lambda; // vA -= 
invMassA * P; vA.x -= Px * mA; vA.y -= Py * mA; wA -= iA * (vcp.rA.x * Py - vcp.rA.y * Px); // vB += invMassB * P; vB.x += Px * mB; vB.y += Py * mB; wB += iB * (vcp.rB.x * Py - vcp.rB.y * Px); // System.out.println("tangent solve velocity (point "+j+") for " + indexA + " is " + vA.x + "," + vA.y + " rot " + // wA); // System.out.println("tangent solve velocity (point "+j+") for " + indexB + " is " + vB.x + "," + vB.y + " rot " + // wB); } // Solve normal constraints if (vc.pointCount == 1) { final VelocityConstraintPoint vcp = vc.points[0]; Vec2 a1 = vcp.rA; // Relative velocity at contact // Vec2 dv = vB + Cross(wB, vcp.rB) - vA - Cross(wA, vcp.rA); // Vec2.crossToOut(wA, vcp.rA, temp1); // Vec2.crossToOut(wB, vcp.rB, dv); // dv.addLocal(vB).subLocal(vA).subLocal(temp1); // dv.x = -wB * vcp.rB.y + vB.x - vA.x + wA * a1.y; dv.y = wB * vcp.rB.x + vB.y - vA.y - wA * a1.x; // Compute normal impulse final float vn = dv.x * normal.x + dv.y * normal.y; float lambda = -vcp.normalMass * (vn - vcp.velocityBias); // Clamp the accumulated impulse float a = vcp.normalImpulse + lambda; final float newImpulse = (a > 0.0f ? a : 0.0f); lambda = newImpulse - vcp.normalImpulse; // assert(newImpulse == 0); vcp.normalImpulse = newImpulse; // Apply contact impulse float Px = normal.x * lambda; float Py = normal.y * lambda; // vA -= invMassA * P; vA.x -= Px * mA; vA.y -= Py * mA; wA -= iA * (vcp.rA.x * Py - vcp.rA.y * Px); // assert(vA.x == 0); // vB += invMassB * P; vB.x += Px * mB; vB.y += Py * mB; wB += iB * (vcp.rB.x * Py - vcp.rB.y * Px); // assert(vB.x == 0); } else { // Block solver developed in collaboration with Dirk Gregorius (back in 01/07 on // Box2D_Lite). // Build the mini LCP for this contact patch // // vn = A * x + b, vn >= 0, , vn >= 0, x >= 0 and vn_i * x_i = 0 with i = 1..2 // // A = J * W * JT and J = ( -n, -r1 x n, n, r2 x n ) // b = vn_0 - velocityBias // // The system is solved using the "Total enumeration method" (s. Murty). 
The complementary // constraint vn_i * x_i // implies that we must have in any solution either vn_i = 0 or x_i = 0. So for the 2D // contact problem the cases // vn1 = 0 and vn2 = 0, x1 = 0 and x2 = 0, x1 = 0 and vn2 = 0, x2 = 0 and vn1 = 0 need to be // tested. The first valid // solution that satisfies the problem is chosen. // // In order to account of the accumulated impulse 'a' (because of the iterative nature of // the solver which only requires // that the accumulated impulse is clamped and not the incremental impulse) we change the // impulse variable (x_i). // // Substitute: // // x = a + d // // a := old total impulse // x := new total impulse // d := incremental impulse // // For the current iteration we extend the formula for the incremental impulse // to compute the new total impulse: // // vn = A * d + b // = A * (x - a) + b // = A * x + b - A * a // = A * x + b' // b' = b - A * a; final VelocityConstraintPoint cp1 = vc.points[0]; final VelocityConstraintPoint cp2 = vc.points[1]; a.x = cp1.normalImpulse; a.y = cp2.normalImpulse; assert (a.x >= 0.0f && a.y >= 0.0f); // Relative velocity at contact // Vec2 dv1 = vB + Cross(wB, cp1.rB) - vA - Cross(wA, cp1.rA); dv1.x = -wB * cp1.rB.y + vB.x - vA.x + wA * cp1.rA.y; dv1.y = wB * cp1.rB.x + vB.y - vA.y - wA * cp1.rA.x; // Vec2 dv2 = vB + Cross(wB, cp2.rB) - vA - Cross(wA, cp2.rA); dv2.x = -wB * cp2.rB.y + vB.x - vA.x + wA * cp2.rA.y; dv2.y = wB * cp2.rB.x + vB.y - vA.y - wA * cp2.rA.x; // Compute normal velocity float vn1 = dv1.x * normal.x + dv1.y * normal.y; float vn2 = dv2.x * normal.x + dv2.y * normal.y; b.x = vn1 - cp1.velocityBias; b.y = vn2 - cp2.velocityBias; // System.out.println("b is " + b.x + "," + b.y); // Compute b' Mat22 R = vc.K; b.x -= R.ex.x * a.x + R.ey.x * a.y; b.y -= R.ex.y * a.x + R.ey.y * a.y; // System.out.println("b' is " + b.x + "," + b.y); // final float k_errorTol = 1e-3f; // B2_NOT_USED(k_errorTol); for (;;) { // // Case 1: vn = 0 // // 0 = A * x' + b' // // Solve for x': // // 
x' = - inv(A) * b' // // Vec2 x = - Mul(c.normalMass, b); Mat22.mulToOutUnsafe(vc.normalMass, b, x); x.mulLocal(-1); if (x.x >= 0.0f && x.y >= 0.0f) { // System.out.println("case 1"); // Get the incremental impulse // Vec2 d = x - a; d.set(x).subLocal(a); // Apply incremental impulse // Vec2 P1 = d.x * normal; // Vec2 P2 = d.y * normal; P1.set(normal).mulLocal(d.x); P2.set(normal).mulLocal(d.y); /* * vA -= invMassA * (P1 + P2); wA -= invIA * (Cross(cp1.rA, P1) + Cross(cp2.rA, P2)); * * vB += invMassB * (P1 + P2); wB += invIB * (Cross(cp1.rB, P1) + Cross(cp2.rB, P2)); */ temp1.set(P1).addLocal(P2); temp2.set(temp1).mulLocal(mA); vA.subLocal(temp2); temp2.set(temp1).mulLocal(mB); vB.addLocal(temp2); // assert(vA.x == 0); // assert(vB.x == 0); wA -= iA * (Vec2.cross(cp1.rA, P1) + Vec2.cross(cp2.rA, P2)); wB += iB * (Vec2.cross(cp1.rB, P1) + Vec2.cross(cp2.rB, P2)); // Accumulate // if(x.x != 0 || x.y != 0) { // assert(x.x != 0 || x.y != 0); // } cp1.normalImpulse = x.x; cp2.normalImpulse = x.y; /* * #if B2_DEBUG_SOLVER == 1 // Postconditions dv1 = vB + Cross(wB, cp1.rB) - vA - Cross(wA, cp1.rA); dv2 = vB + * Cross(wB, cp2.rB) - vA - Cross(wA, cp2.rA); * * // Compute normal velocity vn1 = Dot(dv1, normal); vn2 = Dot(dv2, normal); * * assert(Abs(vn1 - cp1.velocityBias) < k_errorTol); assert(Abs(vn2 - cp2.velocityBias) < k_errorTol); #endif */ if (DEBUG_SOLVER) { // Postconditions Vec2 dv1 = vB.add(Vec2.cross(wB, cp1.rB).subLocal(vA).subLocal(Vec2.cross(wA, cp1.rA))); Vec2 dv2 = vB.add(Vec2.cross(wB, cp2.rB).subLocal(vA).subLocal(Vec2.cross(wA, cp2.rA))); // Compute normal velocity vn1 = Vec2.dot(dv1, normal); vn2 = Vec2.dot(dv2, normal); assert (MathUtils.abs(vn1 - cp1.velocityBias) < k_errorTol); assert (MathUtils.abs(vn2 - cp2.velocityBias) < k_errorTol); } break; } // // Case 2: vn1 = 0 and x2 = 0 // // 0 = a11 * x1' + a12 * 0 + b1' // vn2 = a21 * x1' + a22 * 0 + ' // x.x = -cp1.normalMass * b.x; x.y = 0.0f; vn1 = 0.0f; vn2 = vc.K.ex.y * x.x + b.y; if (x.x >= 0.0f && 
vn2 >= 0.0f) { // System.out.println("case 2"); // Get the incremental impulse d.set(x).subLocal(a); // Apply incremental impulse // Vec2 P1 = d.x * normal; // Vec2 P2 = d.y * normal; P1.set(normal).mulLocal(d.x); P2.set(normal).mulLocal(d.y); /* * Vec2 P1 = d.x * normal; Vec2 P2 = d.y * normal; vA -= invMassA * (P1 + P2); wA -= invIA * (Cross(cp1.rA, P1) * + Cross(cp2.rA, P2)); * * vB += invMassB * (P1 + P2); wB += invIB * (Cross(cp1.rB, P1) + Cross(cp2.rB, P2)); */ temp1.set(P1).addLocal(P2); temp2.set(temp1).mulLocal(mA); vA.subLocal(temp2); temp2.set(temp1).mulLocal(mB); vB.addLocal(temp2); // assert(vA.x == 0); // assert(vB.x == 0); wA -= iA * (Vec2.cross(cp1.rA, P1) + Vec2.cross(cp2.rA, P2)); wB += iB * (Vec2.cross(cp1.rB, P1) + Vec2.cross(cp2.rB, P2)); // Accumulate // assert(x.x == 0 && x.y == 0); cp1.normalImpulse = x.x; cp2.normalImpulse = x.y; /* * #if B2_DEBUG_SOLVER == 1 // Postconditions dv1 = vB + Cross(wB, cp1.rB) - vA - Cross(wA, cp1.rA); * * // Compute normal velocity vn1 = Dot(dv1, normal); * * assert(Abs(vn1 - cp1.velocityBias) < k_errorTol); #endif */ if (DEBUG_SOLVER) { // Postconditions Vec2 dv1 = vB.add(Vec2.cross(wB, cp1.rB).subLocal(vA).subLocal(Vec2.cross(wA, cp1.rA))); // Compute normal velocity vn1 = Vec2.dot(dv1, normal); assert (MathUtils.abs(vn1 - cp1.velocityBias) < k_errorTol); } break; } // // Case 3: wB = 0 and x1 = 0 // // vn1 = a11 * 0 + a12 * x2' + b1' // 0 = a21 * 0 + a22 * x2' + ' // x.x = 0.0f; x.y = -cp2.normalMass * b.y; vn1 = vc.K.ey.x * x.y + b.x; vn2 = 0.0f; if (x.y >= 0.0f && vn1 >= 0.0f) { // System.out.println("case 3"); // Resubstitute for the incremental impulse d.set(x).subLocal(a); // Apply incremental impulse /* * Vec2 P1 = d.x * normal; Vec2 P2 = d.y * normal; vA -= invMassA * (P1 + P2); wA -= invIA * (Cross(cp1.rA, P1) * + Cross(cp2.rA, P2)); * * vB += invMassB * (P1 + P2); wB += invIB * (Cross(cp1.rB, P1) + Cross(cp2.rB, P2)); */ P1.set(normal).mulLocal(d.x); P2.set(normal).mulLocal(d.y); 
temp1.set(P1).addLocal(P2); temp2.set(temp1).mulLocal(mA); vA.subLocal(temp2); temp2.set(temp1).mulLocal(mB); vB.addLocal(temp2); // assert(vA.x == 0); // assert(vB.x == 0); wA -= iA * (Vec2.cross(cp1.rA, P1) + Vec2.cross(cp2.rA, P2)); wB += iB * (Vec2.cross(cp1.rB, P1) + Vec2.cross(cp2.rB, P2)); // Accumulate // assert(x.x == 0 && x.y == 0); cp1.normalImpulse = x.x; cp2.normalImpulse = x.y; /* * #if B2_DEBUG_SOLVER == 1 // Postconditions dv2 = vB + Cross(wB, cp2.rB) - vA - Cross(wA, cp2.rA); * * // Compute normal velocity vn2 = Dot(dv2, normal); * * assert(Abs(vn2 - cp2.velocityBias) < k_errorTol); #endif */ if (DEBUG_SOLVER) { // Postconditions Vec2 dv2 = vB.add(Vec2.cross(wB, cp2.rB).subLocal(vA).subLocal(Vec2.cross(wA, cp2.rA))); // Compute normal velocity vn2 = Vec2.dot(dv2, normal); assert (MathUtils.abs(vn2 - cp2.velocityBias) < k_errorTol); } break; } // // Case 4: x1 = 0 and x2 = 0 // // vn1 = b1 // vn2 = ; x.x = 0.0f; x.y = 0.0f; vn1 = b.x; vn2 = b.y; if (vn1 >= 0.0f && vn2 >= 0.0f) { // System.out.println("case 4"); // Resubstitute for the incremental impulse d.set(x).subLocal(a); // Apply incremental impulse /* * Vec2 P1 = d.x * normal; Vec2 P2 = d.y * normal; vA -= invMassA * (P1 + P2); wA -= invIA * (Cross(cp1.rA, P1) * + Cross(cp2.rA, P2)); * * vB += invMassB * (P1 + P2); wB += invIB * (Cross(cp1.rB, P1) + Cross(cp2.rB, P2)); */ P1.set(normal).mulLocal(d.x); P2.set(normal).mulLocal(d.y); temp1.set(P1).addLocal(P2); temp2.set(temp1).mulLocal(mA); vA.subLocal(temp2); temp2.set(temp1).mulLocal(mB); vB.addLocal(temp2); // assert(vA.x == 0); // assert(vB.x == 0); wA -= iA * (Vec2.cross(cp1.rA, P1) + Vec2.cross(cp2.rA, P2)); wB += iB * (Vec2.cross(cp1.rB, P1) + Vec2.cross(cp2.rB, P2)); // Accumulate // assert(x.x == 0 && x.y == 0); cp1.normalImpulse = x.x; cp2.normalImpulse = x.y; break; } // No solution, give up. This is hit sometimes, but it doesn't seem to matter. 
break; // no solution found; give up on this manifold for this iteration
      }
    }

    // Write the updated linear/angular velocities back into the island's
    // shared velocity buffers for both bodies of this constraint.
    m_velocities[indexA].v.set(vA);
    m_velocities[indexA].w = wA;
    m_velocities[indexB].v.set(vB);
    m_velocities[indexB].w = wB;

    // System.out.println("Ending velocity for " + indexA + " is " + vA.x + "," + vA.y + " rot " + wA);
    // System.out.println("Ending velocity for " + indexB + " is " + vB.x + "," + vB.y + " rot " + wB);
    }
  }

  /**
   * Copies the accumulated normal/tangent impulses from each velocity constraint
   * back into its contact manifold so they can be used for warm starting on the
   * next simulation step.
   */
  public void storeImpulses () {
    for (int i = 0; i < m_count; i++) {
      final ContactVelocityConstraint vc = m_velocityConstraints[i];
      final Manifold manifold = m_contacts[vc.contactIndex].getManifold();
      for (int j = 0; j < vc.pointCount; j++) {
        manifold.points[j].normalImpulse = vc.points[j].normalImpulse;
        manifold.points[j].tangentImpulse = vc.points[j].tangentImpulse;
      }
    }
  }

  /*
   * Reference (disabled) C++ sequential solver kept for comparison:
   *
   * #if 0 // Sequential solver. bool ContactSolver::SolvePositionConstraints(float baumgarte) { float minSeparation = 0.0f;
   *
   * for (int i = 0; i < m_constraintCount; ++i) { ContactConstraint* c = m_constraints + i; Body* bodyA = c.bodyA; Body* bodyB =
   * c.bodyB; float invMassA = bodyA.m_mass * bodyA.m_invMass; float invIA = bodyA.m_mass * bodyA.m_invI; float invMassB =
   * bodyB.m_mass * bodyB.m_invMass; float invIB = bodyB.m_mass * bodyB.m_invI;
   *
   * Vec2 normal = c.normal;
   *
   * // Solve normal constraints for (int j = 0; j < c.pointCount; ++j) { ContactConstraintPoint* ccp = c.points + j;
   *
   * Vec2 r1 = Mul(bodyA.GetXForm().R, ccp.localAnchorA - bodyA.GetLocalCenter()); Vec2 r2 = Mul(bodyB.GetXForm().R,
   * ccp.localAnchorB - bodyB.GetLocalCenter());
   *
   * Vec2 p1 = bodyA.m_sweep.c + r1; Vec2 p2 = bodyB.m_sweep.c + r2; Vec2 dp = p2 - p1;
   *
   * // Approximate the current separation. float separation = Dot(dp, normal) + ccp.separation;
   *
   * // Track max constraint error. minSeparation = Min(minSeparation, separation);
   *
   * // Prevent large corrections and allow slop.
   * float C = Clamp(baumgarte * (separation + _linearSlop), -_maxLinearCorrection,
   * 0.0f);
   *
   * // Compute normal impulse float impulse = -ccp.equalizedMass * C;
   *
   * Vec2 P = impulse * normal;
   *
   * bodyA.m_sweep.c -= invMassA * P; bodyA.m_sweep.a -= invIA * Cross(r1, P); bodyA.SynchronizeTransform();
   *
   * bodyB.m_sweep.c += invMassB * P; bodyB.m_sweep.a += invIB * Cross(r2, P); bodyB.SynchronizeTransform(); } }
   *
   * // We can't expect minSpeparation >= -_linearSlop because we don't // push the separation above -_linearSlop. return
   * minSeparation >= -1.5f * _linearSlop; }
   */

  // djm pooling, and from above
  // Scratch objects reused across calls to avoid per-step allocation.
  private final PositionSolverManifold psolver = new PositionSolverManifold();
  private final Vec2 rA = new Vec2();
  private final Vec2 rB = new Vec2();

  /**
   * Sequential solver. Iterates over all position constraints once, pushing body
   * positions apart along each contact normal to remove overlap, and reports
   * whether the worst remaining separation is within tolerance.
   *
   * @return true when the largest penetration is within 3 * linearSlop.
   */
  public final boolean solvePositionConstraints () {
    float minSeparation = 0.0f;

    for (int i = 0; i < m_count; ++i) {
      ContactPositionConstraint pc = m_positionConstraints[i];

      int indexA = pc.indexA;
      int indexB = pc.indexB;

      float mA = pc.invMassA;
      float iA = pc.invIA;
      Vec2 localCenterA = pc.localCenterA;
      float mB = pc.invMassB;
      float iB = pc.invIB;
      Vec2 localCenterB = pc.localCenterB;
      int pointCount = pc.pointCount;

      Vec2 cA = m_positions[indexA].c;
      float aA = m_positions[indexA].a;
      Vec2 cB = m_positions[indexB].c;
      float aB = m_positions[indexB].a;

      // System.out.println("cA: " + cA.x + "," + cA.y + " - rot " + aA);
      // System.out.println("cB: " + cB.x + "," + cB.y + " - rot " + aB);

      // Solve normal constraints
      for (int j = 0; j < pointCount; ++j) {
        // Rebuild the body transforms from the (possibly updated) positions.
        xfA.q.set(aA);
        xfB.q.set(aB);
        Rot.mulToOutUnsafe(xfA.q, localCenterA, xfA.p);
        xfA.p.negateLocal().addLocal(cA);
        Rot.mulToOutUnsafe(xfB.q, localCenterB, xfB.p);
        xfB.p.negateLocal().addLocal(cB);

        // Compute the world-space contact point, normal and separation for
        // this manifold point.
        final PositionSolverManifold psm = psolver;
        psm.initialize(pc, xfA, xfB, j);
        final Vec2 normal = psm.normal;

        final Vec2 point = psm.point;
        final float separation = psm.separation;

        // Lever arms from each body's center of mass to the contact point.
        rA.set(point).subLocal(cA);
        rB.set(point).subLocal(cB);

        // Track max constraint error.
        minSeparation = MathUtils.min(minSeparation, separation);

        // Prevent large corrections and allow slop.
        final float C = MathUtils.clamp(Settings.baumgarte * (separation + Settings.linearSlop), -Settings.maxLinearCorrection, 0.0f);

        // Compute the effective mass.
        final float rnA = Vec2.cross(rA, normal);
        final float rnB = Vec2.cross(rB, normal);
        final float K = mA + mB + iA * rnA * rnA + iB * rnB * rnB;

        // Compute normal impulse
        final float impulse = K > 0.0f ? -C / K : 0.0f;

        P.set(normal).mulLocal(impulse);

        // Apply the positional correction: translate and rotate both bodies.
        cA.subLocal(temp.set(P).mulLocal(mA));
        aA -= iA * Vec2.cross(rA, P);

        cB.addLocal(temp.set(P).mulLocal(mB));
        aB += iB * Vec2.cross(rB, P);
      }

      m_positions[indexA].c.set(cA);
      m_positions[indexA].a = aA;

      m_positions[indexB].c.set(cB);
      m_positions[indexB].a = aB;

      // System.out.println("ending pos "+indexA+": " + cA.x + "," + cA.y + " - rot " + aA);
      // System.out.println("ending pos "+indexB+": " + cB.x + "," + cB.y + " - rot " + aB);
    }

    // We can't expect minSpeparation >= -linearSlop because we don't
    // push the separation above -linearSlop.
    return minSeparation >= -3.0f * Settings.linearSlop;
  }

  // Sequential position solver for position constraints.
public boolean solveTOIPositionConstraints (int toiIndexA, int toiIndexB) {
    // Sequential position solver used during time-of-impact (TOI) resolution.
    // Only the two TOI bodies (toiIndexA / toiIndexB) are allowed to move;
    // every other body is treated as static (zero inverse mass/inertia).
    float minSeparation = 0.0f;

    for (int i = 0; i < m_count; ++i) {
      ContactPositionConstraint pc = m_positionConstraints[i];

      int indexA = pc.indexA;
      int indexB = pc.indexB;
      Vec2 localCenterA = pc.localCenterA;
      Vec2 localCenterB = pc.localCenterB;
      int pointCount = pc.pointCount;

      // Body A only receives correction if it is one of the TOI bodies.
      float mA = 0.0f;
      float iA = 0.0f;
      if (indexA == toiIndexA || indexA == toiIndexB) {
        mA = pc.invMassA;
        iA = pc.invIA;
      }

      // BUGFIX: mB/iB must default to 0 like mA/iA. They were previously
      // initialized unconditionally to pc.invMassB/pc.invIB, which made the
      // guard below a no-op and let non-TOI bodies be pushed by the solver.
      float mB = 0.0f;
      float iB = 0.0f;
      if (indexB == toiIndexA || indexB == toiIndexB) {
        mB = pc.invMassB;
        iB = pc.invIB;
      }

      Vec2 cA = m_positions[indexA].c;
      float aA = m_positions[indexA].a;

      Vec2 cB = m_positions[indexB].c;
      float aB = m_positions[indexB].a;

      // Solve normal constraints
      for (int j = 0; j < pointCount; ++j) {
        // Rebuild both body transforms from the current positions.
        xfA.q.set(aA);
        xfB.q.set(aB);
        Rot.mulToOutUnsafe(xfA.q, localCenterA, xfA.p);
        xfA.p.negateLocal().addLocal(cA);
        Rot.mulToOutUnsafe(xfB.q, localCenterB, xfB.p);
        xfB.p.negateLocal().addLocal(cB);

        // World-space contact point, normal and separation for this point.
        final PositionSolverManifold psm = psolver;
        psm.initialize(pc, xfA, xfB, j);
        Vec2 normal = psm.normal;

        Vec2 point = psm.point;
        float separation = psm.separation;

        rA.set(point).subLocal(cA);
        rB.set(point).subLocal(cB);

        // Track max constraint error.
        minSeparation = MathUtils.min(minSeparation, separation);

        // Prevent large corrections and allow slop.
        float C = MathUtils.clamp(Settings.toiBaugarte * (separation + Settings.linearSlop), -Settings.maxLinearCorrection, 0.0f);

        // Compute the effective mass.
        float rnA = Vec2.cross(rA, normal);
        float rnB = Vec2.cross(rB, normal);
        float K = mA + mB + iA * rnA * rnA + iB * rnB * rnB;

        // Compute normal impulse
        float impulse = K > 0.0f ? -C / K : 0.0f;

        P.set(normal).mulLocal(impulse);

        cA.subLocal(temp.set(P).mulLocal(mA));
        aA -= iA * Vec2.cross(rA, P);

        cB.addLocal(temp.set(P).mulLocal(mB));
        aB += iB * Vec2.cross(rB, P);
      }

      m_positions[indexA].c.set(cA);
      m_positions[indexA].a = aA;

      m_positions[indexB].c.set(cB);
      m_positions[indexB].a = aB;
    }

    // We can't expect minSeparation >= -_linearSlop because we don't
    // push the separation above -_linearSlop.
    return minSeparation >= -1.5f * Settings.linearSlop;
  }

  /** Construction parameters for a {@link ContactSolver}. */
  public static class ContactSolverDef {
    public TimeStep step;
    public Contact[] contacts;
    public int count;
    public Position[] positions;
    public Velocity[] velocities;
  }
}

/**
 * Computes the world-space contact point, normal and separation for a single
 * manifold point of a position constraint. Pooled and reused by the solver.
 */
class PositionSolverManifold {

  public final Vec2 normal = new Vec2();
  public final Vec2 point = new Vec2();
  public float separation;

  // djm pooling — scratch vectors reused across initialize() calls.
  private final Vec2 pointA = new Vec2();
  private final Vec2 pointB = new Vec2();
  private final Vec2 temp = new Vec2();
  private final Vec2 planePoint = new Vec2();
  private final Vec2 clipPoint = new Vec2();

  /**
   * Populates {@link #normal}, {@link #point} and {@link #separation} for
   * manifold point {@code index} of {@code pc}, given the current transforms
   * of both bodies. The resulting normal always points from body A to body B.
   */
  public void initialize (ContactPositionConstraint pc, Transform xfA, Transform xfB, int index) {
    assert (pc.pointCount > 0);

    switch (pc.type) {
    case CIRCLES: {
      // Circle-circle: normal runs between the two centers; contact point is
      // their midpoint.
      Transform.mulToOutUnsafe(xfA, pc.localPoint, pointA);
      Transform.mulToOutUnsafe(xfB, pc.localPoints[0], pointB);
      normal.set(pointB).subLocal(pointA);
      normal.normalize();

      point.set(pointA).addLocal(pointB).mulLocal(.5f);
      temp.set(pointB).subLocal(pointA);
      separation = Vec2.dot(temp, normal) - pc.radiusA - pc.radiusB;
      break;
    }

    case FACE_A: {
      // Reference face on A: separation measured from A's face plane to B's
      // clip point, along A's face normal.
      Rot.mulToOutUnsafe(xfA.q, pc.localNormal, normal);
      Transform.mulToOutUnsafe(xfA, pc.localPoint, planePoint);

      Transform.mulToOutUnsafe(xfB, pc.localPoints[index], clipPoint);
      temp.set(clipPoint).subLocal(planePoint);
      separation = Vec2.dot(temp, normal) - pc.radiusA - pc.radiusB;
      point.set(clipPoint);
      break;
    }

    case FACE_B: {
      // Reference face on B: same as FACE_A with roles swapped.
      Rot.mulToOutUnsafe(xfB.q, pc.localNormal, normal);
      Transform.mulToOutUnsafe(xfB, pc.localPoint, planePoint);

      Transform.mulToOutUnsafe(xfA, pc.localPoints[index], clipPoint);
      temp.set(clipPoint).subLocal(planePoint);
      separation = Vec2.dot(temp, normal) - pc.radiusA - pc.radiusB;
      point.set(clipPoint);

      // Ensure normal points from A to B
      normal.negateLocal();
    }
      break;
    }
  }
}
package com.abewy.android.apps.contacts.app;

import java.util.List;
import android.app.ActionBar;
import android.app.ActionBar.OnNavigationListener;
import android.app.ActionBar.Tab;
import android.content.Intent;
import android.os.Bundle;
import android.provider.ContactsContract;
import android.provider.ContactsContract.Intents;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.MenuItem.OnActionExpandListener;
import android.view.ViewGroup;
import android.view.ViewStub;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.SearchView;
import android.widget.SearchView.OnQueryTextListener;
import com.abewy.android.apps.contacts.R;
import com.abewy.android.apps.contacts.adapter.LayoutType;
import com.abewy.android.apps.contacts.adapter.MultiObjectAdapter;
import com.abewy.android.apps.contacts.core.CoreApplication;
import com.abewy.android.apps.contacts.core.CorePrefs;
import com.abewy.android.apps.contacts.iab.IabHelper;
import com.abewy.android.apps.contacts.iab.IabResult;
import com.abewy.android.apps.contacts.iab.Inventory;
import com.abewy.android.apps.contacts.iab.Purchase;
import com.abewy.android.extended.items.BaseType;
import com.crashlytics.android.Crashlytics;
import com.jfeinstein.jazzyviewpager.JazzyViewPager;
import com.jfeinstein.jazzyviewpager.JazzyViewPager.TransitionEffect;

/**
 * Launcher activity hosting a two-page view pager (contacts / favorites),
 * an action-bar search view, and the in-app-billing (donation) flow.
 */
public class MainActivity extends FragmentActivity implements ActionBar.TabListener, IActionbarSpinner {

	// Request code used when launching PreferencesActivity for result.
	private static final int SETTINGS_CODE = 125;

	private SectionsPagerAdapter mSectionsPagerAdapter;
	private JazzyViewPager mViewPager;
	private SearchView mSearchView;
	// Page-transition animation currently applied to the pager.
	private TransitionEffect mCurrentEffect;
	// Pager page restored after returning from settings.
	private int mCurrentFragmentIndex;

	// Forwards live search text to the currently visible fragment.
	private OnQueryTextListener mQueryTextListener = new OnQueryTextListener() {

		@Override
		public boolean onQueryTextSubmit(String query) {
			return false;
		}

		@Override
		public boolean onQueryTextChange(String newText) {
			mSectionsPagerAdapter.setSearchQuery(newText);
			return true;
		}
	};

	// Disables swiping while the search view is expanded and clears the
	// query/listener when it collapses.
	private OnActionExpandListener mExpandListener = new OnActionExpandListener() {

		@Override
		public boolean onMenuItemActionExpand(MenuItem item) {
			mViewPager.setPagingEnabled(false);
			mSearchView.setOnQueryTextListener(mQueryTextListener);
			return true;
		}

		@Override
		public boolean onMenuItemActionCollapse(MenuItem item) {
			Log.d("MainActivity", "onMenuItemActionCollapse: ");
			mViewPager.setPagingEnabled(true);
			mSearchView.setOnQueryTextListener(null);
			mSectionsPagerAdapter.setSearchQuery(null);
			return true;
		}
	};

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		// NOTE(review): passes null instead of savedInstanceState, discarding
		// any saved fragment state on recreation — confirm this is deliberate.
		super.onCreate(null);
		Crashlytics.start(this);
		setContentView(R.layout.activity_main);

		String base64EncodedPublicKey = CoreApplication.generateIabKey();

		// compute your public key and store it in base64EncodedPublicKey
		mHelper = new IabHelper(this, base64EncodedPublicKey);
		// enable debug logging (for a production application, you should set this to false).
		// mHelper.enableDebugLogging(true);

		mViewPager = (JazzyViewPager) findViewById(R.id.pager);
		// CorePrefs.setHasDonated(false);
		setupViewPager();

		// On first launch, overlay a showcase view that swallows touches until
		// the user dismisses it via its button.
		if (CorePrefs.isFirstLaunch()) {
			ViewStub showcaseStub = (ViewStub) findViewById(R.id.showcase_stub);
			showcaseStub.setLayoutResource(R.layout.activity_main_showcase);
			View showcase = showcaseStub.inflate();
			showcase.setOnTouchListener(new View.OnTouchListener() {

				@Override
				public boolean onTouch(View v, MotionEvent event) {
					// Consume all touches so the UI below is inert.
					return true;
				}
			});
			final Button button = (Button) showcase.findViewById(R.id.showcase_button);
			button.setOnClickListener(new View.OnClickListener() {

				@Override
				public void onClick(View v) {
					CorePrefs.setFirstLaunchDone();
					((ViewGroup) button.getParent().getParent()).removeView(((View) button.getParent()));
					mViewPager.setPagingEnabled(true);
					invalidateOptionsMenu();
				}
			});
		}

		// Only query the billing service while the user has not donated yet.
		if (!CorePrefs.hasDonated()) {
			launchIab();
		}
	}

	/** Configures the jazzy pager: effect, adapter, margins and start page. */
	private void setupViewPager() {
		mCurrentEffect = CorePrefs.getViewPagerEffect();
		mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());
		mViewPager.setTransitionEffect(mCurrentEffect);
		mViewPager.setFadeEnabled(false);
		mViewPager.setAdapter(mSectionsPagerAdapter);
		mViewPager.setPageMargin(30);
		mViewPager.setCurrentItem(mCurrentFragmentIndex);
		mViewPager.setOnPageChangeListener(mSectionsPagerAdapter);
		// Swiping stays disabled while the first-launch showcase is visible.
		mViewPager.setPagingEnabled(!CorePrefs.isFirstLaunch());
		// mSectionsPagerAdapter.setData(contacts, favContacts);
	}

	@Override
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		// Returning from settings: restart the activity if prefs changed.
		if (requestCode == SETTINGS_CODE) {
			if (CorePrefs.getPrefsHaveChanged()) {
				CorePrefs.setPrefsHaveChanged(false);
				restart();
			}
		}
		if (mHelper == null) return;

		// Pass on the activity result to the helper for handling
		if (!mHelper.handleActivityResult(requestCode, resultCode, data)) {
			// not handled, so handle it ourselves (here's where you'd
			// perform any handling of activity results not related to in-app
			// billing...
super.onActivityResult(requestCode, resultCode, data);
		} else {
			Log.d(TAG, "onActivityResult handled by IABUtil.");
		}
	}

	/**
	 * Fully restarts this activity (clearing the task) with a fade transition;
	 * used after preference changes that require a rebuild of the UI.
	 */
	private void restart() {
		Intent localIntent = new Intent(this, MainActivity.class);
		localIntent.setFlags(Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED | Intent.FLAG_ACTIVITY_NO_ANIMATION
				| Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_CLEAR_TASK);
		startActivity(localIntent);
		overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out);
		finish();
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// No menu while the first-launch showcase overlay is up.
		if (!CorePrefs.isFirstLaunch()) {
			getMenuInflater().inflate(R.menu.main, menu);
			MenuItem item = menu.findItem(R.id.action_search);
			mSearchView = (SearchView) item.getActionView();
			item.setOnActionExpandListener(mExpandListener);
			// Offer the donation entry only to users who have not donated.
			if (!CorePrefs.hasDonated()) {
				menu.add(Menu.NONE, R.id.action_help_me, 99, R.string.action_help_me);
			}
		}
		return true;
	}

	// ___ Tabs management

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		if (item.getItemId() == R.id.action_add_contact) {
			// Creates a new Intent to insert a contact
			Intent intent = new Intent(Intents.Insert.ACTION);
			// Sets the MIME type to match the Contacts Provider
			intent.setType(ContactsContract.RawContacts.CONTENT_TYPE);
			intent.putExtra("finishActivityOnSaveCompleted", true);
			startActivity(intent);
			return true;
		}
		if (item.getItemId() == R.id.action_help_me) {
			startActivity(new Intent(this, HelpMeActivity.class));
			return true;
		}
		if (item.getItemId() == R.id.action_settings) {
			// Remember the current page so it can be restored after settings.
			mCurrentFragmentIndex = mViewPager.getCurrentItem();
			startActivityForResult(new Intent(this, PreferencesActivity.class), SETTINGS_CODE);
			return true;
		}
		return super.onOptionsItemSelected(item);
	}

	@Override
	public void onTabReselected(Tab tab, android.app.FragmentTransaction ft) {
	}

	@Override
	public void onTabSelected(Tab tab, android.app.FragmentTransaction ft) {
		// Keep the pager in sync with the selected action-bar tab.
		mViewPager.setCurrentItem(tab.getPosition());
	}

	@Override
	public void onTabUnselected(Tab tab, android.app.FragmentTransaction ft) {
	}

	@Override
	protected void onDestroy() {
		super.onDestroy();
		// Release the billing helper and drop all view/adapter references to
		// avoid leaking the activity.
		if (mHelper != null) {
			mHelper.dispose();
			mHelper = null;
		}
		if (mSectionsPagerAdapter != null) mSectionsPagerAdapter.onDestroy();
		if (mViewPager != null) {
			mViewPager.setOnPageChangeListener(null);
		}
		mSectionsPagerAdapter = null;
		mViewPager = null;
		mSearchView = null;
	}

	/**
	 * A {@link FragmentPagerAdapter} that returns a fragment corresponding to
	 * one of the sections/tabs/pages.
	 */
	public class SectionsPagerAdapter extends FragmentPagerAdapter implements ViewPager.OnPageChangeListener {

		private FragmentContainer peopleFragment;
		private FragmentContainer favoritesFragment;

		public SectionsPagerAdapter(FragmentManager fm) {
			super(fm);
		}

		@Override
		public Fragment getItem(int position) {
			// Page 0: all contacts; page 1: favorites.
			if (position == 0) {
				peopleFragment = new FragmentContainer();
				return peopleFragment;
			} else {
				favoritesFragment = new FavoritesFragmentContainer();
				return favoritesFragment;
			}
		}

		@Override
		public int getItemPosition(Object object) {
			// Force fragment recreation on notifyDataSetChanged().
			return POSITION_NONE;
		}

		@Override
		public int getCount() {
			return 2;
		}

		@Override
		public CharSequence getPageTitle(int position) {
			/*Locale l = Locale.getDefault(); switch (position) { case 0: { return
			getString(R.string.title_section1).toUpperCase(l); } case 1: { return
			getString(R.string.title_section2).toUpperCase(l); } }*/
			return null;
		}

		@Override
		public Object instantiateItem(ViewGroup container, final int position) {
			Object obj = super.instantiateItem(container, position);
			// Register the page with JazzyViewPager so transitions can find it.
			mViewPager.setObjectForPosition(obj, position);
			return obj;
		}

		/** Routes a search query to whichever page is currently visible. */
		public void setSearchQuery(String query) {
			if (mViewPager.getCurrentItem() == 0)
				peopleFragment.searchQuery(query);
			else
				favoritesFragment.searchQuery(query);
		}

		public void destroyItem(ViewGroup container, int position, Object obj) {
			container.removeView(mViewPager.findViewFromObject(position));
		}

		public void onDestroy() {
			peopleFragment = null;
			favoritesFragment = null;
		}

		@Override
		public void onPageSelected(int position) {
			if (peopleFragment == null) return;
			if
(position == 0) {
				// Notify fragments which page is in front so they can pause/resume work.
				peopleFragment.onSetToFront();
				favoritesFragment.onSetToBack();
			} else {
				favoritesFragment.onSetToFront();
				peopleFragment.onSetToBack();
			}
		}

		@Override
		public void onPageScrollStateChanged(int state) {
		}

		@Override
		public void onPageScrolled(int arg0, float arg1, int arg2) {
		}
	}

	// ___ InApp Billing
	private IabHelper mHelper;
	private static final String TAG = "MainActivity";

	// Listener that's called when we finish querying the items and subscriptions we own
	IabHelper.QueryInventoryFinishedListener mGotInventoryListener = new IabHelper.QueryInventoryFinishedListener() {
		public void onQueryInventoryFinished(IabResult result, Inventory inventory) {
			Log.d(TAG, "Query inventory finished.");
			// MessengerApplication.PRO_VERSION_CHECKED = true;

			// Have we been disposed of in the meantime? If so, quit.
			if (mHelper == null) return;

			// Is it a failure?
			if (result.isFailure()) {
				Log.d(TAG, "Failed to query inventory: " + result);
				// Fail to check, so we don't display ads
				// to avoid pro users to see ads
				// MessengerApplication.IS_PRO_VERSION = true;
				return;
			}

			Log.d(TAG, "Query inventory was successful.");

			/*
			 * Check for items we own. Notice that for each purchase, we check
			 * the developer payload to see if it's correct! See
			 * verifyDeveloperPayload().
			 */
			// Any owned SKU from the donation list marks the user as a donor.
			String[] skus = getResources().getStringArray(R.array.donate_values);
			for (String sku : skus) {
				Purchase donation = inventory.getPurchase(sku);
				if (donation != null) {
					// mHelper.consumeAsync(donation, null);
					CorePrefs.setHasDonated(true);
					break;
				}
			}
			// Refresh the menu so the donation entry disappears.
			if (CorePrefs.hasDonated()) invalidateOptionsMenu();
			Log.d(TAG, "Initial inventory query finished; enabling main UI.");
		}
	};

	/** Connects to the billing service, then queries the owned inventory. */
	private void launchIab() {
		mHelper.startSetup(new IabHelper.OnIabSetupFinishedListener() {
			public void onIabSetupFinished(IabResult result) {
				Log.d(TAG, "Setup finished.");

				if (!result.isSuccess()) {
					// Oh noes, there was a problem.
					Log.d("MainActivity.onCreate(...).new OnIabSetupFinishedListener() {...}",
							"onIabSetupFinished: Problem setting up in-app billing: " + result);
					return;
				}

				// Have we been disposed of in the meantime? If so, quit.
				if (mHelper == null) return;

				// IAB is fully set up. Now, let's get an inventory of stuff we own.
				Log.d(TAG, "Setup successful. Querying inventory.");
				mHelper.queryInventoryAsync(mGotInventoryListener);
			}
		});
	}

	@Override
	public void displaySpinnerInActionBar(String[] array, int position, OnNavigationListener listener) {
		// Show a drop-down navigation spinner built from a plain string array.
		ArrayAdapter<CharSequence> list = new ArrayAdapter<CharSequence>(getActionBar().getThemedContext(),
				android.R.layout.simple_dropdown_item_1line, array);
		list.setDropDownViewResource(android.R.layout.simple_dropdown_item_1line);
		getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
		getActionBar().setListNavigationCallbacks(list, listener);
		getActionBar().setSelectedNavigationItem(position);
	}

	@Override
	public void displaySpinnerInActionBar(int array, int position, OnNavigationListener listener) {
		// Same as above, but the entries come from a string-array resource id.
		ArrayAdapter<CharSequence> list = ArrayAdapter.createFromResource(getActionBar().getThemedContext(), array,
				android.R.layout.simple_dropdown_item_1line);
		list.setDropDownViewResource(android.R.layout.simple_dropdown_item_1line);
		getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
		getActionBar().setListNavigationCallbacks(list, listener);
		getActionBar().setSelectedNavigationItem(position);
	}

	@Override
	public void displaySpinnerInActionBar(List<BaseType> data, int position, OnNavigationListener listener) {
		// Same as above, with arbitrary items rendered by MultiObjectAdapter.
		MultiObjectAdapter adapter = new MultiObjectAdapter(null, LayoutType.DROP_DOWN_ITEM);
		adapter.addAll(data);
		getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
		getActionBar().setListNavigationCallbacks(adapter, listener);
		getActionBar().setSelectedNavigationItem(position);
	}

	@Override
	public void removeSpinnerInActionBar() {
		// Restore the default (non-list) action-bar navigation.
		getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
		getActionBar().setListNavigationCallbacks(null, null);
	}
}
/**
 * Copyright 2010-2016 Boxfuse GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flywaydb.commandline;

import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.FlywayException;
import org.flywaydb.core.internal.info.MigrationInfoDumper;
import org.flywaydb.core.internal.util.ClassUtils;
import org.flywaydb.core.internal.util.FileCopyUtils;
import org.flywaydb.core.internal.util.StringUtils;
import org.flywaydb.core.internal.util.VersionPrinter;
import org.flywaydb.core.internal.util.logging.Log;
import org.flywaydb.core.internal.util.logging.LogFactory;
import org.flywaydb.core.internal.util.logging.console.ConsoleLog.Level;
import org.flywaydb.core.internal.util.logging.console.ConsoleLogCreator;

import java.io.Console;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * Main class and central entry point of the Flyway command-line tool.
 */
public class Main {
    // Assigned once by initLogging() before any other code runs.
    private static Log LOG;

    /**
     * The property name for the directory containing a list of jars to load on the classpath.
     */
    private static final String PROPERTY_JAR_DIRS = "flyway.jarDirs";

    /**
     * Initializes the logging.
     *
     * @param level The minimum level to log at.
     */
    static void initLogging(Level level) {
        LogFactory.setFallbackLogCreator(new ConsoleLogCreator(level));
        LOG = LogFactory.getLog(Main.class);
    }

    /**
     * Main method.
     *
     * @param args The command-line arguments.
     */
    public static void main(String[] args) {
        // Logging must be configured first so every later step can report.
        Level logLevel = getLogLevel(args);
        initLogging(logLevel);

        try {
            printVersion();
            if (isPrintVersionAndExit(args)) {
                System.exit(0);
            }

            List<String> operations = determineOperations(args);
            if (operations.isEmpty()) {
                printUsage();
                return;
            }

            // Configuration precedence: defaults < config files < command line.
            Properties properties = new Properties();
            initializeDefaults(properties);
            loadConfiguration(properties, args);
            overrideConfiguration(properties, args);
            promptForCredentialsIfMissing(properties);
            dumpConfiguration(properties);

            // Extend the classpath before Flyway scans for drivers/migrations.
            loadJdbcDrivers();
            loadJavaMigrationsFromJarDirs(properties);

            Flyway flyway = new Flyway();
            // Strip CLI-only keys so Flyway doesn't reject unknown properties.
            filterProperties(properties);
            flyway.configure(properties);

            for (String operation : operations) {
                executeOperation(flyway, operation);
            }
        } catch (Exception e) {
            // Full stack trace only in debug mode; terse message otherwise.
            if (logLevel == Level.DEBUG) {
                LOG.error("Unexpected error", e);
            } else {
                if (e instanceof FlywayException) {
                    LOG.error(e.getMessage());
                } else {
                    LOG.error(e.toString());
                }
            }
            System.exit(1);
        }
    }

    /** Returns whether {@code -v} (print version and exit) was passed. */
    private static boolean isPrintVersionAndExit(String[] args) {
        for (String arg : args) {
            if ("-v".equals(arg)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Executes this operation on this Flyway instance.
     *
     * @param flyway The Flyway instance.
     * @param operation The operation to execute.
     */
    private static void executeOperation(Flyway flyway, String operation) {
        if ("clean".equals(operation)) {
            flyway.clean();
        } else if ("baseline".equals(operation)) {
            flyway.baseline();
        } else if ("migrate".equals(operation)) {
            flyway.migrate();
        } else if ("validate".equals(operation)) {
            flyway.validate();
        } else if ("info".equals(operation)) {
            LOG.info("\n" + MigrationInfoDumper.dumpToAsciiTable(flyway.info().all()));
        } else if ("repair".equals(operation)) {
            flyway.repair();
        } else {
            // Unknown command: show usage and exit with an error status.
            LOG.error("Invalid operation: " + operation);
            printUsage();
            System.exit(1);
        }
    }

    /**
     * Checks the desired log level.
     *
     * @param args The command-line arguments.
     * @return The desired log level.
     */
    private static Level getLogLevel(String[] args) {
        // -X (debug) wins over -q (quiet); default is INFO.
        for (String arg : args) {
            if ("-X".equals(arg)) {
                return Level.DEBUG;
            }
            if ("-q".equals(arg)) {
                return Level.WARN;
            }
        }
        return Level.INFO;
    }

    /**
     * Initializes the properties with the default configuration for the command-line tool.
     *
     * @param properties The properties object to initialize.
     */
    private static void initializeDefaults(Properties properties) {
        properties.put("flyway.locations", "filesystem:" + new File(getInstallationDir(), "sql").getAbsolutePath());
        properties.put(PROPERTY_JAR_DIRS, new File(getInstallationDir(), "jars").getAbsolutePath());
    }

    /**
     * Filters these properties to remove the Flyway Commandline-specific ones.
     *
     * @param properties The properties to filter.
     */
    private static void filterProperties(Properties properties) {
        properties.remove(PROPERTY_JAR_DIRS);
        properties.remove("flyway.configFile");
        properties.remove("flyway.configFileEncoding");
    }

    /**
     * Prints the version number on the console.
     *
     * @throws IOException when the version could not be read.
     */
    private static void printVersion() throws IOException {
        VersionPrinter.printVersion();
        LOG.info("");

        // Environment details are useful when diagnosing support requests.
        LOG.debug("Java " + System.getProperty("java.version") + " (" + System.getProperty("java.vendor") + ")");
        LOG.debug(System.getProperty("os.name") + " " + System.getProperty("os.version") + " " + System.getProperty("os.arch") + "\n");
    }

    /**
     * Prints the usage instructions on the console.
     */
    private static void printUsage() {
        LOG.info("Usage");
        LOG.info("=====");
        LOG.info("");
        LOG.info("flyway [options] command");
        LOG.info("");
        LOG.info("By default, the configuration will be read from conf/flyway.conf.");
        LOG.info("Options passed from the command-line override the configuration.");
        LOG.info("");
        LOG.info("Commands");
        LOG.info("--------");
        LOG.info("migrate : Migrates the database");
        LOG.info("clean : Drops all objects in the configured schemas");
        LOG.info("info : Prints the information about applied, current and pending migrations");
        LOG.info("validate : Validates the applied migrations against the ones on the classpath");
        LOG.info("baseline : Baselines an existing database at the baselineVersion");
        LOG.info("repair : Repairs the metadata table");
        LOG.info("");
        LOG.info("Options (Format: -key=value)");
        LOG.info("-------");
        LOG.info("driver : Fully qualified classname of the jdbc driver");
        LOG.info("url : Jdbc url to use to connect to the database");
        LOG.info("user : User to use to connect to the database");
        LOG.info("password : Password to use to connect to the database");
        LOG.info("schemas : Comma-separated list of the schemas managed by Flyway");
        LOG.info("table : Name of Flyway's metadata table");
        LOG.info("locations : Classpath locations to scan recursively for migrations");
        LOG.info("resolvers : Comma-separated list of custom MigrationResolvers");
        LOG.info("skipDefaultResolvers : Skips default resolvers (jdbc, sql and Spring-jdbc)");
        LOG.info("sqlMigrationPrefix : File name prefix for sql migrations");
        LOG.info("repeatableSqlMigrationPrefix : File name prefix for repeatable sql migrations");
        LOG.info("sqlMigrationSeparator : File name separator for sql migrations");
        LOG.info("sqlMigrationSuffix : File name suffix for sql migrations");
        LOG.info("encoding : Encoding of sql migrations");
        LOG.info("placeholderReplacement : Whether placeholders should be replaced");
        LOG.info("placeholders : Placeholders to replace in sql migrations");
        LOG.info("placeholderPrefix : Prefix of every placeholder");
        LOG.info("placeholderSuffix : Suffix of every placeholder");
        LOG.info("target : Target version up to which Flyway should use migrations");
        LOG.info("outOfOrder : Allows migrations to be run \"out of order\"");
        LOG.info("callbacks : Comma-separated list of FlywayCallback classes");
        LOG.info("skipDefaultCallbacks : Skips default callbacks (sql)");
        LOG.info("validateOnMigrate : Validate when running migrate");
        LOG.info("ignoreFutureMigrations : Allow future migrations when validating");
        LOG.info("cleanOnValidationError : Automatically clean on a validation error");
        LOG.info("cleanDisabled : Whether to disable clean");
        LOG.info("baselineVersion : Version to tag schema with when executing baseline");
        LOG.info("baselineDescription : Description to tag schema with when executing baseline");
        LOG.info("baselineOnMigrate : Baseline on migrate against uninitialized non-empty schema");
        LOG.info("configFile : Config file to use (default: conf/flyway.properties)");
        LOG.info("configFileEncoding : Encoding of the config file (default: UTF-8)");
        LOG.info("jarDirs : Dirs for Jdbc drivers & Java migrations (default: jars)");
        LOG.info("");
        LOG.info("Add -X to print debug output");
        LOG.info("Add -q to suppress all output, except for errors and warnings");
        LOG.info("Add -v to print the Flyway version and exit");
        LOG.info("");
        LOG.info("Example");
        LOG.info("-------");
        LOG.info("flyway -user=myuser -password=s3cr3t -url=jdbc:h2:mem -placeholders.abc=def migrate");
        LOG.info("");
        LOG.info("More info at https://flywaydb.org/documentation/commandline");
    }

    /**
     *
Loads all the driver jars contained in the drivers folder. (For Jdbc drivers) * * @throws IOException When the jars could not be loaded. */ private static void loadJdbcDrivers() throws IOException { File driversDir = new File(getInstallationDir(), "drivers"); File[] files = driversDir.listFiles(new FilenameFilter() { public boolean accept(File dir, String name) { return name.endsWith(".jar"); } }); // see javadoc of listFiles(): null if given path is not a real directory if (files == null) { LOG.error("Directory for Jdbc Drivers not found: " + driversDir.getAbsolutePath()); System.exit(1); } for (File file : files) { addJarOrDirectoryToClasspath(file.getPath()); } } /** * Loads all the jars contained in the jars folder. (For Java Migrations) * * @param properties The configured properties. * @throws IOException When the jars could not be loaded. */ private static void loadJavaMigrationsFromJarDirs(Properties properties) throws IOException { String jarDirs = properties.getProperty(PROPERTY_JAR_DIRS); if (!StringUtils.hasLength(jarDirs)) { return; } jarDirs = jarDirs.replace(File.pathSeparator, ","); String[] dirs = StringUtils.tokenizeToStringArray(jarDirs, ","); for (String dirName : dirs) { File dir = new File(dirName); File[] files = dir.listFiles(new FilenameFilter() { public boolean accept(File dir, String name) { return name.endsWith(".jar"); } }); // see javadoc of listFiles(): null if given path is not a real directory if (files == null) { LOG.error("Directory for Java Migrations not found: " + dirName); System.exit(1); } for (File file : files) { addJarOrDirectoryToClasspath(file.getPath()); } } } /** * Adds a jar or a directory with this name to the classpath. * * @param name The name of the jar or directory to add. * @throws IOException when the jar or directory could not be found. 
*/ /* private -> for testing */ static void addJarOrDirectoryToClasspath(String name) throws IOException { LOG.debug("Adding location to classpath: " + name); try { URL url = new File(name).toURI().toURL(); URLClassLoader sysloader = (URLClassLoader) ClassLoader.getSystemClassLoader(); Method method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class); method.setAccessible(true); method.invoke(sysloader, url); } catch (Exception e) { throw new FlywayException("Unable to load " + name, e); } } /** * Loads the configuration from the various possible locations. * * @param properties The properties object to load to configuration into. * @param args The command-line arguments passed in. */ /* private -> for testing */ static void loadConfiguration(Properties properties, String[] args) { String encoding = determineConfigurationFileEncoding(args); loadConfigurationFile(properties, getInstallationDir() + "/conf/flyway.conf", encoding, false); loadConfigurationFile(properties, System.getProperty("user.home") + "/flyway.conf", encoding, false); loadConfigurationFile(properties, "flyway.conf", encoding, false); String configFile = determineConfigurationFileArgument(args); if (configFile != null) { loadConfigurationFile(properties, configFile, encoding, true); } } /** * Loads the configuration from the configuration file. If a configuration file is specified using the -configfile * argument it will be used, otherwise the default config file (conf/flyway.properties) will be loaded. * * @param properties The properties object to load to configuration into. * @param file The configuration file to load. * @param encoding The encoding of the configuration file. * @param failIfMissing Whether to fail if the file is missing. * @return Whether the file was loaded successfully. * @throws FlywayException when the configuration file could not be loaded. 
*/ private static boolean loadConfigurationFile(Properties properties, String file, String encoding, boolean failIfMissing) throws FlywayException { File configFile = new File(file); String errorMessage = "Unable to load config file: " + configFile.getAbsolutePath(); if (!configFile.isFile() || !configFile.canRead()) { if (!failIfMissing) { LOG.debug(errorMessage); return false; } throw new FlywayException(errorMessage); } LOG.debug("Loading config file: " + configFile.getAbsolutePath()); try { String contents = FileCopyUtils.copyToString(new InputStreamReader(new FileInputStream(configFile), encoding)); properties.load(new StringReader(contents.replace("\\", "\\\\"))); return true; } catch (IOException e) { throw new FlywayException(errorMessage, e); } } /** * If no user or password has been provided, prompt for it. If you want to avoid the prompt, * pass in an empty user or password. * * @param properties The properties object to load to configuration into. */ private static void promptForCredentialsIfMissing(Properties properties) { Console console = System.console(); if (console == null) { // We are running in an automated build. Prompting is not possible. return; } if (!properties.containsKey("flyway.url")) { // URL is not set. We are doomed for failure anyway. return; } if (!properties.containsKey("flyway.user")) { properties.put("flyway.user", console.readLine("Database user: ")); } if (!properties.containsKey("flyway.password")) { char[] password = console.readPassword("Database password: "); properties.put("flyway.password", password == null ? "" : String.valueOf(password)); } } /** * Dumps the configuration to the console when debug output is activated. * * @param properties The configured properties. 
*/ private static void dumpConfiguration(Properties properties) { LOG.debug("Using configuration:"); for (Map.Entry<Object, Object> entry : properties.entrySet()) { String value = entry.getValue().toString(); value = "flyway.password".equals(entry.getKey()) ? StringUtils.trimOrPad("", value.length(), '*') : value; LOG.debug(entry.getKey() + " -> " + value); } } /** * Determines the file to use for loading the configuration. * * @param args The command-line arguments passed in. * @return The path of the configuration file on disk. */ private static String determineConfigurationFileArgument(String[] args) { for (String arg : args) { if (isPropertyArgument(arg) && "configFile".equals(getArgumentProperty(arg))) { return getArgumentValue(arg); } } return null; } /** * @return The installation directory of the Flyway Command-line tool. */ @SuppressWarnings("ConstantConditions") private static String getInstallationDir() { String path = ClassUtils.getLocationOnDisk(Main.class); return new File(path).getParentFile().getParentFile().getAbsolutePath(); } /** * Determines the encoding to use for loading the configuration. * * @param args The command-line arguments passed in. * @return The encoding. (default: UTF-8) */ private static String determineConfigurationFileEncoding(String[] args) { for (String arg : args) { if (isPropertyArgument(arg) && "configFileEncoding".equals(getArgumentProperty(arg))) { return getArgumentValue(arg); } } return "UTF-8"; } /** * Overrides the configuration from the config file with the properties passed in directly from the command-line. * * @param properties The properties to override. * @param args The command-line arguments that were passed in. */ /* private -> for testing*/ static void overrideConfiguration(Properties properties, String[] args) { for (String arg : args) { if (isPropertyArgument(arg)) { properties.put("flyway." 
+ getArgumentProperty(arg), getArgumentValue(arg));
        }
    }
}

/**
 * Checks whether this command-line argument tries to set a property.
 *
 * @param arg The command-line argument to check.
 * @return {@code true} if it does, {@code false} if not.
 */
/* private -> for testing*/ static boolean isPropertyArgument(String arg) {
    return arg.startsWith("-") && arg.contains("=");
}

/**
 * Retrieves the property this command-line argument tries to assign.
 *
 * @param arg The command-line argument to check, typically in the form -key=value.
 * @return The property.
 */
/* private -> for testing*/ static String getArgumentProperty(String arg) {
    // Everything between the leading '-' and the first '='.
    return arg.substring(1, arg.indexOf("="));
}

/**
 * Retrieves the value this command-line argument tries to assign.
 *
 * @param arg The command-line argument to check, typically in the form -key=value.
 * @return The value or an empty string if no value is assigned.
 */
/* private -> for testing*/ static String getArgumentValue(String arg) {
    int separator = arg.indexOf("=");
    if ((separator < 0) || (separator == arg.length())) {
        return "";
    }
    return arg.substring(separator + 1);
}

/**
 * Determine the operations Flyway should execute.
 *
 * @param args The command-line arguments passed in.
 * @return The operations. An empty list if none.
 */
private static List<String> determineOperations(String[] args) {
    List<String> operations = new ArrayList<String>();
    for (String arg : args) {
        // Anything that is not a -flag is an operation name.
        if (!arg.startsWith("-")) {
            operations.add(arg);
        }
    }
    return operations;
}
}
/******************************************************************************* * Copyright (c) 2011-2012 Ethan Hall * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. 
******************************************************************************/ package com.ehdev.chronos.lib.types.holders; import com.ehdev.chronos.lib.enums.PayPeriodDuration; import com.ehdev.chronos.lib.types.Job; import org.joda.time.DateMidnight; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.io.Serializable; /** * @author Ethan Hall */ public class PayPeriodHolder implements Serializable { Job gJob; DateTime gStartOfPP = null; DateTime gEndOfPP = null; PayPeriodDuration gDuration; /** * Default constructor for PayPeriodHolder * * @param inputJob Job to do the work on */ public PayPeriodHolder(Job inputJob){ gJob = inputJob; generate(); } /** * Gets the day's in this pay period * @return int of the day's in the the pay period */ public int getDays(){ switch (gJob.getDuration()){ case ONE_WEEK: return 7; case TWO_WEEKS: return 7 * 2; case THREE_WEEKS: return 7 * 3; case FOUR_WEEKS: return 7 * 4; case FULL_MONTH: if(gStartOfPP == null) gStartOfPP = DateTime.now(); return gStartOfPP.dayOfMonth().getMaximumValue(); case FIRST_FIFTEENTH: if(gStartOfPP == null) gStartOfPP = DateTime.now(); if(gStartOfPP.getDayOfMonth() < 15) return 14; else return gStartOfPP.dayOfMonth().getMaximumValue() - 14; } return 0; } /** * Get the start of the Pay Period. It will call generate if it needs to be called. * * @return DateMidnight containing the start of the Pay Period */ public DateTime getStartOfPayPeriod(){ if(gStartOfPP == null){ generate(); } return gStartOfPP; } /** * Get the end of the Pay Period. It will call generate if it needs to be called. 
 *
 * @return DateTime containing the end of the Pay Period
 */
public DateTime getEndOfPayPeriod(){
    if(gEndOfPP == null){
        generate();
    }
    return gEndOfPP;
}

/**
 * Will do the calculations for the start and end of the current pay period,
 * storing the result in gStartOfPP / gEndOfPP.
 */
public void generate(){
    //Get the start and end of pay period
    DateTime startOfPP = gJob.getStartOfPayPeriod();
    gDuration = gJob.getDuration();
    DateTime endOfPP = DateTime.now(); //Today

    // Whole weeks elapsed between the configured anchor and now, corrected
    // for any UTC-offset difference between the two instants' zones.
    long duration = endOfPP.getMillis() - startOfPP.getMillis();
    DateTimeZone startZone = startOfPP.getZone();
    DateTimeZone endZone = endOfPP.getZone();
    long offset = endZone.getOffset(endOfPP) - startZone.getOffset(startOfPP);
    int weeks = (int)((duration + offset) / 1000 / 60 / 60 / 24 / 7);

    switch (gDuration){
        case ONE_WEEK:
            // Advance the anchor by the elapsed whole weeks.
            startOfPP = startOfPP.plusWeeks(weeks);
            endOfPP = startOfPP.plusWeeks(1);
            break;
        case TWO_WEEKS:
            // Round the elapsed weeks down to whole two-week periods.
            weeks = weeks / 2;
            startOfPP = startOfPP.plusWeeks(weeks * 2);
            endOfPP = startOfPP.plusWeeks(2);
            break;
        case THREE_WEEKS:
            weeks = weeks / 3;
            startOfPP = startOfPP.plusWeeks(weeks * 3);
            endOfPP = startOfPP.plusWeeks(3);
            break;
        case FOUR_WEEKS:
            weeks = weeks / 4;
            startOfPP= startOfPP.plusWeeks(weeks * 4);
            endOfPP = startOfPP.plusWeeks(4);
            break;
        case FULL_MONTH:
            // Period is the current calendar month.
            startOfPP = DateMidnight.now().toDateTime().withDayOfMonth(1);
            endOfPP = startOfPP.plusMonths(1);
            break;
        case FIRST_FIFTEENTH:
            DateTime now = DateTime.now();
            if(now.getDayOfMonth() >= 15){
                // Second half: 15th up to the 1st of the next month
                // (plusDays(20) from the 15th always lands in the next month).
                startOfPP = now.withDayOfMonth(15);
                endOfPP = startOfPP.plusDays(20).withDayOfMonth(1);
            } else {
                // First half: 1st up to the 15th.
                startOfPP = now.withDayOfMonth(1);
                endOfPP = now.withDayOfMonth(15);
            }
            break;
        default:
            break;
    }

    // If the computed window starts in the future, step back one period,
    // expressed in whole weeks. NOTE(review): for FULL_MONTH and
    // FIRST_FIFTEENTH a week-based rollback is only approximate — confirm.
    if (startOfPP.isAfter(DateTime.now())){
        startOfPP = startOfPP.minusWeeks(getDays()/7);
        endOfPP = endOfPP.minusWeeks(getDays()/7);
    }
    gStartOfPP = startOfPP;
    gEndOfPP = endOfPP;
}
/**
 * Moves the pay-period window one full period into the past.
 * Calls generate() first when the boundaries have not been computed yet.
 */
public void moveBackwards(){
    if(gEndOfPP == null || gStartOfPP == null){
        generate();
    }
    switch (gJob.getDuration()){
        case FIRST_FIFTEENTH:
            if(gStartOfPP.getDayOfMonth() == 1){
                // In the first half: jump to the 15th of the previous month.
                gStartOfPP = gStartOfPP.minusMonths(1).withDayOfMonth(15);
                gEndOfPP = gStartOfPP.withDayOfMonth(1).plusMonths(1);
            } else {
                // In the second half: jump to the first half of this month.
                gStartOfPP = gStartOfPP.withDayOfMonth(1);
                gEndOfPP = gStartOfPP.withDayOfMonth(15);
            }
            break;
        case FULL_MONTH:
            gStartOfPP = gStartOfPP.minusDays(1).withDayOfMonth(1);
            gEndOfPP = gStartOfPP.plusMonths(1).withDayOfMonth(1);
            break;
        default:
            // Week-based periods: shift by the fixed period length.
            gStartOfPP = gStartOfPP.minusDays(getDays());
            gEndOfPP = gStartOfPP.plusDays(getDays());
            break;
    }
}

/**
 * Moves the pay-period window one full period into the future.
 * Calls generate() first when the boundaries have not been computed yet.
 */
public void moveForwards(){
    if(gEndOfPP == null || gStartOfPP == null){
        generate();
    }
    switch (gJob.getDuration()){
        case FIRST_FIFTEENTH:
            if(gStartOfPP.getDayOfMonth() == 1){
                // First half -> second half of the same month.
                gStartOfPP = gStartOfPP.withDayOfMonth(15);
                // NOTE(review): unlike generate()/moveBackwards(), the end here
                // is the last day of the month rather than the 1st of the next
                // month — confirm this asymmetry is intended.
                gEndOfPP = gStartOfPP.withDayOfMonth(1).plusMonths(1).minusDays(1);
            } else {
                // Second half -> first half of the next month.
                gStartOfPP = gStartOfPP.withDayOfMonth(1).plusMonths(1);
                gEndOfPP = gStartOfPP.withDayOfMonth(15);
            }
            break;
        case FULL_MONTH:
            gStartOfPP = gStartOfPP.plusMonths(1).withDayOfMonth(1);
            gEndOfPP = gStartOfPP.plusMonths(1).withDayOfMonth(1);
            break;
        default:
            gStartOfPP = gStartOfPP.plusDays(getDays());
            gEndOfPP = gStartOfPP.plusDays(getDays());
            break;
    }
}
}
package eu.opentxs.bridge.core.modules; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Scanner; import eu.ApplicationProperties; import eu.opentxs.bridge.Text; import eu.opentxs.bridge.CustomUTC; import eu.opentxs.bridge.Util; import eu.opentxs.bridge.core.DataModel; import eu.opentxs.bridge.core.Settings; import eu.opentxs.bridge.core.commands.Commands.Extension; import eu.opentxs.bridge.core.commands.Commands.Sophistication; import eu.opentxs.bridge.core.dto.Account; import eu.opentxs.bridge.core.dto.Transaction.InstrumentType; import eu.opentxs.bridge.core.exceptions.OTException; import eu.opentxs.bridge.core.exceptions.OTSystemException; import eu.opentxs.bridge.core.exceptions.OTSystemException.Event; import eu.opentxs.bridge.core.exceptions.OTUserException; public abstract class Module { protected interface RequestGenerator { public int getRequest(); } protected static boolean verbose; protected static boolean verboseServer; protected static boolean verboseClientLog; protected static boolean verboseClientSkip; protected static boolean verboseClientWarn; protected static boolean verboseClientSuccess; protected enum AccountType { SIMPLE("simple"), ISSUER("issuer"); private String value; private AccountType(String value) { this.value = value; } public String getValue() { return value; } public static AccountType parse(String value) { if (value.equalsIgnoreCase(SIMPLE.getValue())) return SIMPLE; if (value.equalsIgnoreCase(ISSUER.getValue())) return ISSUER; return null; } } public static void init() { String walletId = DataModel.getWalletId(); File file = new File(getWalletFileName(walletId)); try {// fatal if (file.exists()) loadWallet(walletId); else createWallet(walletId); } catch (Exception e) { e.printStackTrace(); } String myServerId = DataModel.getMyServerId(); if (myServerId != null) myServerId = 
OTAPI.Wallet.getServerIdFromPartial(myServerId);
		// Drop any remembered id that no longer resolves to a known object.
		if (!Util.isValidString(myServerId))
			Settings.getInstance().setMyServerId(DataModel.EMPTY);
		String myNymId = DataModel.getMyNymId();
		if (myNymId != null)
			myNymId = OTAPI.Wallet.getNymIdFromPartial(myNymId);
		if (!Util.isValidString(myNymId))
			Settings.getInstance().setMyNymId(DataModel.EMPTY);
		String myAssetId = DataModel.getMyAssetId();
		if (myAssetId != null)
			myAssetId = OTAPI.Wallet.getAssetIdFromPartial(myAssetId);
		if (!Util.isValidString(myAssetId))
			Settings.getInstance().setMyAssetId(DataModel.EMPTY);
		String myAccountId = DataModel.getMyAccountId();
		if (myAccountId != null)
			myAccountId = OTAPI.Wallet.getAccountIdFromPartial(myAccountId);
		if (!Util.isValidString(myAccountId))
			Settings.getInstance().setMyAccountId(DataModel.EMPTY);
		Settings.getInstance().save();
		showConfig();
		verbose = false;
		applyVerbose();
	}

	/** Flips verbose mode and re-applies the verbosity flags. */
	public static void toggleVerbose() {
		verbose = !verbose;
		applyVerbose();
		info(String.format("Verbose is now %s", verbose ? "on" : "off"));
	}

	public static void setSophistication(Sophistication sophistication) {
		Settings.getInstance().setSophistication(sophistication.getValue());
		Settings.getInstance().save();
	}

	/**
	 * Selects the working server; an empty argument clears the selection.
	 * Also clears the selected account when it does not belong to the new
	 * server.
	 */
	public static void setMyServerId(String serverId) throws OTException {
		String myServerId;
		if (!Util.isValidString(serverId)) {
			myServerId = DataModel.EMPTY;
		} else {
			myServerId = parseServerId(serverId);
			if (myServerId.equals(DataModel.getMyServerId())) {
				print(myServerId);
				info("Already using this server");
				return;
			}
		}
		Settings.getInstance().setMyServerId(myServerId);
		if (!myServerId.equals(getAccountServerId(DataModel.getMyAccountId())))
			Settings.getInstance().setMyAccountId(DataModel.EMPTY);
		Settings.getInstance().save();
		showConfig();
	}

	/**
	 * Selects the working nym; an empty argument clears the selection.
	 * Also clears the selected account when it does not belong to the new nym.
	 */
	public static void setMyNymId(String nymId) throws OTException {
		String myNymId;
		if (!Util.isValidString(nymId)) {
			myNymId = DataModel.EMPTY;
		} else {
			myNymId = parseNymId(nymId);
			if (myNymId.equals(DataModel.getMyNymId())) {
				showNym(myNymId);
				info("Already using 
this nym"); return; } } Settings.getInstance().setMyNymId(myNymId); if (!myNymId.equals(getAccountNymId(DataModel.getMyAccountId()))) Settings.getInstance().setMyAccountId(DataModel.EMPTY); Settings.getInstance().save(); showConfig(); } public static void setMyAssetId(String assetId) throws OTException { String myAssetId; if (!Util.isValidString(assetId)) { myAssetId = DataModel.EMPTY; } else { myAssetId = parseAssetId(assetId); if (myAssetId.equals(DataModel.getMyAssetId())) { showAsset(myAssetId); info("Already using this asset"); return; } } Settings.getInstance().setMyAssetId(myAssetId); if (!myAssetId.equals(getAccountAssetId(DataModel.getMyAccountId()))) Settings.getInstance().setMyAccountId(DataModel.EMPTY); Settings.getInstance().save(); showConfig(); } public static void setMyAccountId(String accountId) throws OTException { String myAccountId; if (!Util.isValidString(accountId)) { myAccountId = DataModel.EMPTY; } else { myAccountId = parseAccountId(accountId); if (myAccountId.equals(DataModel.getMyAccountId())) { print(myAccountId); info("Already using this account"); return; } String myAssetId = getAccountAssetId(myAccountId); String myNymId = getAccountNymId(myAccountId); String myServerId = getAccountServerId(myAccountId); Settings.getInstance().setMyServerId(myServerId); Settings.getInstance().setMyNymId(myNymId); Settings.getInstance().setMyAssetId(myAssetId); } Settings.getInstance().setMyAccountId(myAccountId); Settings.getInstance().save(); } public static boolean hasAccess(Sophistication sophistication) { return DataModel.getSophistication().hasAccess(sophistication); } public static CustomUTC getTime() { return CustomUTC.getDateUTC(OTAPI.getTime()); } public static void showTime() { print(CustomUTC.timeToString(getTime())); } public static void showConfig() { Sophistication sophistication = DataModel.getSophistication(); String walletId = DataModel.getWalletId(); String myServerId = DataModel.getMyServerId(); String myNymId = 
DataModel.getMyNymId(); String myAssetId = DataModel.getMyAssetId(); String myAccountId = DataModel.getMyAccountId(); print(Util.repeat("-", 13)); print(String.format("%12s: %s", "Mode", sophistication)); print(String.format("%12s: %s", "Wallet", walletId)); print(String.format("%12s: %s (%s)", "Server", myServerId, getServerName(myServerId))); print(String.format("%12s: %s (%s)", "Nym", myNymId, getNymName(myNymId))); print(String.format("%12s: %s (%s)", "Asset", myAssetId, getAssetName(myAssetId))); print(String.format("%12s: %s (%s)", "Account", myAccountId, getAccountName(myAccountId))); print(Util.repeat("-", 13)); } public static void createWallet(String walletId) { try {//fatal InputStream is = ClassLoader.getSystemResource(ApplicationProperties.get().getString("wallet.xml")).openStream(); Scanner s = new Scanner(is); s.useDelimiter("\\A"); String content = s.hasNext() ? s.next() : ""; s.close(); is.close(); content = content.replaceAll("\\r", ""); FileWriter fw = new FileWriter(getWalletFileName(walletId)); fw.write(content); fw.close(); } catch (IOException e) { e.printStackTrace(); } } public static void loadAndShowWallet(String walletId) throws OTException { loadWallet(walletId); Settings.getInstance().setWalletId(walletId); Settings.getInstance().save(); showWallet(); } public static void showWallet() { if (hasAccess(Sophistication.SIMPLE)) { print(Util.repeat("-", 70)); showServers(); } { print(Util.repeat("-", 70)); showNyms(); } { print(Util.repeat("-", 70)); showAssets(); } { print(Util.repeat("-", 70)); Account.show(); } print(Util.repeat("-", 70)); } public static List<String> getServerIds() { List<String> servers = new ArrayList<String>(); int count = OTAPI.getServerCount(); for (int index = 0; index < count; index++) servers.add(OTAPI.GetServer.id(index)); return servers; } public static List<String> getNymIds() { List<String> nyms = new ArrayList<String>(); int count = OTAPI.getNymCount(); for (int index = 0; index < count; index++) 
nyms.add(OTAPI.GetNym.id(index));
		return nyms;
	}

	public static List<String> getAssetIds() {
		List<String> assets = new ArrayList<String>();
		int count = OTAPI.getAssetCount();
		for (int index = 0; index < count; index++)
			assets.add(OTAPI.GetAsset.id(index));
		return assets;
	}

	public static List<String> getAccountIds() {
		List<String> accounts = new ArrayList<String>();
		int count = OTAPI.getAccountCount();
		for (int index = 0; index < count; index++)
			accounts.add(OTAPI.GetAccount.id(index));
		return accounts;
	}

	/** @return the account's server id, or null for a blank account id */
	public static String getAccountServerId(String accountId) {
		if (!Util.isValidString(accountId))
			return null;
		return OTAPI.GetAccount.serverId(accountId);
	}

	/** @return the account's nym id, or null for a blank account id */
	public static String getAccountNymId(String accountId) {
		if (!Util.isValidString(accountId))
			return null;
		return OTAPI.GetAccount.nymId(accountId);
	}

	/** @return the account's asset id, or null for a blank account id */
	public static String getAccountAssetId(String accountId) {
		if (!Util.isValidString(accountId))
			return null;
		return OTAPI.GetAccount.assetId(accountId);
	}

	/**
	 * Parses an amount string into an integer value.
	 *
	 * @throws OTException when the string is not a valid integer
	 */
	public static Integer convertAmountToValue(String amount) throws OTException {
		try {
			return new Integer(amount);
		} catch (NumberFormatException e) {
			error(Event.STRING_TO_INTEGER_CONVERSION_ERROR);
			return null; // unreachable: error() always throws
		}
	}

	/**
	 * Converts an integer value into a volume by rendering it through the
	 * asset's display format and stripping every non-numeric character.
	 */
	protected static Double convertValueToVolume(String assetId, Integer value) throws OTException {
		String format = convertValueToFormat(assetId, value);
		format = format.replaceAll("[^0-9.]", "");
		try {
			return new Double(format);
		} catch (NumberFormatException e) {
			error(Event.STRING_TO_DOUBLE_CONVERSION_ERROR);
			return null; // unreachable: error() always throws
		}
	}

	protected static String convertVolumeToAmount(String assetId, Double volume) {
		return OTAPI.unformatAmount(assetId, volume.toString());
	}

	protected static Integer convertVolumeToValue(String assetId, Double volume) throws OTException {
		return convertAmountToValue(convertVolumeToAmount(assetId, volume));
	}

	protected static String convertAmountToFormat(String assetId, String amount) {
		return OTAPI.formatAmount(assetId, amount);
	}
public static String convertValueToFormat(String assetId, Integer value) { return convertAmountToFormat(assetId, value.toString()); } public static String convertVolumeToFormat(String assetId, Double volume) { return convertAmountToFormat(assetId, convertVolumeToAmount(assetId, volume)); } public static String getServerName(String serverId) { if (!Util.isValidString(serverId)) return Text.NAME_UNKNOWN.toString(); String serverName = OTAPI.GetServer.name(serverId); if (Util.isValidString(serverName)) return serverName; return Text.NAME_UNKNOWN.toString(); } public static String getNymName(String nymId) { if (!Util.isValidString(nymId)) return Text.NAME_UNKNOWN.toString(); String nymName = OTAPI.GetNym.name(nymId); if (Util.isValidString(nymName)) return nymName; return Text.NAME_UNKNOWN.toString(); } public static String getAssetName(String assetId) { if (!Util.isValidString(assetId)) return Text.NAME_UNKNOWN.toString(); String assetName = OTAPI.GetAsset.name(assetId); if (Util.isValidString(assetName)) return assetName; return Text.NAME_UNKNOWN.toString(); } public static String getAccountName(String accountId) { if (!Util.isValidString(accountId)) return Text.NAME_UNKNOWN.toString(); if (ApplicationProperties.get().getBoolean("account.standardNaming")) return getAccountStandardName(accountId); return OTAPI.GetAccount.name(accountId); } public static String getAccountType(String accountId) { if (!Util.isValidString(accountId)) return null; return OTAPI.GetAccount.type(accountId); } public static String getAccountStandardName(String accountId) { String accountType = getAccountType(accountId); if (accountType.equals(AccountType.ISSUER.getValue())) { return String.format("%s%s's %s", Text.ISSUER_SIGN, getNymName(getAccountNymId(accountId)), getAssetName(getAccountAssetId(accountId))); } return String.format("%s's %s", getNymName(getAccountNymId(accountId)), getAssetName(getAccountAssetId(accountId))); } public static String getPurseStandardName(String nymId, String 
assetId) {
		return String.format("%s's %s", getNymName(nymId), getAssetName(assetId));
	}

	public static InstrumentType getInstrumentType(String instrument) {
		return InstrumentType.parse(OTAPI.Instrument.getType(instrument));
	}

	/** Prints a short name/id card for a server. */
	public static void showServer(String serverId) {
		print(Util.repeat("-", 13));
		print(String.format("%12s: %s", "Name", getServerName(serverId)));
		print(String.format("%12s: %s", "Server", serverId));
		print(Util.repeat("-", 13));
	}

	/** Prints a short name/id card for a nym. */
	public static void showNym(String nymId) {
		print(Util.repeat("-", 13));
		print(String.format("%12s: %s", "Name", getNymName(nymId)));
		print(String.format("%12s: %s", "Nym", nymId));
		print(Util.repeat("-", 13));
	}

	/** Prints a short name/id card for an asset. */
	public static void showAsset(String assetId) {
		print(Util.repeat("-", 13));
		print(String.format("%12s: %s", "Name", getAssetName(assetId)));
		print(String.format("%12s: %s", "Asset", assetId));
		print(Util.repeat("-", 13));
	}

	/** Prints a short name/id card for an account. */
	public static void showAccount(String accountId) {
		print(Util.repeat("-", 13));
		print(String.format("%12s: %s", "Name", getAccountName(accountId)));
		print(String.format("%12s: %s", "Account", accountId));
		print(Util.repeat("-", 13));
	}

	/**
	 * Prints the account's balance card; when a cash purse exists for the
	 * same server/nym/asset, its balance is shown alongside.
	 */
	public static void showLedger(String accountId) throws OTException {
		String serverId = getAccountServerId(accountId);
		String nymId = getAccountNymId(accountId);
		String assetId = getAccountAssetId(accountId);
		String purse = OTAPI.loadPurse(serverId, nymId, assetId);
		Integer purseBalanceValue = new Integer(0);
		if (Util.isValidString(purse))
			purseBalanceValue = getPurseBalanceValue(serverId, assetId, purse);
		print(Util.repeat("-", 13));
		print(String.format("%12s: %s", "Name", getAccountName(accountId)));
		if (purseBalanceValue > 0)
			print(String.format("%12s: %s (+ %s)", "Balance", getAccountBalanceFormatted(accountId),
					getPurseBalanceFormatted(serverId, assetId, purse)));
		else
			print(String.format("%12s: %s", "Balance", getAccountBalanceFormatted(accountId)));
		print(String.format("%12s: %s", "Account", accountId));
		print(String.format("%12s: %s (%s)", "Asset",
assetId, getAssetName(assetId))); print(String.format("%12s: %s (%s)", "Nym", nymId, getNymName(nymId))); print(String.format("%12s: %s (%s)", "Server", serverId, getServerName(serverId))); print(Util.repeat("-", 13)); } /********************************************************************** * internal *********************************************************************/ private static String getWalletFileName(String walletId) { return String.format("%s/%s.%s", ApplicationProperties.getUserDataPath(), walletId, Extension.DEFINITION.getValue()); } private static void loadWallet(String walletId) throws OTException { String fileName = String.format("%s.%s", walletId, Extension.DEFINITION.getValue()); if (!OTAPI.setWallet(fileName)) error("Failed to set wallet"); if (!OTAPI.loadWallet()) error("Failed to load wallet"); } private static void applyVerbose() { if (verbose) { verboseServer = false; verboseClientLog = true; verboseClientSkip = true; verboseClientWarn = true; verboseClientSuccess = true; } else { verboseServer = ApplicationProperties.get().getBoolean("verbose.server"); verboseClientLog = ApplicationProperties.get().getBoolean( "verbose.client.log"); verboseClientSkip = ApplicationProperties.get().getBoolean( "verbose.client.skip"); verboseClientWarn = ApplicationProperties.get().getBoolean( "verbose.client.warn"); verboseClientSuccess = ApplicationProperties.get().getBoolean( "verbose.client.success"); } } private static void showServers() { print(String.format("%12s:", "SERVERS")); int count = OTAPI.getServerCount(); int i = 0; for (int index = 0; index < count; index++) { String serverId = OTAPI.GetServer.id(index); String serverName = getServerName(serverId); if (i == 0) print(Util.repeat("-", 13)); print(String.format("%12d: %s (%s)", ++i, serverId, serverName)); } if (i > 0) print(Util.repeat("-", 13)); } private static void showNyms() { print(String.format("%12s:", "NYMS")); int count = OTAPI.getNymCount(); int i = 0; for (int index = 0; index < 
count; index++) {
			String nymId = OTAPI.GetNym.id(index);
			String nymName = getNymName(nymId);
			if (i == 0)
				print(Util.repeat("-", 13));
			print(String.format("%12d: %s (%s)", ++i, nymId, nymName));
		}
		if (i > 0)
			print(Util.repeat("-", 13));
	}

	/** Prints the numbered list of known assets. */
	private static void showAssets() {
		print(String.format("%12s:", "ASSETS"));
		int count = OTAPI.getAssetCount();
		int i = 0;
		for (int index = 0; index < count; index++) {
			String assetId = OTAPI.GetAsset.id(index);
			String assetName = getAssetName(assetId);
			if (i == 0)
				print(Util.repeat("-", 13));
			print(String.format("%12d: %s (%s)", ++i, assetId, assetName));
		}
		if (i > 0)
			print(Util.repeat("-", 13));
	}

	/**
	 * Decides whether an id looks like a partial that should be resolved.
	 *
	 * @return null when the id is missing or shorter than the configured
	 *         minimum, true when its length is within the partial range,
	 *         false otherwise
	 */
	private static Boolean isMeantForParsing(String id) {
		if (id == null)
			return null;
		int min = ApplicationProperties.get().getInteger("parsing.size.min");
		int len = id.length();
		if (len < min)
			return null;
		int max = ApplicationProperties.get().getInteger("parsing.size.max");
		return (len >= min && len <= max);
	}

	/** Resolves a (possibly partial) server id, logging the result. */
	protected static String parseServerId(String serverId) throws OTException {
		Boolean parsing = isMeantForParsing(serverId);
		if (parsing == null)
			error(Event.PARSE_SERVER_ID_ERROR);
		if (parsing) {
			String id = OTAPI.Wallet.getServerIdFromPartial(serverId);
			if (!Util.isValidString(id))
				error(Event.PARSE_SERVER_ID_ERROR);
			serverId = id;
		}
		logServerId(serverId);
		return serverId;
	}

	/** Resolves a (possibly partial) nym id, logging the result. */
	protected static String parseNymId(String nymId) throws OTException {
		Boolean parsing = isMeantForParsing(nymId);
		if (parsing == null)
			error(Event.PARSE_NYM_ID_ERROR);
		if (parsing) {
			String id = OTAPI.Wallet.getNymIdFromPartial(nymId);
			if (!Util.isValidString(id))
				error(Event.PARSE_NYM_ID_ERROR);
			nymId = id;
		}
		logNymId(nymId);
		return nymId;
	}

	/** Resolves a (possibly partial) asset id, logging the result. */
	protected static String parseAssetId(String assetId) throws OTException {
		Boolean parsing = isMeantForParsing(assetId);
		if (parsing == null)
			error(Event.PARSE_ASSET_ID_ERROR);
		if (parsing) {
			String id = OTAPI.Wallet.getAssetIdFromPartial(assetId);
			if (!Util.isValidString(id))
				error(Event.PARSE_ASSET_ID_ERROR);
			assetId = id;
} logAssetId(assetId); return assetId; } protected static String parseAccountId(String accountId) throws OTException { Boolean parsing = isMeantForParsing(accountId); if (parsing == null) error(Event.PARSE_ACCOUNT_ID_ERROR); if (parsing) { String id = OTAPI.Wallet.getAccountIdFromPartial(accountId); if (!Util.isValidString(id)) error(Event.PARSE_ACCOUNT_ID_ERROR); accountId = id; } logAccountId(accountId); return accountId; } protected static void logServerId(String serverId) { log(String.format("%12s: %s", Text.SERVER_ID, serverId)); } protected static void logNymId(String nymId) { log(String.format("%12s: %s", Text.NYM_ID, nymId)); } protected static void logAssetId(String assetId) { log(String.format("%12s: %s", Text.ASSET_ID, assetId)); } protected static void logAccountId(String accountId) { log(String.format("%12s: %s", Text.ACCOUNT_ID, accountId)); } public static void print(Object s) { System.out.println(s); } protected static void info(String message) { print(String.format("%s: %s", Text.INFO, message)); } protected static void publish(Object s) { System.out.println(); System.out.println(); System.out.println(); System.out.println(s); System.out.println(); System.out.println(); System.out.println(); } protected static void log(String message) { if (verboseClientLog) print(String.format("%s: %s", Text.LOG, message)); } protected static void skip(String message) { if (verboseClientSkip) print(String.format("%s: %s", Text.SKIP, message)); } protected static void skip(Text text) { skip(text.toString()); } protected static void warn(String message) { if (verboseClientWarn) print(String.format("%s: %s", Text.WARN, message)); } protected static void warn(String message, int result) { if (verboseClientWarn) print(String.format("%s: %s (%d)", Text.WARN, message, result)); } protected static void warn(Text text) { warn(text.toString()); } protected static void warn(Event event) { warn(event.toString()); } protected static void warn(Text text, int result) { 
warn(text.toString(), result); } protected static void warn(Event event, int result) { warn(event.toString(), result); } protected static void success(String message) { if (verboseClientSuccess) print(String.format("%s: %s", Text.SUCCESS, message)); } protected static void success(String message, int result) { if (verboseClientSuccess) print(String.format("%s: %s (%d)", Text.SUCCESS, message, result)); } protected static void success(Text text) { success(text.toString()); } protected static void success(Text text, int result) { success(text.toString(), result); } protected static void attempt(String message) { log(String.format("%s..", message)); } protected static void attempt(Text text) { attempt(text.toString()); } public static void error(OTSystemException e) throws OTSystemException { throw e; } public static void error(String message) throws OTSystemException { throw new OTSystemException(message); } public static void error(String message, int result) throws OTSystemException { throw new OTSystemException(message, result); } public static void error(Event event) throws OTSystemException { throw new OTSystemException(event); } public static void error(Event event, int result) throws OTSystemException { throw new OTSystemException(event, result); } public static void error(Text text) throws OTUserException { throw new OTUserException(text); } protected static Integer getPurseBalanceValue(String serverId, String assetId, String purse) throws OTException { return convertAmountToValue(getPurseBalance(serverId, assetId, purse)); } protected static String getAccountBalance(String accountId) { return OTAPI.GetAccount.balance(accountId); } private static String getPurseBalance(String serverId, String assetId, String purse) { return OTAPI.Purse.getBalance(serverId, assetId, purse); } private static String getAccountBalanceFormatted(String accountId) { return convertAmountToFormat(getAccountAssetId(accountId), getAccountBalance(accountId)); } private static String 
getPurseBalanceFormatted(String serverId, String assetId, String purse) { return convertAmountToFormat(assetId, getPurseBalance(serverId, assetId, purse)); } }
/*
 * Copyright 2016 Danny Althoff
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.dynamicfiles.projects.gradle.plugins.javafx.tasks.workers;

import com.oracle.tools.packager.Log;
import com.sun.javafx.tools.packager.CreateJarParams;
import com.sun.javafx.tools.packager.PackagerException;
import com.sun.javafx.tools.packager.PackagerLib;
import de.dynamicfiles.projects.gradle.plugins.javafx.JavaFXGradlePluginExtension;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.CopySpec;
import org.gradle.api.specs.Specs;

/**
 * Gradle worker that builds the executable JavaFX application JAR: it extracts
 * the output of the regular {@code jar} task, copies compile/runtime
 * dependencies into a lib-folder, assembles the manifest classpath, and then
 * runs the JavaFX {@link PackagerLib} to produce the final app JAR.
 *
 * @author Danny Althoff
 */
public class JfxJarWorker extends JfxAbstractWorker {

    /**
     * Runs the whole jfx-jar build for the given project. Order matters here:
     * extract jar -> configure packager params -> copy dependencies -> compute
     * classpath -> run packager -> copy extra resources -> cleanup.
     *
     * @param project the Gradle project whose 'jar' output gets repackaged
     * @throws GradleException on temp-dir creation failure, a bad
     *         jfxMainAppJarName value, an uncreatable lib-dir, or a packager error
     */
    public void jfxjar(Project project) {
        // get our configuration
        JavaFXGradlePluginExtension ext = project.getExtensions().getByType(JavaFXGradlePluginExtension.class);
        addDeployDirToSystemClassloader(project, ext);

        // set logger-level of the packager to match the plugin's verbose flag
        Log.setLogger(new Log.Logger(ext.isVerbose()));

        // within maven we would get the jar-content inside some folder BEFORE it is put into a JAR-file;
        // within gradle we have to extract that jar-file to get all contents inside a folder
        org.gradle.api.tasks.bundling.Jar jarTask = (org.gradle.api.tasks.bundling.Jar) project.getTasks().findByName("jar");

        Path someTempDir;
        try{
            someTempDir = Files.createTempDirectory("javafx-gradle-plugin");
        } catch(IOException ex){
            throw new GradleException("Couldn't create temporary folder", ex);
        }

        project.getLogger().info("Extraction of generated JAR-file ...");
        project.copy((CopySpec copySpec) -> {
            copySpec.into(someTempDir.toFile());
            if( ext.getAlternativePathToJarFile() == null ){
                copySpec.from(project.zipTree(jarTask.getArchivePath()));
            } else {
                // user pointed at a different JAR; fall back to the jar-task output when it is missing
                File alternativeJarFile = getAbsoluteOrProjectRelativeFile(project, ext.getAlternativePathToJarFile(), ext.isCheckForAbsolutePaths());
                if( alternativeJarFile.exists() ){
                    copySpec.from(project.zipTree(alternativeJarFile));
                } else {
                    project.getLogger().warn("Could not find specified alternative JAR-file");
                    copySpec.from(project.zipTree(jarTask.getArchivePath()));
                }
            }
        });

        project.getLogger().info("Creating parameter-map for packager...");
        CreateJarParams createJarParams = new CreateJarParams();
        createJarParams.setOutdir(getAbsoluteOrProjectRelativeFile(project, ext.getJfxAppOutputDir(), ext.isCheckForAbsolutePaths()));

        // check if we got some filename ending with ".jar"
        if( !ext.getJfxMainAppJarName().toLowerCase().endsWith(".jar") ){
            throw new GradleException("Please provide a proper value for jfxMainAppJarName-property! It has to end with \".jar\".");
        }
        createJarParams.setOutfile(ext.getJfxMainAppJarName());
        createJarParams.setApplicationClass(ext.getMainClass());
        createJarParams.setCss2bin(ext.isCss2bin());
        createJarParams.setPreloader(ext.getPreLoader());

        // manifest attributes are optional in the extension; normalize to an empty map.
        // NOTE: this same map instance is mutated further below (Permissions entry).
        Map<String, String> manifestAttributes = ext.getManifestAttributes();
        if( manifestAttributes == null ){
            manifestAttributes = new HashMap<>();
        }
        createJarParams.setManifestAttrs(manifestAttributes);

        final File libDir = new File(getAbsoluteOrProjectRelativeFile(project, ext.getJfxAppOutputDir(), ext.isCheckForAbsolutePaths()), ext.getLibFolderName());
        if( !libDir.exists() && !libDir.mkdirs() ){
            throw new GradleException("Unable to create app lib dir: " + libDir);
        }

        if( ext.isUpdateExistingJar() ){
            // update mode: hand the existing jar-file to the packager directly
            createJarParams.addResource(null, jarTask.getArchivePath());
        } else {
            // produced and extracted jar-file contents
            createJarParams.addResource(someTempDir.toFile(), "");
        }

        // names of all dependency jars that actually landed in the lib-folder
        Set<String> foundLibs = new HashSet<>();

        // copy dependencies
        // got inspiration from: http://opensourceforgeeks.blogspot.de/2015/05/knowing-gradle-dependency-jars-download.html
        Configuration compileConfiguration = project.getConfigurations().getByName("compile");
        if( !ext.isSkipCopyingDependencies() ){
            copyModuleDependencies(compileConfiguration, "compile", project, libDir, foundLibs);
            copyFileDependencies(compileConfiguration, "compile", project, ext.isAddPackagerJar(), libDir, foundLibs);
        } else {
            project.getLogger().info("Skipped copying compile dependencies");
        }

        Configuration runtimeConfiguration = project.getConfigurations().getByName("runtime");
        if( !ext.isSkipCopyingDependencies() ){
            copyModuleDependencies(runtimeConfiguration, "runtime", project, libDir, foundLibs);
            copyFileDependencies(runtimeConfiguration, "runtime", project, ext.isAddPackagerJar(), libDir, foundLibs);
        } else {
            project.getLogger().info("Skipped copying runtime dependencies");
        }

        if( ext.isUseLibFolderContentForManifestClasspath() ){
            // build the manifest classpath from whatever is physically in the lib-folder
            StringBuilder scannedClasspath = new StringBuilder();
            try{
                Files.walkFileTree(libDir.toPath(), new SimpleFileVisitor<Path>() {
                    @Override
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        // always use forward slashes in the manifest, even on Windows
                        scannedClasspath.append(ext.getLibFolderName().replace("\\", "/")).append("/").append(libDir.toPath().relativize(file).toString().replace("\\", "/")).append(" ");
                        return super.visitFile(file, attrs);
                    }
                });
            } catch(IOException ioex){
                project.getLogger().warn("Got problem while scanning lib-folder", ioex);
            }
            createJarParams.setClasspath(scannedClasspath.toString());
        } else {
            if( !foundLibs.isEmpty() ){
                // "lib/a.jar lib/b.jar ..." from the set of copied dependency names
                createJarParams.setClasspath(ext.getLibFolderName() + "/" + String.join(" " + ext.getLibFolderName() + "/", foundLibs));
            }
        }

        // a non-blank fixed classpath always wins over both scanned and computed classpaths
        Optional.ofNullable(ext.getFixedManifestClasspath()).ifPresent(manifestClasspath -> {
            if( manifestClasspath.trim().isEmpty() ){
                return;
            }
            createJarParams.setClasspath(manifestClasspath);
            if( ext.isUseLibFolderContentForManifestClasspath() ){
                project.getLogger().warn("You specified to use the content of the lib-folder AND specified a fixed classpath. The fixed classpath will get taken.");
            }
        });

        // https://docs.oracle.com/javase/8/docs/technotes/guides/deploy/manifest.html#JSDPG896
        if( ext.isAllPermissions() ){
            manifestAttributes.put("Permissions", "all-permissions");
        }

        PackagerLib packagerLib = new PackagerLib();
        try{
            project.getLogger().info("Running packager...");
            packagerLib.packageAsJar(createJarParams);
        } catch(PackagerException ex){
            throw new GradleException("Unable to build JFX JAR for application", ex);
        }

        if( ext.isCopyAdditionalAppResourcesToJar() ){
            Optional.ofNullable(ext.getAdditionalAppResources())
                    .filter(appRessourcesString -> appRessourcesString != null)
                    .map(appRessourcesString -> getAbsoluteOrProjectRelativeFile(project, appRessourcesString, ext.isCheckForAbsolutePaths()))
                    .filter(File::exists)
                    .ifPresent(appResources -> {
                        project.getLogger().info("Copying additional app ressources...");
                        try{
                            Path targetFolder = getAbsoluteOrProjectRelativeFile(project, ext.getJfxAppOutputDir(), ext.isCheckForAbsolutePaths()).toPath();
                            Path sourceFolder = appResources.toPath();
                            copyRecursive(sourceFolder, targetFolder, project.getLogger());
                        } catch(IOException e){
                            project.getLogger().warn("Couldn't copy additional application resource-file(s).", e);
                        }
                    });
        }

        // cleanup: remove lib-folder, when nothing ended up there
        if( libDir.list().length == 0 ){
            project.getLogger().info("Deleting unused lib-folder...");
            libDir.delete();
        }

        // cleanup gradle-temp-folder (recursive delete)
        // http://www.adam-bien.com/roller/abien/entry/java_7_deleting_recursively_a
        try{
            Files.walkFileTree(someTempDir, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    Files.delete(file);
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                    Files.delete(dir);
                    return FileVisitResult.CONTINUE;
                }
            });
        } catch(IOException iOException){
            // ignored — best-effort cleanup of a temp folder; failure here is harmless
        }
    }

    /**
     * Copies every artifact of the configuration's first-level module (i.e.
     * repository-resolved) dependencies into the lib-folder; each file name that
     * copied successfully is recorded in {@code foundLibs}.
     *
     * @param configuration the Gradle configuration to resolve ("compile"/"runtime")
     * @param toPrint       label used only for log output
     * @param project       project, used for logging
     * @param libDir        destination lib-folder
     * @param foundLibs     out-parameter: names of copied jar files
     */
    private void copyModuleDependencies(Configuration configuration, String toPrint, Project project, final File libDir, Set<String> foundLibs) {
        project.getLogger().info("Copying defined " + toPrint + "-dependencies...");
        // this will work for all non-file dependencies
        configuration.getResolvedConfiguration().getFirstLevelModuleDependencies().forEach(resolvedDep -> {
            // TODO add dependency-filter
            resolvedDep.getAllModuleArtifacts().forEach(artifact -> {
                try{
                    Path artifactPath = artifact.getFile().toPath();
                    String artifactFileName = artifactPath.getFileName().toString();
                    Files.copy(artifactPath, libDir.toPath().resolve(artifactFileName), StandardCopyOption.REPLACE_EXISTING);
                    // will only append, when everything went right
                    foundLibs.add(artifactFileName);
                } catch(IOException ex){
                    project.getLogger().warn("Couldn't copy dependency " + artifact.getId().getComponentIdentifier().toString(), ex);
                }
            });
        });
    }

    /**
     * Copies plain file dependencies (everything "getFiles" resolves, which
     * includes non-maven files such as packager.jar) into the lib-folder.
     * Skips packager.jar unless {@code isPackagerJarToBeAdded} is set, and skips
     * files already recorded in {@code foundLibs}.
     *
     * @param configuration          the Gradle configuration to resolve
     * @param toPrint                label used only for log output
     * @param project                project, used for logging
     * @param isPackagerJarToBeAdded whether packager.jar should be bundled too
     * @param libDir                 destination lib-folder
     * @param foundLibs              in/out: names of already-copied jar files
     */
    private void copyFileDependencies(Configuration configuration, String toPrint, Project project, boolean isPackagerJarToBeAdded, final File libDir, Set<String> foundLibs) {
        project.getLogger().info("Copying defined " + toPrint + "-dependency-files...");
        // inside "getFiles" all non-maven dependencies (like packager.jar) will be available
        configuration.getResolvedConfiguration().getFiles(Specs.SATISFIES_ALL).forEach(someFile -> {
            try{
                Path artifactPath = someFile.toPath();
                String artifactFileName = artifactPath.getFileName().toString();
                if( "packager.jar".equals(artifactFileName) && !isPackagerJarToBeAdded ){
                    project.getLogger().info("Skipped adding packager.jar.");
                    return;
                }
                // add this lib only, when not already present (could happen on file-dependencies ... which behaves different from maven-model)
                if( !foundLibs.contains(artifactFileName) ){
                    Files.copy(artifactPath, libDir.toPath().resolve(artifactFileName), StandardCopyOption.REPLACE_EXISTING);
                    foundLibs.add(artifactFileName);
                }
            } catch(IOException ex){
                project.getLogger().warn("Couldn't copy dependency " + someFile.getName(), ex);
            }
        });
    }
}
package com.jakewharton.u2020.ui.trending;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.drawable.AnimationDrawable;
import android.os.Build;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.util.AttributeSet;
import android.view.ContextThemeWrapper;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import com.jakewharton.u2020.R;
import com.jakewharton.u2020.data.Funcs;
import com.jakewharton.u2020.data.Injector;
import com.jakewharton.u2020.data.IntentFactory;
import com.jakewharton.u2020.data.api.GithubService;
import com.jakewharton.u2020.data.api.Order;
import com.jakewharton.u2020.data.api.Results;
import com.jakewharton.u2020.data.api.SearchQuery;
import com.jakewharton.u2020.data.api.Sort;
import com.jakewharton.u2020.data.api.model.RepositoriesResponse;
import com.jakewharton.u2020.data.api.model.Repository;
import com.jakewharton.u2020.data.api.transforms.SearchResultToRepositoryList;
import com.jakewharton.u2020.ui.misc.BetterViewAnimator;
import com.jakewharton.u2020.ui.misc.DividerItemDecoration;
import com.jakewharton.u2020.ui.misc.EnumAdapter;
import com.jakewharton.u2020.util.Intents;
import com.squareup.picasso.Picasso;
import javax.inject.Inject;
import butterknife.Bind;
import butterknife.BindDimen;
import butterknife.ButterKnife;
import butterknife.OnItemSelected;
import retrofit.Response;
import retrofit.Result;
import rx.Observable;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;
import rx.functions.Func1;
import rx.subjects.PublishSubject;
import rx.subscriptions.CompositeSubscription;
import timber.log.Timber;

import static com.jakewharton.u2020.ui.misc.DividerItemDecoration.VERTICAL_LIST;

/**
 * Screen that shows GitHub repositories trending over a user-selected timespan.
 *
 * Data flow: the timespan Spinner pushes selections into {@code timespanSubject};
 * while attached to the window, each emission triggers a GitHub search whose
 * successes feed {@code trendingAdapter} and whose failures feed
 * {@code trendingError}. Pull-to-refresh simply re-emits the current selection.
 */
public final class TrendingView extends LinearLayout
    implements SwipeRefreshLayout.OnRefreshListener, TrendingAdapter.RepositoryClickListener {
  // Views bound by ButterKnife in onFinishInflate().
  @Bind(R.id.trending_toolbar) Toolbar toolbarView;
  @Bind(R.id.trending_timespan) Spinner timespanView;
  @Bind(R.id.trending_animator) BetterViewAnimator animatorView;
  @Bind(R.id.trending_swipe_refresh) SwipeRefreshLayout swipeRefreshView;
  @Bind(R.id.trending_list) RecyclerView trendingView;
  @Bind(R.id.trending_loading_message) TextView loadingMessageView;
  @BindDimen(R.dimen.trending_divider_padding_start) float dividerPaddingStart;

  // Collaborators injected via the object graph obtained from Injector.
  @Inject GithubService githubService;
  @Inject Picasso picasso;
  @Inject IntentFactory intentFactory;
  @Inject DrawerLayout drawerLayout;

  // Stream of timespan selections; the single driver of network requests.
  private final PublishSubject<TrendingTimespan> timespanSubject;
  private final EnumAdapter<TrendingTimespan> timespanAdapter;
  private final TrendingAdapter trendingAdapter;
  // All Rx subscriptions made in onAttachedToWindow; torn down in onDetachedFromWindow.
  private final CompositeSubscription subscriptions = new CompositeSubscription();

  public TrendingView(Context context, AttributeSet attrs) {
    super(context, attrs);
    // Skip injection in the layout editor, where no object graph exists.
    if (!isInEditMode()) {
      Injector.obtain(context).inject(this);
    }

    timespanSubject = PublishSubject.create();
    timespanAdapter = new TrendingTimespanAdapter(
        new ContextThemeWrapper(getContext(), R.style.Theme_U2020_TrendingTimespan));
    trendingAdapter = new TrendingAdapter(picasso, this);
  }

  /** Binds views and wires up toolbar, spinner, swipe-refresh, and the list. */
  @Override protected void onFinishInflate() {
    super.onFinishInflate();
    ButterKnife.bind(this);

    // Animated "..." appended to the loading message.
    AnimationDrawable ellipsis =
        (AnimationDrawable) getResources().getDrawable(R.drawable.dancing_ellipsis);
    loadingMessageView.setCompoundDrawablesWithIntrinsicBounds(null, null, ellipsis, null);
    ellipsis.start();

    toolbarView.setNavigationIcon(R.drawable.menu_icon);
    toolbarView.setNavigationOnClickListener(v -> drawerLayout.openDrawer(GravityCompat.START));

    timespanView.setAdapter(timespanAdapter);
    timespanView.setSelection(TrendingTimespan.WEEK.ordinal());

    swipeRefreshView.setColorSchemeResources(R.color.accent);
    swipeRefreshView.setOnRefreshListener(this);

    // Flip between the empty view and the list whenever the adapter's contents change,
    // and stop the refresh spinner either way.
    trendingAdapter.registerAdapterDataObserver(new RecyclerView.AdapterDataObserver() {
      @Override public void onChanged() {
        animatorView.setDisplayedChildId(trendingAdapter.getItemCount() == 0 //
            ? R.id.trending_empty //
            : R.id.trending_swipe_refresh);
        swipeRefreshView.setRefreshing(false);
      }
    });
    trendingView.setLayoutManager(new LinearLayoutManager(getContext()));
    trendingView.addItemDecoration(
        new DividerItemDecoration(getContext(), VERTICAL_LIST, dividerPaddingStart, safeIsRtl()));
    trendingView.setAdapter(trendingAdapter);
  }

  /** Subscribes the request pipeline and kicks off the first load. */
  @Override protected void onAttachedToWindow() {
    super.onAttachedToWindow();

    // share() so success and error branches observe the same underlying request.
    Observable<Result<RepositoriesResponse>> result = timespanSubject //
        .flatMap(trendingSearch) //
        .observeOn(AndroidSchedulers.mainThread()) //
        .share();
    subscriptions.add(result //
        .filter(Results.isSuccess()) //
        .map(SearchResultToRepositoryList.instance()) //
        .subscribe(trendingAdapter));
    subscriptions.add(result //
        .filter(Funcs.not(Results.isSuccess())) //
        .subscribe(trendingError));

    // Load the default selection.
    onRefresh();
  }

  // Maps a timespan to a GitHub repository search, newest-stars first.
  private final Func1<TrendingTimespan, Observable<Result<RepositoriesResponse>>> trendingSearch =
      new Func1<TrendingTimespan, Observable<Result<RepositoriesResponse>>>() {
        @Override
        public Observable<Result<RepositoriesResponse>> call(TrendingTimespan trendingTimespan) {
          SearchQuery trendingQuery = new SearchQuery.Builder() //
              .createdSince(trendingTimespan.createdSince()) //
              .build();
          return githubService.repositories(trendingQuery, Sort.STARS, Order.DESC);
        }
      };

  // Logs network/HTTP failures, stops the spinner, and shows the error child.
  private final Action1<Result<RepositoriesResponse>> trendingError =
      new Action1<Result<RepositoriesResponse>>() {
        @Override public void call(Result<RepositoriesResponse> result) {
          if (result.isError()) {
            Timber.e(result.error(), "Failed to get trending repositories");
          } else {
            Response<RepositoriesResponse> response = result.response();
            Timber.e("Failed to get trending repositories. Server returned " + response.code());
          }
          swipeRefreshView.setRefreshing(false);
          animatorView.setDisplayedChildId(R.id.trending_error);
        }
      };

  /** Tears down every subscription made in onAttachedToWindow. */
  @Override protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    subscriptions.unsubscribe();
  }

  /** Spinner selection handler: shows the loading child and emits the new timespan. */
  @OnItemSelected(R.id.trending_timespan) void timespanSelected(final int position) {
    if (animatorView.getDisplayedChildId() != R.id.trending_swipe_refresh) {
      animatorView.setDisplayedChildId(R.id.trending_loading);
    }
    // For whatever reason, the SRL's spinner does not draw itself when we call setRefreshing(true)
    // unless it is posted.
    post(() -> {
      swipeRefreshView.setRefreshing(true);
      timespanSubject.onNext(timespanAdapter.getItem(position));
    });
  }

  /** Pull-to-refresh: re-run the search for the currently selected timespan. */
  @Override public void onRefresh() {
    timespanSelected(timespanView.getSelectedItemPosition());
  }

  /** Opens the tapped repository's page in the browser. */
  @Override public void onRepositoryClick(Repository repository) {
    Intents.maybeStartActivity(getContext(), intentFactory.createUrlIntent(repository.html_url));
  }

  // RTL detection only exists from API 17 (JELLY_BEAN_MR1); older devices report LTR.
  private boolean safeIsRtl() {
    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 && isRtl();
  }

  @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1) private boolean isRtl() {
    return getLayoutDirection() == LAYOUT_DIRECTION_RTL;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2.impl; import java.io.File; import java.lang.reflect.Constructor; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.vfs2.CacheStrategy; import org.apache.commons.vfs2.Capability; import org.apache.commons.vfs2.FileContentInfoFactory; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemConfigBuilder; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemManager; import org.apache.commons.vfs2.FileSystemOptions; import org.apache.commons.vfs2.FileType; import org.apache.commons.vfs2.FilesCache; import org.apache.commons.vfs2.NameScope; import org.apache.commons.vfs2.VFS; import org.apache.commons.vfs2.cache.SoftRefFilesCache; import org.apache.commons.vfs2.operations.FileOperationProvider; import 
org.apache.commons.vfs2.provider.AbstractFileName; import org.apache.commons.vfs2.provider.AbstractFileProvider; import org.apache.commons.vfs2.provider.DefaultURLStreamHandler; import org.apache.commons.vfs2.provider.FileProvider; import org.apache.commons.vfs2.provider.FileReplicator; import org.apache.commons.vfs2.provider.LocalFileProvider; import org.apache.commons.vfs2.provider.TemporaryFileStore; import org.apache.commons.vfs2.provider.UriParser; import org.apache.commons.vfs2.provider.VfsComponent; /** * A default file system manager implementation. * * @author <a href="http://commons.apache.org/vfs/team-list.html">Commons VFS team</a> * 2006) $ */ public class DefaultFileSystemManager implements FileSystemManager { /** * The provider for local files. */ private LocalFileProvider localFileProvider; /** * The default provider. */ private FileProvider defaultProvider; /** * The file replicator to use. */ private FileReplicator fileReplicator; /** * Mapping from URI scheme to FileProvider. */ private final Map<String, FileProvider> providers = new HashMap<String, FileProvider>(); /** * All components used by this manager. */ private final ArrayList<Object> components = new ArrayList<Object>(); /** * The base file to use for relative URI. */ private FileObject baseFile; /** * The files cache */ private FilesCache filesCache; /** * The cache strategy */ private CacheStrategy fileCacheStrategy; /** * Class which decorates all returned fileObjects */ private Class<?> fileObjectDecorator; private Constructor<?> fileObjectDecoratorConst; /** * The class to use to determine the content-type (mime-type) */ private FileContentInfoFactory fileContentInfoFactory; /** * The logger to use. */ private Log log = LogFactory.getLog(getClass()); /** * The context to pass to providers. 
*/ private final DefaultVfsComponentContext context = new DefaultVfsComponentContext( this); private TemporaryFileStore tempFileStore; private final FileTypeMap map = new FileTypeMap(); private final VirtualFileProvider vfsProvider = new VirtualFileProvider(); private boolean init; private final Map<String, List<FileOperationProvider>> operationProviders = new HashMap<String, List<FileOperationProvider>>(); /** * Returns the logger used by this manager. * @return the Logger. */ protected Log getLogger() { return log; } /** * Registers a file system provider. The manager takes care of all lifecycle * management. A provider may be registered multiple times. * * @param urlScheme * The scheme the provider will handle. * @param provider * The provider. * @throws FileSystemException if an error occurs adding the provider. */ public void addProvider(final String urlScheme, final FileProvider provider) throws FileSystemException { addProvider(new String[] {urlScheme}, provider); } /** * Registers a file system provider. The manager takes care of all lifecycle * management. A provider may be registered multiple times. * * @param urlSchemes * The schemes the provider will handle. * @param provider * The provider. * @throws FileSystemException if an error occurs adding the provider. 
*/ public void addProvider(final String[] urlSchemes, final FileProvider provider) throws FileSystemException { // Warn about duplicate providers for (int i = 0; i < urlSchemes.length; i++) { final String scheme = urlSchemes[i]; if (providers.containsKey(scheme)) { throw new FileSystemException( "vfs.impl/multiple-providers-for-scheme.error", scheme); } } // Contextualise the component (if not already) setupComponent(provider); // Add to map for (int i = 0; i < urlSchemes.length; i++) { final String scheme = urlSchemes[i]; providers.put(scheme, provider); } if (provider instanceof LocalFileProvider && localFileProvider == null) { localFileProvider = (LocalFileProvider) provider; } } /** * Returns true if this manager has a provider for a particular scheme. * @param scheme The scheme to check. * @return true if a provider is configured for this scheme, false otherwise. */ public boolean hasProvider(final String scheme) { return providers.containsKey(scheme); } /** * Adds an filename extension mapping. * * @param extension The file name extension. * @param scheme The scheme to use for files with this extension. */ public void addExtensionMap(final String extension, final String scheme) { map.addExtension(extension, scheme); } /** * Adds a mime type mapping. * * @param mimeType The mime type. * @param scheme The scheme to use for files with this mime type. */ public void addMimeTypeMap(final String mimeType, final String scheme) { map.addMimeType(mimeType, scheme); } /** * Sets the default provider. This is the provider that will handle URI with * unknown schemes. The manager takes care of all lifecycle management. * @param provider The FileProvider. * @throws FileSystemException if an error occurs setting the provider. */ public void setDefaultProvider(final FileProvider provider) throws FileSystemException { setupComponent(provider); defaultProvider = provider; } /** * Returns the filesCache implementation used to cache files. * @return The FilesCache. 
*/ public FilesCache getFilesCache() { return filesCache; } /** * Sets the filesCache implementation used to cache files. * @param filesCache The FilesCache. * @throws FileSystemException if an error occurs setting the cache.. */ public void setFilesCache(final FilesCache filesCache) throws FileSystemException { if (init) { throw new FileSystemException("vfs.impl/already-inited.error"); } this.filesCache = filesCache; } /** * <p> * Set the cache strategy to use when dealing with file object data. You can * set it only once before the FileSystemManager is initialized. * <p /> * <p> * The default is {@link CacheStrategy#ON_RESOLVE} * </p> * * @param fileCacheStrategy The CacheStrategy to use. * @throws FileSystemException * if this is not possible. e.g. it is already set. */ public void setCacheStrategy(final CacheStrategy fileCacheStrategy) throws FileSystemException { if (init) { throw new FileSystemException("vfs.impl/already-inited.error"); } this.fileCacheStrategy = fileCacheStrategy; } /** * Get the cache strategy used. * @return The CacheStrategy. */ public CacheStrategy getCacheStrategy() { return fileCacheStrategy; } /** * Get the file object decorator used. * @return The decorator. */ public Class<?> getFileObjectDecorator() { return fileObjectDecorator; } /** * The constructor associated to the fileObjectDecorator. * We cache it here for performance reasons. * @return The decorator's Constructor. */ public Constructor<?> getFileObjectDecoratorConst() { return fileObjectDecoratorConst; } /** * Set a fileObject decorator to be used for ALL returned file objects. * * @param fileObjectDecorator must be inherted from {@link DecoratedFileObject} a has to provide a * constructor with a single {@link FileObject} as argument * @throws FileSystemException if an error occurs setting the decorator. 
*/ public void setFileObjectDecorator(Class<?> fileObjectDecorator) throws FileSystemException { if (init) { throw new FileSystemException("vfs.impl/already-inited.error"); } if (!DecoratedFileObject.class.isAssignableFrom(fileObjectDecorator)) { throw new FileSystemException("vfs.impl/invalid-decorator.error", fileObjectDecorator.getName()); } try { fileObjectDecoratorConst = fileObjectDecorator.getConstructor(new Class[]{FileObject.class}); } catch (NoSuchMethodException e) { throw new FileSystemException("vfs.impl/invalid-decorator.error", fileObjectDecorator.getName(), e); } this.fileObjectDecorator = fileObjectDecorator; } /** * get the fileContentInfoFactory used to determine the infos of a file * content. * @return The FileContentInfoFactory. */ public FileContentInfoFactory getFileContentInfoFactory() { return fileContentInfoFactory; } /** * set the fileContentInfoFactory used to determine the infos of a file * content. * @param fileContentInfoFactory The FileContentInfoFactory. * @throws FileSystemException if an error occurs setting the FileContentInfoFactory. */ public void setFileContentInfoFactory(FileContentInfoFactory fileContentInfoFactory) throws FileSystemException { if (init) { throw new FileSystemException("vfs.impl/already-inited.error"); } this.fileContentInfoFactory = fileContentInfoFactory; } /** * Sets the file replicator to use. The manager takes care of all lifecycle * management. * @param replicator The FileReplicator. * @throws FileSystemException if an error occurs setting the replicator. */ public void setReplicator(final FileReplicator replicator) throws FileSystemException { setupComponent(replicator); fileReplicator = replicator; } /** * Sets the temporary file store to use. The manager takes care of all * lifecycle management. * @param tempFileStore The temporary FileStore. * @throws FileSystemException if an error occurs adding the file store. 
     */
    public void setTemporaryFileStore(final TemporaryFileStore tempFileStore) throws FileSystemException {
        setupComponent(tempFileStore);
        this.tempFileStore = tempFileStore;
    }

    /**
     * Sets the logger to use.
     * @param log The Logger to use.
     */
    public void setLogger(final Log log) {
        this.log = log;
    }

    /**
     * Initialises a component, if it has not already been initialised.
     * A VfsComponent additionally receives the logger and context before its
     * own init() is invoked; plain objects are just tracked for later close.
     * @param component The component to setup.
     * @throws FileSystemException if an error occurs.
     */
    private void setupComponent(final Object component) throws FileSystemException {
        if (!components.contains(component)) {
            if (component instanceof VfsComponent) {
                final VfsComponent vfsComponent = (VfsComponent) component;
                vfsComponent.setLogger(getLogger());
                vfsComponent.setContext(context);
                vfsComponent.init();
            }
            components.add(component);
        }
    }

    /**
     * Closes a component, if it has not already been closed.
     * Only VfsComponents have an actual close() hook; any other tracked
     * component is simply removed from the tracking list.
     * @param component The component to close.
     */
    private void closeComponent(final Object component) {
        if (component != null && components.contains(component)) {
            if (component instanceof VfsComponent) {
                final VfsComponent vfsComponent = (VfsComponent) component;
                vfsComponent.close();
            }
            components.remove(component);
        }
    }

    /**
     * Returns the file replicator.
     *
     * @return The file replicator. Never returns null.
     * @throws FileSystemException if there is no FileReplicator.
     */
    public FileReplicator getReplicator() throws FileSystemException {
        if (fileReplicator == null) {
            throw new FileSystemException("vfs.impl/no-replicator.error");
        }
        return fileReplicator;
    }

    /**
     * Returns the temporary file store.
     *
     * @return The file store. Never returns null.
     * @throws FileSystemException if there is no TemporaryFileStore.
     */
    public TemporaryFileStore getTemporaryFileStore() throws FileSystemException {
        if (tempFileStore == null) {
            throw new FileSystemException("vfs.impl/no-temp-file-store.error");
        }
        return tempFileStore;
    }

    /**
     * Initialises this manager.
     * @throws FileSystemException if an error occurs during initialization.
*/ public void init() throws FileSystemException { if (filesCache == null) { // filesCache = new DefaultFilesCache(); filesCache = new SoftRefFilesCache(); } if (fileContentInfoFactory == null) { fileContentInfoFactory = new FileContentInfoFilenameFactory(); } if (fileCacheStrategy == null) { fileCacheStrategy = CacheStrategy.ON_RESOLVE; } setupComponent(filesCache); setupComponent(vfsProvider); init = true; } /** * Closes all files created by this manager, and cleans up any temporary * files. Also closes all providers and the replicator. */ public void close() { if (!init) { return; } // Close the providers. for (Iterator<?> iterator = providers.values().iterator(); iterator .hasNext();) { final Object provider = iterator.next(); closeComponent(provider); } // Close the other components closeComponent(defaultProvider); closeComponent(fileReplicator); closeComponent(tempFileStore); components.clear(); providers.clear(); filesCache.close(); localFileProvider = null; defaultProvider = null; fileReplicator = null; tempFileStore = null; init = false; } /** * Free all resources used by unused filesystems created by this manager. */ public void freeUnusedResources() { if (!init) { return; } // Close the providers. for (Iterator<FileProvider> iterator = providers.values().iterator(); iterator .hasNext();) { final AbstractFileProvider provider = (AbstractFileProvider) iterator .next(); provider.freeUnusedResources(); } } /** * Sets the base file to use when resolving relative URI. * @param baseFile The new base FileObject. * @throws FileSystemException if an error occurs. */ public void setBaseFile(final FileObject baseFile) throws FileSystemException { this.baseFile = baseFile; } /** * Sets the base file to use when resolving relative URI. * @param baseFile The new base FileObject. * @throws FileSystemException if an error occurs. 
*/ public void setBaseFile(final File baseFile) throws FileSystemException { this.baseFile = getLocalFileProvider().findLocalFile(baseFile); } /** * Returns the base file used to resolve relative URI. * @return The FileObject that represents the base file. * @throws FileSystemException if an error occurs. */ public FileObject getBaseFile() throws FileSystemException { return baseFile; } /** * Locates a file by URI. * @param uri The URI of the file to locate. * @return The FileObject for the located file. * @throws FileSystemException if the file cannot be located or an error occurs. */ public FileObject resolveFile(final String uri) throws FileSystemException { // return resolveFile(baseFile, uri); return resolveFile(getBaseFile(), uri); } /** * Locate a file by URI, use the FileSystemOptions for file-system creation. * @param uri The URI of the file to locate. * @param fileSystemOptions The options for the FileSystem. * @return The FileObject for the located file. * @throws FileSystemException if the file cannot be located or an error occurs. */ public FileObject resolveFile(final String uri, final FileSystemOptions fileSystemOptions) throws FileSystemException { // return resolveFile(baseFile, uri, fileSystemOptions); return resolveFile(getBaseFile(), uri, fileSystemOptions); } /** * Locates a file by URI. * @param baseFile The base File to use to locate the file. * @param uri The URI of the file to locate. * @return The FileObject for the located file. * @throws FileSystemException if the file cannot be located or an error occurs. */ public FileObject resolveFile(final File baseFile, final String uri) throws FileSystemException { final FileObject baseFileObj = getLocalFileProvider().findLocalFile( baseFile); return resolveFile(baseFileObj, uri); } /** * Resolves a URI, relative to a base file. * @param baseFile The base FileOjbect to use to locate the file. * @param uri The URI of the file to locate. * @return The FileObject for the located file. 
* @throws FileSystemException if the file cannot be located or an error occurs. */ public FileObject resolveFile(final FileObject baseFile, final String uri) throws FileSystemException { return resolveFile(baseFile, uri, baseFile == null ? null : baseFile .getFileSystem().getFileSystemOptions()); } /** * Resolves a URI, realtive to a base file with specified FileSystem * configuration. * @param baseFile The base file. * @param uri The file name. May be a fully qualified or relative path or a url. * @param fileSystemOptions Options to pass to the file system. * @return A FileObject representing the target file. * @throws FileSystemException if an error occurs accessing the file. */ public FileObject resolveFile(final FileObject baseFile, final String uri, final FileSystemOptions fileSystemOptions) throws FileSystemException { final FileObject realBaseFile; if (baseFile != null && VFS.isUriStyle() && baseFile.getName().getType() == FileType.FILE) { realBaseFile = baseFile.getParent(); } else { realBaseFile = baseFile; } // TODO: use resolveName and use this name to resolve the fileObject UriParser.checkUriEncoding(uri); if (uri == null) { throw new IllegalArgumentException(); } // Extract the scheme final String scheme = UriParser.extractScheme(uri); if (scheme != null) { // An absolute URI - locate the provider final FileProvider provider = providers.get(scheme); if (provider != null) { return provider.findFile(realBaseFile, uri, fileSystemOptions); } // Otherwise, assume a local file } // Handle absolute file names if (localFileProvider != null && localFileProvider.isAbsoluteLocalName(uri)) { return localFileProvider.findLocalFile(uri); } if (scheme != null) { // An unknown scheme - hand it to the default provider if (defaultProvider == null) { throw new FileSystemException("vfs.impl/unknown-scheme.error", new Object[] {scheme, uri}); } return defaultProvider.findFile(realBaseFile, uri, fileSystemOptions); } // Assume a relative name - use the supplied base file if 
(realBaseFile == null) { throw new FileSystemException("vfs.impl/find-rel-file.error", uri); } return realBaseFile.resolveFile(uri); } /** * Resolves a name, relative to the file. If the supplied name is an * absolute path, then it is resolved relative to the root of the file * system that the file belongs to. If a relative name is supplied, then it * is resolved relative to this file name. * @param root The base FileName. * @param path The path to the file relative to the base FileName or an absolute path. * @return The constructed FileName. * @throws FileSystemException if an error occurs constructing the FileName. */ public FileName resolveName(final FileName root, final String path) throws FileSystemException { return resolveName(root, path, NameScope.FILE_SYSTEM); } /** * Resolves a name, relative to the root. * * @param base the base filename * @param name the name * @param scope the {@link NameScope} * @return The FileName of the file. * @throws FileSystemException if an error occurs. 
*/ public FileName resolveName(final FileName base, final String name, final NameScope scope) throws FileSystemException { final FileName realBase; if (base != null && VFS.isUriStyle() && base.getType() == FileType.FILE) { realBase = base.getParent(); } else { realBase = base; } final StringBuilder buffer = new StringBuilder(name); // Adjust separators UriParser.fixSeparators(buffer); String scheme = UriParser.extractScheme(buffer.toString()); if(realBase!=null) scheme = null; // Determine whether to prepend the base path if (name.length() == 0 || (scheme == null && buffer.charAt(0) != FileName.SEPARATOR_CHAR)) { // Supplied path is not absolute if (!VFS.isUriStyle()) { // when using uris the parent already do have the trailing "/" buffer.insert(0, FileName.SEPARATOR_CHAR); } buffer.insert(0, realBase.getPath()); } // // UriParser.canonicalizePath(buffer, 0, name.length()); // Normalise the path FileType fileType = UriParser.normalisePath(buffer); // Check the name is ok final String resolvedPath = buffer.toString(); if (!AbstractFileName .checkName(realBase.getPath(), resolvedPath, scope)) { throw new FileSystemException( "vfs.provider/invalid-descendent-name.error", name); } String fullPath; if (scheme != null) { fullPath = resolvedPath; } else { scheme = realBase.getScheme(); fullPath = realBase.getRootURI() + resolvedPath; } final FileProvider provider = providers.get(scheme); if (provider != null) { // todo: extend the filename parser to be able to parse // only a pathname and take the missing informations from // the base. Then we can get rid of the string operation. 
// // String fullPath = base.getRootURI() + // resolvedPath.substring(1); return provider.parseUri(realBase, fullPath); } if (scheme != null) { // An unknown scheme - hand it to the default provider - if possible if (defaultProvider != null) { return defaultProvider.parseUri(realBase, fullPath); } } // todo: avoid fallback to this point // this happens if we have a virtual filesystem (no provider for scheme) return ((AbstractFileName) realBase).createName(resolvedPath, fileType); } /** * Resolve the uri to a filename. * @param uri The URI to resolve. * @return The FileName of the file. * @throws FileSystemException if an error occurs. */ public FileName resolveURI(String uri) throws FileSystemException { UriParser.checkUriEncoding(uri); if (uri == null) { throw new IllegalArgumentException(); } // Extract the scheme final String scheme = UriParser.extractScheme(uri); if (scheme != null) { // An absolute URI - locate the provider final FileProvider provider = providers.get(scheme); if (provider != null) { return provider.parseUri(null, uri); } // Otherwise, assume a local file } // Handle absolute file names if (localFileProvider != null && localFileProvider.isAbsoluteLocalName(uri)) { return localFileProvider.parseUri(null, uri); } if (scheme != null) { // An unknown scheme - hand it to the default provider if (defaultProvider == null) { throw new FileSystemException("vfs.impl/unknown-scheme.error", new Object[] {scheme, uri}); } return defaultProvider.parseUri(null, uri); } // Assume a relative name - use the supplied base file if (baseFile == null) { throw new FileSystemException("vfs.impl/find-rel-file.error", uri); } return resolveName(baseFile.getName(), uri, NameScope.FILE_SYSTEM); } /** * Converts a local file into a {@link FileObject}. * @param file The input File. * @return the create FileObject * @throws FileSystemException if an error occurs creating the file naem. 
     */
    public FileObject toFileObject(final File file) throws FileSystemException {
        return getLocalFileProvider().findLocalFile(file);
    }

    /**
     * Creates a layered file system over the given file.
     * @param scheme The scheme to use.
     * @param file The FileObject.
     * @return The layered FileObject.
     * @throws FileSystemException if an error occurs.
     */
    public FileObject createFileSystem(final String scheme, final FileObject file)
        throws FileSystemException {
        final FileProvider provider = providers.get(scheme);
        if (provider == null) {
            throw new FileSystemException("vfs.impl/unknown-provider.error",
                new Object[] {scheme, file});
        }
        // Propagate the options of the file system the base file lives in.
        return provider.createFileSystem(scheme, file, file.getFileSystem().getFileSystemOptions());
    }

    /**
     * Creates a layered file system, detecting the scheme from the file itself.
     * @param file The FileObject to use.
     * @return The layered FileObject.
     * @throws FileSystemException if an error occurs.
     */
    public FileObject createFileSystem(final FileObject file)
        throws FileSystemException {
        // Ask the scheme map which (if any) layered scheme fits this file.
        final String scheme = map.getScheme(file);
        if (scheme == null) {
            throw new FileSystemException(
                "vfs.impl/no-provider-for-file.error", file);
        }

        return createFileSystem(scheme, file);
    }

    /**
     * Determines if a layered file system can be created for a given file.
     *
     * @param file The file to check for.
     * @return true if the FileSystem can be created.
     * @throws FileSystemException if an error occurs.
     */
    public boolean canCreateFileSystem(final FileObject file) throws FileSystemException {
        return map.getScheme(file) != null;
    }

    /**
     * Creates a virtual file system backed by the given root file.
     * @param rootFile The FileObject to use.
     * @return The FileObject in the VirtualFileSystem.
     * @throws FileSystemException if an error occurs creating the file.
     */
    public FileObject createVirtualFileSystem(final FileObject rootFile)
        throws FileSystemException {
        return vfsProvider.createFileSystem(rootFile);
    }

    /**
     * Creates an empty virtual file system.
     * @param rootUri The URI to use as the root of the FileSystem.
     * @return A FileObject in the virtual FileSystem.
     * @throws FileSystemException if an error occurs.
     */
    public FileObject createVirtualFileSystem(final String rootUri) throws FileSystemException {
        return vfsProvider.createFileSystem(rootUri);
    }

    /**
     * Locates the local file provider.
     * @return The LocalFileProvider. Never returns null.
     * @throws FileSystemException if no local file provider was registered.
     */
    private LocalFileProvider getLocalFileProvider() throws FileSystemException {
        if (localFileProvider == null) {
            throw new FileSystemException(
                "vfs.impl/no-local-file-provider.error");
        }
        return localFileProvider;
    }

    /**
     * Gets the URLStreamHandlerFactory.
     * A fresh factory instance is returned on every call; it delegates back
     * to this manager's registered providers.
     * @return The URLStreamHandlerFactory.
     */
    public URLStreamHandlerFactory getURLStreamHandlerFactory() {
        return new VfsStreamHandlerFactory();
    }

    /**
     * Closes the given filesystem.<br />
     * If you use VFS as singleton it is VERY dangerous to call this method.
     * @param filesystem The FileSystem to close.
     */
    public void closeFileSystem(FileSystem filesystem) {
        // inform the cache ...
        getFilesCache().clear(filesystem);

        // just in case the cache didnt call _closeFileSystem
        _closeFileSystem(filesystem);
    }

    /**
     * Closes the given filesystem without notifying the files cache.<br />
     * If you use VFS as singleton it is VERY dangerous to call this method.
     * @param filesystem The FileSystem to close.
     */
    public void _closeFileSystem(FileSystem filesystem) {
        // Look up the provider that owns the file system's root scheme.
        FileProvider provider = providers.get(filesystem.getRootName().getScheme());
        if (provider != null) {
            ((AbstractFileProvider) provider).closeFileSystem(filesystem);
        }
    }

    /**
     * This is an internal class because it needs access to the private member
     * providers.
     */
    final class VfsStreamHandlerFactory implements URLStreamHandlerFactory {
        public URLStreamHandler createURLStreamHandler(final String protocol) {
            FileProvider provider = providers.get(protocol);
            if (provider != null) {
                return new DefaultURLStreamHandler(context);
            }

            // Route all other calls to the default URLStreamHandlerFactory
            return new URLStreamHandlerProxy();
        }
    }

    /**
     * Gets the schemes currently available.
     * @return The array of scheme names.
*/ public String[] getSchemes() { String[] schemes = new String[providers.size()]; providers.keySet().toArray(schemes); return schemes; } /** * Get the capabilities for a given scheme. * * @param scheme The scheme to located. * @return A Collection of capabilities. * @throws FileSystemException if the given scheme is not konwn */ public Collection<Capability> getProviderCapabilities(final String scheme) throws FileSystemException { FileProvider provider = providers.get(scheme); if (provider == null) { throw new FileSystemException("vfs.impl/unknown-scheme.error", new Object[] {scheme}); } return provider.getCapabilities(); } /** * Get the configuration builder for the given scheme. * @param scheme The scheme to locate. * @return The FileSystemConfigBuilder for the scheme. * @throws FileSystemException if the given scheme is not konwn */ public FileSystemConfigBuilder getFileSystemConfigBuilder(final String scheme) throws FileSystemException { FileProvider provider = providers.get(scheme); if (provider == null) { throw new FileSystemException("vfs.impl/unknown-scheme.error", new Object[] {scheme}); } return provider.getConfigBuilder(); } // -- OPERATIONS -- /** * Adds the specified FileOperationProvider for the specified scheme. * Several FileOperationProvider's might be registered for the same scheme. * For example, for "file" scheme we can register SvnWsOperationProvider and * CvsOperationProvider. * * @param scheme The scheme the provider should be registered for. * @param operationProvider The FileOperationProvider. * @throws FileSystemException if an error occurs adding the provider. 
*/ public void addOperationProvider(final String scheme, final FileOperationProvider operationProvider) throws FileSystemException { addOperationProvider(new String[] {scheme}, operationProvider); } /** * @see FileSystemManager#addOperationProvider(String, * org.apache.commons.vfs2.operations.FileOperationProvider) * * @param schemes The array of schemes the provider should apply to. * @param operationProvider The FileOperationProvider. * @throws FileSystemException if an error occurs. */ public void addOperationProvider(final String[] schemes, final FileOperationProvider operationProvider) throws FileSystemException { for (int i = 0; i < schemes.length; i++) { final String scheme = schemes[i]; if (!operationProviders.containsKey(scheme)) { final List<FileOperationProvider> providers = new ArrayList<FileOperationProvider>(); operationProviders.put(scheme, providers); } final List<FileOperationProvider> providers = operationProviders.get(scheme); if (providers.contains(operationProvider)) { throw new FileSystemException( "vfs.operation/operation-provider-already-added.error", scheme); } setupComponent(operationProvider); providers.add(operationProvider); } } /** * @param scheme * the scheme for wich we want to get the list af registered * providers. * * @return the registered FileOperationProviders for the specified scheme. * If there were no providers registered for the scheme, it returns * null. * * @throws FileSystemException if an error occurs. */ public FileOperationProvider[] getOperationProviders(final String scheme) throws FileSystemException { List<?> providers = operationProviders.get(scheme); if (providers == null || providers.size() == 0) { return null; } return providers.toArray(new FileOperationProvider[] {}); } }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package vista; import java.awt.BorderLayout; import paneles.JPanelBusquedaCliente; import paneles.JPanelIngresoAbonos; import paneles.JPanelListadoClienteCredito; import paneles.JPanelModAbonos; import paneles.JPanelModClienteCredito; import paneles.JPanelNuevoClienteCredito; import paneles.JPanelUltimosAbonos; /** * * @author Psinergia */ public class LineaCredito extends javax.swing.JInternalFrame { /** * Creates new form LineaCredito */ public LineaCredito() { initComponents(); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jInternalFrame1 = new javax.swing.JInternalFrame(); jPanel5 = new javax.swing.JPanel(); jLabel2 = new javax.swing.JLabel(); jPanel6 = new javax.swing.JPanel(); btnListadoClientes = new javax.swing.JButton(); BtnRegistroClientes = new javax.swing.JButton(); BtnModCliente = new javax.swing.JButton(); btnEliminar2 = new javax.swing.JButton(); BtnUltimosPagos = new javax.swing.JButton(); BtnBusquedaCliente = new javax.swing.JButton(); btnAbonos = new javax.swing.JButton(); btnAbonos1 = new javax.swing.JButton(); jInternalFrame2 = new javax.swing.JInternalFrame(); jPanel1 = new javax.swing.JPanel(); JPanelcambianteLineaCredito = new javax.swing.JPanel(); jInternalFrame1.setBackground(new java.awt.Color(88, 147, 191)); jInternalFrame1.setVisible(true); jPanel5.setBackground(new java.awt.Color(255, 153, 0)); jLabel2.setFont(new java.awt.Font("Tahoma", 0, 24)); // NOI18N jLabel2.setForeground(new java.awt.Color(0, 0, 204)); jLabel2.setText("LINEA DE CREDITO"); 
jPanel6.setBackground(new java.awt.Color(88, 147, 191)); btnListadoClientes.setIcon(new javax.swing.ImageIcon(getClass().getResource("/imagenes/listado_32.png"))); // NOI18N btnListadoClientes.setText(" LISTADO CLIENTES"); btnListadoClientes.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnListadoClientesActionPerformed(evt); } }); BtnRegistroClientes.setIcon(new javax.swing.ImageIcon(getClass().getResource("/imagenes/registro_32.png"))); // NOI18N BtnRegistroClientes.setText(" REGISTRO CLIENTES"); BtnRegistroClientes.setActionCommand("REGISTRO CLIENTES"); BtnRegistroClientes.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { BtnRegistroClientesActionPerformed(evt); } }); BtnModCliente.setIcon(new javax.swing.ImageIcon(getClass().getResource("/imagenes/modificar32.png"))); // NOI18N BtnModCliente.setText(" MODIFICAR CLIENTE"); BtnModCliente.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { BtnModClienteActionPerformed(evt); } }); btnEliminar2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/imagenes/salir32.png"))); // NOI18N btnEliminar2.setText(" SALIR"); btnEliminar2.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnEliminar2ActionPerformed(evt); } }); BtnUltimosPagos.setIcon(new javax.swing.ImageIcon(getClass().getResource("/imagenes/last_32.png"))); // NOI18N BtnUltimosPagos.setText(" ULTIMOS ABONOS"); BtnUltimosPagos.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { BtnUltimosPagosActionPerformed(evt); } }); BtnBusquedaCliente.setIcon(new javax.swing.ImageIcon(getClass().getResource("/imagenes/buscar_32.png"))); // NOI18N BtnBusquedaCliente.setText(" BUSQUEDA CLIENTE"); BtnBusquedaCliente.addActionListener(new 
java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { BtnBusquedaClienteActionPerformed(evt); } }); btnAbonos.setIcon(new javax.swing.ImageIcon(getClass().getResource("/imagenes/agregar_32.png"))); // NOI18N btnAbonos.setText(" INGRESAR ABONOS"); btnAbonos.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnAbonosActionPerformed(evt); } }); btnAbonos1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/imagenes/modificar2_32.png"))); // NOI18N btnAbonos1.setText(" MODIFICAR ABONOS"); btnAbonos1.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { btnAbonos1ActionPerformed(evt); } }); javax.swing.GroupLayout jPanel6Layout = new javax.swing.GroupLayout(jPanel6); jPanel6.setLayout(jPanel6Layout); jPanel6Layout.setHorizontalGroup( jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel6Layout.createSequentialGroup() .addGap(22, 22, 22) .addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(btnAbonos1, javax.swing.GroupLayout.PREFERRED_SIZE, 219, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(BtnUltimosPagos, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(BtnModCliente, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(BtnRegistroClientes, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(btnListadoClientes, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(btnEliminar2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) 
.addComponent(BtnBusquedaCliente, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(btnAbonos, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 219, javax.swing.GroupLayout.PREFERRED_SIZE))) .addContainerGap(25, Short.MAX_VALUE)) ); jPanel6Layout.setVerticalGroup( jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel6Layout.createSequentialGroup() .addContainerGap() .addComponent(BtnRegistroClientes, javax.swing.GroupLayout.PREFERRED_SIZE, 51, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(BtnModCliente, javax.swing.GroupLayout.PREFERRED_SIZE, 46, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(BtnBusquedaCliente, javax.swing.GroupLayout.PREFERRED_SIZE, 46, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(btnListadoClientes, javax.swing.GroupLayout.PREFERRED_SIZE, 41, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(btnAbonos, javax.swing.GroupLayout.PREFERRED_SIZE, 41, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(btnAbonos1, javax.swing.GroupLayout.PREFERRED_SIZE, 41, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(BtnUltimosPagos) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(btnEliminar2) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); javax.swing.GroupLayout jPanel5Layout = new javax.swing.GroupLayout(jPanel5); jPanel5.setLayout(jPanel5Layout); jPanel5Layout.setHorizontalGroup( 
jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jPanel6, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(jPanel5Layout.createSequentialGroup() .addGap(25, 25, 25) .addComponent(jLabel2) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); jPanel5Layout.setVerticalGroup( jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel5Layout.createSequentialGroup() .addComponent(jLabel2) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(jPanel6, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); javax.swing.GroupLayout jInternalFrame1Layout = new javax.swing.GroupLayout(jInternalFrame1.getContentPane()); jInternalFrame1.getContentPane().setLayout(jInternalFrame1Layout); jInternalFrame1Layout.setHorizontalGroup( jInternalFrame1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jInternalFrame1Layout.createSequentialGroup() .addComponent(jPanel5, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(0, 0, Short.MAX_VALUE)) ); jInternalFrame1Layout.setVerticalGroup( jInternalFrame1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jPanel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) ); jInternalFrame2.setBackground(new java.awt.Color(255, 255, 255)); jInternalFrame2.setVisible(true); jPanel1.setBackground(new java.awt.Color(56, 98, 127)); JPanelcambianteLineaCredito.setBackground(new java.awt.Color(88, 147, 191)); javax.swing.GroupLayout JPanelcambianteLineaCreditoLayout = new javax.swing.GroupLayout(JPanelcambianteLineaCredito); JPanelcambianteLineaCredito.setLayout(JPanelcambianteLineaCreditoLayout); JPanelcambianteLineaCreditoLayout.setHorizontalGroup( 
JPanelcambianteLineaCreditoLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 684, Short.MAX_VALUE) ); JPanelcambianteLineaCreditoLayout.setVerticalGroup( JPanelcambianteLineaCreditoLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 515, Short.MAX_VALUE) ); javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(JPanelcambianteLineaCredito, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(JPanelcambianteLineaCredito, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(15, Short.MAX_VALUE)) ); javax.swing.GroupLayout jInternalFrame2Layout = new javax.swing.GroupLayout(jInternalFrame2.getContentPane()); jInternalFrame2.getContentPane().setLayout(jInternalFrame2Layout); jInternalFrame2Layout.setHorizontalGroup( jInternalFrame2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) ); jInternalFrame2Layout.setVerticalGroup( jInternalFrame2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jPanel1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) ); javax.swing.GroupLayout layout = new 
javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(jInternalFrame1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jInternalFrame2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jInternalFrame1) .addComponent(jInternalFrame2) ); pack(); }// </editor-fold>//GEN-END:initComponents private void btnListadoClientesActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnListadoClientesActionPerformed JPanelListadoClienteCredito nu=new JPanelListadoClienteCredito(); //sql.CargarTablaClientes(4, ""); nu.setSize(800,600); nu.setLocation(5,5); JPanelcambianteLineaCredito.removeAll(); JPanelcambianteLineaCredito.add(nu,BorderLayout.CENTER); JPanelcambianteLineaCredito.revalidate(); JPanelcambianteLineaCredito.repaint(); }//GEN-LAST:event_btnListadoClientesActionPerformed private void BtnRegistroClientesActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BtnRegistroClientesActionPerformed JPanelNuevoClienteCredito nu=new JPanelNuevoClienteCredito(); nu.setSize(800,600); nu.setLocation(5,5); JPanelcambianteLineaCredito.removeAll(); JPanelcambianteLineaCredito.add(nu,BorderLayout.CENTER); JPanelcambianteLineaCredito.revalidate(); JPanelcambianteLineaCredito.repaint(); }//GEN-LAST:event_BtnRegistroClientesActionPerformed private void BtnModClienteActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BtnModClienteActionPerformed JPanelModClienteCredito nu=new JPanelModClienteCredito(); nu.setSize(800,600); nu.setLocation(5,5); 
JPanelcambianteLineaCredito.removeAll(); JPanelcambianteLineaCredito.add(nu,BorderLayout.CENTER); JPanelcambianteLineaCredito.revalidate(); JPanelcambianteLineaCredito.repaint(); }//GEN-LAST:event_BtnModClienteActionPerformed private void btnEliminar2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnEliminar2ActionPerformed dispose(); }//GEN-LAST:event_btnEliminar2ActionPerformed private void BtnUltimosPagosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BtnUltimosPagosActionPerformed JPanelUltimosAbonos nu=new JPanelUltimosAbonos(); // sql.CargarTablaClientes(4, ""); nu.setSize(800,600); nu.setLocation(5,5); JPanelcambianteLineaCredito.removeAll(); JPanelcambianteLineaCredito.add(nu,BorderLayout.CENTER); JPanelcambianteLineaCredito.revalidate(); JPanelcambianteLineaCredito.repaint(); }//GEN-LAST:event_BtnUltimosPagosActionPerformed private void BtnBusquedaClienteActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BtnBusquedaClienteActionPerformed JPanelBusquedaCliente nu=new JPanelBusquedaCliente(); nu.setSize(800,600); nu.setLocation(5,5); JPanelcambianteLineaCredito.removeAll(); JPanelcambianteLineaCredito.add(nu,BorderLayout.CENTER); JPanelcambianteLineaCredito.revalidate(); JPanelcambianteLineaCredito.repaint(); //sql.CargarTablaClientes(1, ""); }//GEN-LAST:event_BtnBusquedaClienteActionPerformed private void btnAbonosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnAbonosActionPerformed JPanelIngresoAbonos nu=new JPanelIngresoAbonos(); nu.setSize(800,600); nu.setLocation(1,1); JPanelcambianteLineaCredito.removeAll(); JPanelcambianteLineaCredito.add(nu,BorderLayout.CENTER); JPanelcambianteLineaCredito.revalidate(); JPanelcambianteLineaCredito.repaint(); }//GEN-LAST:event_btnAbonosActionPerformed private void btnAbonos1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnAbonos1ActionPerformed JPanelModAbonos nu=new JPanelModAbonos(); nu.setSize(800,600); 
nu.setLocation(1,1); JPanelcambianteLineaCredito.removeAll(); JPanelcambianteLineaCredito.add(nu,BorderLayout.CENTER); JPanelcambianteLineaCredito.revalidate(); JPanelcambianteLineaCredito.repaint(); }//GEN-LAST:event_btnAbonos1ActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables public javax.swing.JButton BtnBusquedaCliente; public javax.swing.JButton BtnModCliente; public javax.swing.JButton BtnRegistroClientes; public javax.swing.JButton BtnUltimosPagos; public static javax.swing.JPanel JPanelcambianteLineaCredito; public javax.swing.JButton btnAbonos; public javax.swing.JButton btnAbonos1; public javax.swing.JButton btnEliminar2; public javax.swing.JButton btnListadoClientes; private javax.swing.JInternalFrame jInternalFrame1; private javax.swing.JInternalFrame jInternalFrame2; private javax.swing.JLabel jLabel2; private javax.swing.JPanel jPanel1; private javax.swing.JPanel jPanel5; private javax.swing.JPanel jPanel6; // End of variables declaration//GEN-END:variables }
import com.maxeler.correlation.correlationService; import org.apache.thrift.TException; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.List; import java.util.Random; /** Correlation Dynamic example. */ public final class CorrelationClient { /** Maximal number of variables. */ private static final int CORRELATION_MAX_NUM_VARIABLES = 6000; /** Maximal number of Time series. */ private static final int CORRELATION_MAX_NUM_TIMESERIES = 6000; /** Maximal number of top scores. */ private static final int CORRELATION_NUM_TOP_SCORES = 10; /** Number of pipes. */ private static final int CORRELATION_NUM_PIPES = 12; /** PCIe alignment. */ private static final int CORRELATION_PCIE_ALIGNMENT = 16; /** Number of vectors per burst. */ private static final int CORRELATION_NUM_VECTORS_PER_BURST = 2; /** For anything other than ISCA this should be 384. */ private static final int BURST_SIZE = 192; /** Number of nano seconds in one second. */ private static final int NUM_OF_NANO_SECONDS = 1000000000; /** Size of double in bits. */ private static final int SIZE_OF_DOUBLE_IN_BITS = 64; /** Size of int in bits. */ private static final int SIZE_OF_INT_IN_BITS = 32; /** Size of double in bytes. */ private static final int SIZE_OF_DOUBLE_IN_BYTES = 8; /** Size of int in bytes. */ private static final int SIZE_OF_INT_IN_BYTES = 4; /** Size of megabyte in bytes. */ private static final int SIZE_OF_MEGABYTE = 1000000; /** Server port. */ private static final int PORT = 9090; /** Utility classes should not have a public or default constructor. */ private CorrelationClient() { } /** * Generates random data. 
* * @param numberOfRows Number of rows * @param numberOfColumns Number of columns * * @return Random data */ public static double[][] randomData(final int numberOfRows, final int numberOfColumns) { Random rn = new Random(); final int minimum = 0; final int maximum = NUM_OF_NANO_SECONDS; final int range = maximum - minimum + 1; double[][] data = new double[numberOfRows][numberOfColumns]; // Generate input data for (int i = 0; i < numberOfRows; i++) { for (int j = 0; j < numberOfColumns; j++) { data[i][j] = ((double) (rn.nextInt(range) + minimum)) / maximum; } } return data; } /** * Calculate number of bursts for initializing LMem. * * @param numTimeseries Number of Time series * * @return Number of bursts */ public static int calcNumBursts(final int numTimeseries) { int numVectors = 0; for (int i = 1; i <= numTimeseries; ++i) { numVectors += (i + (CORRELATION_NUM_PIPES - 1)) / CORRELATION_NUM_PIPES; } return (numVectors + (CORRELATION_NUM_VECTORS_PER_BURST - 1)) / CORRELATION_NUM_VECTORS_PER_BURST; } /** * Precalculates and reorders data for the DFE. * * @param data Data for correlation * @param sizeTimeseries Size of Time series * @param numTimeseries Number of Time series * @param precalculations Precalculations * @param dataPairs Data pairs */ public static void prepareDataForDfe( final double[][] data, final int sizeTimeseries, final int numTimeseries, final double[] precalculations, final double[] dataPairs) { final int numTimesteps = sizeTimeseries; final double windowSize = (double) sizeTimeseries; if (numTimeseries > CORRELATION_MAX_NUM_TIMESERIES) { System.out.println("Number of Time series should be less or equal to " + CORRELATION_MAX_NUM_TIMESERIES + ". Terminating!"); System.exit(-1); } if (windowSize < 2) { System.out.println( "Window size must be equal or greater than 2. Terminating!"); System.exit(-1); } if (numTimesteps > sizeTimeseries) { System.out.println( "Number of Time steps should be less or equal to" + " size of Time series. 
Terminating!"); System.exit(-1); } double oldVal; double newVal; double[][] sums = new double[numTimesteps][numTimeseries]; double[][] sumsSq = new double[numTimesteps][numTimeseries]; double[][] inv = new double[numTimesteps][numTimeseries]; // 2 DFE input streams: precalculations and data pairs for (int i = 0; i < numTimesteps; i++) { for (int j = 0; j < numTimeseries; j++) { oldVal = 0; if (i > windowSize) { oldVal = data[j][i - (int) windowSize]; } newVal = data[j][i]; if (i == 0) { sums[i][j] = newVal; sumsSq[i][j] = newVal * newVal; } else { sums[i][j] = sums[i - 1][j] + newVal - oldVal; sumsSq[i][j] = sumsSq[i - 1][j] + newVal * newVal - oldVal * oldVal; } inv[i][j] = 1 / Math.sqrt((int) windowSize * sumsSq[i][j] - sums[i][j] * sums[i][j]); //Precalculations REORDERED in DFE ORDER precalculations[2 * i * numTimeseries + 2 * j] = sums[i][j]; precalculations[2 * i * numTimeseries + 2 * j + 1] = inv[i][j]; //Data pairs REORDERED in DFE ORDER dataPairs[2 * i * numTimeseries + 2 * j] = newVal; dataPairs[2 * i * numTimeseries + 2 * j + 1] = oldVal; } } } /** * Calculates number of correlations. * * @param numTimeseries Number of Time series * * @return Number of correlations */ public static int calcNumCorrelations(final int numTimeseries) { return (numTimeseries * (numTimeseries - 1)) / 2; } /** * Calculates index of correlationDfe between i and j. * * @param row the first series * @param column the second series * * @return Index */ public static int calcIndex(final int row, final int column) { if (row == column) { System.out.println("row and j must not be the same!"); return -1; } int smaller = row; int bigger = column; if (row < column) { smaller = column; bigger = row; } return (smaller * (smaller - 1)) / 2 + bigger; } /** * Calculates correlations on DFE. 
* * @param data Data for correlation * @param numTimeseries Number of Time series * @param sizeTimeseries Size of Time series * * @return Correlations */ public static double[] correlateDfe( final double[][] data, final int numTimeseries, final int sizeTimeseries) { final int numOfCorrelations = calcNumCorrelations(numTimeseries); double[] correlations = new double[numOfCorrelations]; DecimalFormat timeFormat = new DecimalFormat("#0.00000"); double startTime = System.nanoTime(); // Make socket TTransport transport = new TSocket("localhost", PORT); // Wrap in a protocol TProtocol protocol = new TBinaryProtocol(transport); // Create a client to use the protocol encoder correlationService.Client client = new correlationService.Client(protocol); double estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Createing a client:\t\t\t\t " + timeFormat.format(estimatedTime) + "s"); try { // Connect! startTime = System.nanoTime(); transport.open(); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Opening connection:\t\t\t\t " + timeFormat.format(estimatedTime) + "s"); // Initialize maxfile startTime = System.nanoTime(); final long maxfile = client.correlation_init(); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Initializing maxfile:\t\t\t " + timeFormat.format(estimatedTime) + "s"); // Load DFE startTime = System.nanoTime(); final long engine = client.max_load(maxfile, "*"); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Loading DFE:\t\t\t\t " + timeFormat.format(estimatedTime) + "s"); final int numTimesteps = sizeTimeseries; final double windowSize = (double) sizeTimeseries; final int numBursts = calcNumBursts(numTimeseries); // Get loop length startTime = System.nanoTime(); List<Integer> loopLength = new ArrayList<Integer>(); loopLength.add(client.correlation_get_CorrelationKernel_loopLength()); 
estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Geting Correlation Kernel loopLength:\t " + timeFormat.format(estimatedTime) + "s"); // Prepare data for DFE startTime = System.nanoTime(); int burstSize = BURST_SIZE; List<Integer> inMemLoad = new ArrayList<Integer>(); for (int i = 0; i < numBursts * burstSize; i++) { inMemLoad.add(0); } double[] precalculations = new double[2 * numTimeseries * numTimesteps]; double[] dataPairs = new double[2 * numTimeseries * numTimesteps]; prepareDataForDfe(data, sizeTimeseries, numTimeseries, precalculations, dataPairs); List<Double> precalculationsVec = new ArrayList<Double>(); List<Double> dataPairsVec = new ArrayList<Double>(); for (int i = 0; i < 2 * numTimeseries * numTimesteps; i++) { precalculationsVec.add(precalculations[i]); dataPairsVec.add(dataPairs[i]); } estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Data reordering time:\t\t\t " + timeFormat.format(estimatedTime) + "s"); // Allocate and send input streams to server startTime = System.nanoTime(); long loopLengthSize = 1; final long addressLoopLength = client.malloc_int32_t(loopLengthSize); client.send_data_int32_t(addressLoopLength, loopLength); double loopLengthTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("\tSending LoopLength:\t\t(size = " + loopLengthSize * SIZE_OF_INT_IN_BITS + " bit)\t\t" + timeFormat.format(loopLengthTime) + "s"); startTime = System.nanoTime(); final long inMemLoadSize = numBursts * burstSize; final long addressInMemLoad = client.malloc_int32_t(inMemLoadSize); client.send_data_int32_t(addressInMemLoad, inMemLoad); final double inMemLoadTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("\tSending InMemLoad:\t\t(size = " + inMemLoadSize * SIZE_OF_INT_IN_BITS + " bit)\t" + timeFormat.format(inMemLoadTime) + "s"); startTime = System.nanoTime(); final long precalculationsVecSize = 2 * numTimeseries 
* numTimesteps; final long addressPrecalculations = client.malloc_double( precalculationsVecSize); client.send_data_double(addressPrecalculations, precalculationsVec); final double precalculationsVecTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("\tSending Precalculations:\t(size = " + precalculationsVecSize * SIZE_OF_DOUBLE_IN_BITS + " bit)\t" + timeFormat.format(precalculationsVecTime) + "s"); startTime = System.nanoTime(); final long dataPairsVecSize = 2 * numTimeseries * numTimesteps; final long addressDataPairs = client.malloc_double(dataPairsVecSize); client.send_data_double(addressDataPairs, dataPairsVec); final double dataPairsVecTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("\tSending DataPairs:\t\t(size = " + dataPairsVecSize * SIZE_OF_DOUBLE_IN_BITS + " bit)\t" + timeFormat.format(dataPairsVecTime) + "s"); estimatedTime = loopLengthTime + inMemLoadTime + precalculationsVecTime + dataPairsVecTime; double speed = (loopLengthSize * SIZE_OF_INT_IN_BITS + inMemLoadSize * SIZE_OF_INT_IN_BITS + precalculationsVecSize * SIZE_OF_DOUBLE_IN_BITS + dataPairsVecSize * SIZE_OF_DOUBLE_IN_BITS) / estimatedTime / SIZE_OF_MEGABYTE; System.out.println("Sending input streams to server total time:\t " + timeFormat.format(estimatedTime) + "s\t(average speed = " + timeFormat.format(speed) + "Mb/s)"); // Allocate memory for output stream on server startTime = System.nanoTime(); // for anything other than ISCA 48 should be instead of 24 final long outCorrelationSize = numTimesteps * loopLength.get(0) * CORRELATION_NUM_TOP_SCORES * CORRELATION_NUM_PIPES + numBursts * 24; final long addressOutCorrelation = client.malloc_double( outCorrelationSize); final long outIndicesSize = 2 * numTimesteps * loopLength.get(0) * CORRELATION_NUM_TOP_SCORES * CORRELATION_NUM_PIPES; final long addressOutIndices = client.malloc_int32_t( outIndicesSize); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; 
System.out.println("Allocating memory for output stream on server: " + timeFormat.format(estimatedTime) + "s"); // Initialize LMem startTime = System.nanoTime(); long actions = client.max_actions_init(maxfile, "loadLMem"); client.max_set_param_uint64t(actions, "numBursts", numBursts); int test = client.max_get_offset_auto_loop_size( actions, "CorrelationKernel", "loopLength"); client.max_queue_input(actions, "in_memLoad", addressInMemLoad, numBursts * burstSize); client.max_ignore_scalar( actions, "LMemCommandsKernel", "run_cycle_count"); client.max_ignore_scalar( actions, "CorrelationKernel", "run_cycle_count"); client.max_run(engine, actions); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("LMem initialization:\t\t\t " + timeFormat.format(estimatedTime) + "s"); //Executing correlation action startTime = System.nanoTime(); actions = client.max_actions_init(maxfile, "default"); client.max_set_param_uint64t(actions, "numBursts", numBursts); client.max_set_param_uint64t(actions, "numSteps", numTimesteps); client.max_set_param_uint64t(actions, "numVariables", numTimeseries); client.max_set_param_uint64t(actions, "outputLastStep", 1); client.max_set_param_double(actions, "windowSize", windowSize); client.max_queue_input( actions, "in_precalculations", addressPrecalculations, precalculationsVecSize * SIZE_OF_DOUBLE_IN_BYTES); client.max_queue_input( actions, "in_variable_pair", addressDataPairs, dataPairsVecSize * SIZE_OF_DOUBLE_IN_BYTES); client.max_queue_output( actions, "out_correlation", addressOutCorrelation, outCorrelationSize * SIZE_OF_DOUBLE_IN_BYTES); client.max_queue_output( actions, "out_indices", addressOutIndices, outIndicesSize * SIZE_OF_INT_IN_BYTES); client.max_run(engine, actions); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Correlation time:\t\t\t\t " + timeFormat.format(estimatedTime) + "s"); // Unload DFE startTime = System.nanoTime(); client.max_unload(engine); 
estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Unloading DFE:\t\t\t\t " + timeFormat.format(estimatedTime) + "s"); // Get output stream from server startTime = System.nanoTime(); List<Double> outCorrelation = new ArrayList<Double>(); outCorrelation = client.receive_data_double(addressOutCorrelation, outCorrelationSize); final double outCorrelationTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("\tGet output stream Correlation:\t(size = " + outCorrelationSize * SIZE_OF_DOUBLE_IN_BITS + " bit)\t" + timeFormat.format(outCorrelationTime) + "s"); startTime = System.nanoTime(); List<Integer> outIndices = new ArrayList<Integer>(); outIndices = client.receive_data_int32_t(addressOutIndices, outIndicesSize); final double outIndicesTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("\tGet output stream outIndices:\t(size = " + outIndicesSize * SIZE_OF_INT_IN_BITS + " bit)\t" + timeFormat.format(outIndicesTime) + "s"); startTime = System.nanoTime(); loopLengthSize = 1; loopLength = client.receive_data_int32_t(addressLoopLength, loopLengthSize); loopLengthTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("\tGet output stream loopLength:\t(size = " + loopLengthSize * SIZE_OF_INT_IN_BITS + " bit)\t\t" + timeFormat.format(loopLengthTime) + "s"); estimatedTime = outCorrelationTime + outIndicesTime + loopLengthTime; speed = (outCorrelationSize * SIZE_OF_DOUBLE_IN_BITS + outIndicesSize * SIZE_OF_INT_IN_BITS + loopLengthSize * SIZE_OF_INT_IN_BITS) / estimatedTime / SIZE_OF_MEGABYTE; System.out.println("Sending input streams to server total time:\t " + timeFormat.format(estimatedTime) + "s\t(average speed = " + timeFormat.format(speed) + "Mb/s)"); // Free allocated memory for streams on server startTime = System.nanoTime(); client.free(addressLoopLength); client.free(addressInMemLoad); client.free(addressPrecalculations); 
client.free(addressDataPairs); client.free(addressOutCorrelation); client.free(addressOutIndices); client.free(actions); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Freeing allocated memory for streams on server: " + timeFormat.format(estimatedTime) + "s"); // Free allocated maxfile data startTime = System.nanoTime(); client.correlation_free(); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Freeing allocated maxfile data:\t\t " + timeFormat.format(estimatedTime) + "s"); // Close! startTime = System.nanoTime(); transport.close(); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Closing connection:\t\t\t\t " + timeFormat.format(estimatedTime) + "s"); // Store data startTime = System.nanoTime(); final int start = (numTimesteps - 1) * loopLength.get(0) * CORRELATION_NUM_TOP_SCORES * CORRELATION_NUM_PIPES; int position = 0; int index = 0; for (int i = 0; i < numTimeseries; i++) { for (int j = 0; j < i; j++) { correlations[index + j] = outCorrelation.get(start + position + j); } index += i; position += ((i / CORRELATION_NUM_PIPES) + 1) * CORRELATION_NUM_PIPES; } estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("Storing time:\t\t\t\t " + timeFormat.format(estimatedTime) + "s"); } catch (TException x) { x.printStackTrace(); System.exit(-1); } return correlations; } /** * Calculates correlations on CPU. 
* * @param data Data for correlation * @param numTimeseries Number of Time series * @param sizeTimeseries Size of Time series * * @return Correlations */ public static double[] correlateCpu( final double[][] data, final int numTimeseries, final double sizeTimeseries) { final double windowSize = sizeTimeseries; final double numTimesteps = sizeTimeseries; double[] sums = new double[numTimeseries]; double[] sumsSq = new double[numTimeseries]; for (int i = 0; i < numTimeseries; i++) { sums[i] = 0; sumsSq[i] = 0; } double[] sumsXtimesY = new double[calcNumCorrelations(numTimeseries)]; for (int i = 0; i < calcNumCorrelations(numTimeseries); i++) { sumsXtimesY[i] = 0; } final int numOfCorrelations = calcNumCorrelations(numTimeseries); double[] correlations = new double[numOfCorrelations]; for (int k = 0; k < numTimesteps; k++) { int indexCorrelation = 0; for (int i = 0; i < numTimeseries; i++) { double oldVal = 0; if (k >= windowSize) { oldVal = data[i][k - (int) windowSize]; } double newVal = data[i][k]; sums[i] += newVal - oldVal; sumsSq[i] += newVal * newVal - oldVal * oldVal; } for (int i = 0; i < numTimeseries; i++) { double oldX = 0; if (k >= windowSize) { oldX = data[i][k - (int) windowSize]; } double newX = data[i][k]; for (int j = 0; j < i; j++) { double oldY = 0; if (k >= windowSize) { oldY = data[j][k - (int) windowSize]; } double newY = data[j][k]; sumsXtimesY[indexCorrelation] += newX * newY - oldX * oldY; double numerator = (windowSize * sumsXtimesY[indexCorrelation] - sums[i] * sums[j]); double denominator = ((1 / Math.sqrt(windowSize * sumsSq[i] - sums[i] * sums[i])) * (1 / Math.sqrt(windowSize * sumsSq[j] - sums[j] * sums[j]))); correlations[indexCorrelation] = numerator * denominator; indexCorrelation += 1; } } } return correlations; } /** * Calculates indices step. 
* * @param numTimeseries Number of Time series * @param sizeTimeseries Size of Time series * * @return Indices step */ public static int[] calculateIndicesStep(final int numTimeseries, final int sizeTimeseries) { final double numTimesteps = sizeTimeseries; final int numOfCorrelations = calcNumCorrelations(numTimeseries); int[] indicesStep = new int[2 * numOfCorrelations]; for (int k = 0; k < numTimesteps; k++) { int indexCorrelation = 0; for (int i = 0; i < numTimeseries; i++) { for (int j = 0; j < i; j++) { indicesStep[2 * indexCorrelation] = j; indicesStep[2 * indexCorrelation + 1] = i; indexCorrelation += 1; } } } return indicesStep; } /** * Checks if correlationsDfe and correlationsCpu are the same. * * @param correlationsDfe Correaltion on DFE * @param correlationsCpu Correaltion on CPU * @param numTimeseries Number of Time series * @param sizeTimeseries Size of Time series */ public static void check( final double[] correlationsDfe, final double[] correlationsCpu, final int numTimeseries, final int sizeTimeseries) { int failed = 0; int[] indicesStep = calculateIndicesStep(numTimeseries, sizeTimeseries); for (int i = 0; i < numTimeseries * (numTimeseries - 1) / 2; i++) { int index = calcIndex(indicesStep[2 * i], indicesStep[2 * i + 1]); if (correlationsDfe[index] != correlationsCpu[i]) { failed++; System.out.println("correlationCpu[" + i + "]\t= " + correlationsCpu[i]); System.out.println("correlationDfe[" + index + "]\t= " + correlationsDfe[index]); } } if (failed == 0) { System.out.println("Test passed!"); } else { System.out.println("Test failed " + failed + " times."); System.exit(-1); } } /** * Calculates correlationsDfe and correlationsCpu and * checks if they return the same value. 
* * @param args Command line arguments */ public static void main(final String[] args) { if (args.length != 2) { System.out.println("Usage: ant -DstreamSize=<stream size> " + "-DnumberOfStreams=<number of streams>"); System.exit(-1); } final int sizeTimeseries = Integer.parseInt(args[0]); final int numTimeseries = Integer.parseInt(args[1]); double[][] data = randomData(numTimeseries, sizeTimeseries); DecimalFormat timeFormat = new DecimalFormat("#0.00000"); double startTime = System.nanoTime(); final double[] correlationsDfe = correlateDfe(data, numTimeseries, sizeTimeseries); double estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("DFE correlation total time:\t\t" + timeFormat.format(estimatedTime) + "s"); startTime = System.nanoTime(); final double[] correlationsCpu = correlateCpu(data, numTimeseries, sizeTimeseries); estimatedTime = (System.nanoTime() - startTime) / NUM_OF_NANO_SECONDS; System.out.println("CPU correlation total time:\t\t" + timeFormat.format(estimatedTime) + "s"); check(correlationsDfe, correlationsCpu, numTimeseries, sizeTimeseries); } }
/* * Copyright 2015 Textocat * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.textocat.textokit.corpus.statistics.dao.corpus; import com.google.common.collect.HashMultimap; import com.google.common.collect.SetMultimap; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.filefilter.DirectoryFileFilter; import org.apache.commons.io.filefilter.WildcardFileFilter; import org.apache.commons.lang3.StringUtils; import org.apache.uima.cas.CAS; import org.apache.uima.cas.impl.XmiCasSerializer; import org.apache.uima.resource.metadata.TypeSystemDescription; import org.apache.uima.util.XMLSerializer; import org.apache.uima.util.XmlCasDeserializer; import org.w3c.dom.Document; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.*; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.Map; import java.util.Set; import static org.apache.commons.io.FileUtils.forceMkdir; import static org.apache.commons.io.IOUtils.closeQuietly; import static org.apache.uima.fit.factory.TypeSystemDescriptionFactory.createTypeSystemDescriptionFromPath; public class XmiFileTreeCorpusDAO implements CorpusDAO { public static final String XMI_FILE_EXTENSION = "xmi"; // config fields private final File corpusBaseDir; // 
state fields // TODO encapsulate in a single object to avoid inconsistent changes private Map<UriAnnotatorPair, File> fileByURIandAnnotatorId = new HashMap<UriAnnotatorPair, File>(); private SetMultimap<URI, String> annotatorsByDocument = HashMultimap.create(); public XmiFileTreeCorpusDAO(String corpusPathString) throws URISyntaxException { corpusBaseDir = new File(corpusPathString); if (!corpusBaseDir.isDirectory()) { throw new IllegalStateException(String.format( "Corpus base dir %s does not exist!", corpusBaseDir)); } findFiles(corpusBaseDir); } private void findFiles(File corpusDirFile) throws URISyntaxException { for (File dir : listAnnotatorDirs(corpusDirFile)) { // TODO check for file extension for (File xmiFile : dir.listFiles()) { URI uri = getDocumentURI(xmiFile); String annotatorId = getAnnotatorId(xmiFile); fileByURIandAnnotatorId.put(new UriAnnotatorPair(uri, annotatorId), xmiFile); annotatorsByDocument.put(uri, getAnnotatorId(xmiFile)); } } } private File[] listAnnotatorDirs(File corpusDir) { return corpusDir.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY); } private String getAnnotatorId(File xmiFile) { return dirNameToAnnotatorId(xmiFile.getParentFile()); } private String dirNameToAnnotatorId(File dir) { return dir.getName(); } private File annotatorIdToDir(String annotatorId) { return new File(corpusBaseDir, annotatorId); } private URI getDocumentURI(File xmiFile) throws URISyntaxException { return new URI(FilenameUtils.removeExtension(xmiFile.getName())); } private String getDocumentFilename(URI docUri) { // sanity check if (StringUtils.isBlank(docUri.getPath())) { throw new IllegalStateException(String.format( "Unexpected doc URI: %s", docUri)); } return String.format("%s.%s", docUri.getPath(), XMI_FILE_EXTENSION); } @Override public Set<URI> getDocuments() throws URISyntaxException { return annotatorsByDocument.keySet(); } @Override public Set<String> getAnnotatorIds(URI docURI) throws FileNotFoundException { if 
(annotatorsByDocument.containsKey(docURI)) { return annotatorsByDocument.get(docURI); } else { throw new FileNotFoundException(); } } @Override public void getDocumentCas(URI docURI, String annotatorId, CAS aCAS) throws SAXException, IOException { if (fileByURIandAnnotatorId.containsKey(new UriAnnotatorPair(docURI, annotatorId))) { FileInputStream xmiFileIn = new FileInputStream( fileByURIandAnnotatorId.get(new UriAnnotatorPair( docURI, annotatorId))); XmlCasDeserializer.deserialize(xmiFileIn, aCAS); closeQuietly(xmiFileIn); } else { throw new FileNotFoundException(String.format( "There is no document '%s' annotated by '%s'", docURI, annotatorId)); } } @Override public boolean hasDocument(URI docURI, String annotatorId) { return fileByURIandAnnotatorId.containsKey( new UriAnnotatorPair(docURI, annotatorId)); } @Override public void persist(URI docUri, String annotatorId, CAS cas) throws IOException, SAXException { File annotatorDir = annotatorIdToDir(annotatorId); forceMkdir(annotatorDir); File docFile = new File(annotatorDir, getDocumentFilename(docUri)); try { serializeCAS(cas, docFile); } catch (IOException ex) { // clean FileUtils.forceDelete(docFile); throw ex; } catch (SAXException ex) { // clean FileUtils.forceDelete(docFile); throw ex; } // update in-memory state fileByURIandAnnotatorId.put(new UriAnnotatorPair(docUri, annotatorId), docFile); annotatorsByDocument.put(docUri, annotatorId); } static private void serializeCAS(CAS cas, File outFile) throws IOException, SAXException { OutputStream out = null; try { out = FileUtils.openOutputStream(outFile); XmiCasSerializer xcs = new XmiCasSerializer(cas.getTypeSystem()); XMLSerializer ser = new XMLSerializer(out, true); xcs.serialize(cas, ser.getContentHandler()); } finally { IOUtils.closeQuietly(out); } } static public TypeSystemDescription getTypeSystem(String corpusPathString) throws SAXException, IOException, ParserConfigurationException { for (File f : new File(corpusPathString) .listFiles((FileFilter) new 
WildcardFileFilter("*.xml"))) { if (getXMLRootElement(f).equals("typeSystemDescription")) { return createTypeSystemDescriptionFromPath(f.toURI().toString()); } } throw new FileNotFoundException(); } static private String getXMLRootElement(File xmlFile) throws SAXException, IOException, ParserConfigurationException { DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); Document doc = dBuilder.parse(xmlFile); // optional, but recommended // read this - // http://stackoverflow.com/questions/13786607/normalization-in-dom-parsing-with-java-how-does-it-work doc.getDocumentElement().normalize(); return doc.getDocumentElement().getNodeName(); } }
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.lang.documentation;

import com.intellij.codeInsight.documentation.DocumentationManagerProtocol;
import com.intellij.codeInsight.documentation.DocumentationManagerUtil;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.HtmlChunk;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocCommentBase;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.concurrency.annotations.RequiresBackgroundThread;
import com.intellij.util.concurrency.annotations.RequiresReadLock;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;
import java.util.function.Consumer;

/**
 * Contributes content to the following IDE features:
 * <ul>
 * <li>Quick Documentation (invoked via explicit action or shown on mouse hover)</li>
 * <li>Navigation info (shown in editor on Ctrl/Cmd+mouse hover)</li>
 * <li>Rendered representation of documentation comments</li>
 * </ul>
 * <p>
 * Extend {@link AbstractDocumentationProvider}.
 * <p>
 * Language-specific instance should be registered in {@code com.intellij.lang.documentationProvider} extension point; otherwise use
 * {@code com.intellij.documentationProvider}.
 * </p>
 *
 * @see com.intellij.lang.LanguageDocumentation
 * @see DocumentationProviderEx
 * @see ExternalDocumentationProvider
 * @see ExternalDocumentationHandler
 */
public interface DocumentationProvider {

  /**
   * Please use {@code com.intellij.lang.documentationProvider} instead of this for language-specific documentation.
   */
  ExtensionPointName<DocumentationProvider> EP_NAME = ExtensionPointName.create("com.intellij.documentationProvider");

  /**
   * Returns the text to show in the Ctrl-hover popup for the specified element.
   *
   * @param element         the element for which the documentation is requested (for example, if the mouse is over
   *                        a method reference, this will be the method to which the reference is resolved).
   * @param originalElement the element under the mouse cursor
   * @return the documentation to show, or {@code null} if the provider can't provide any documentation for this element. Documentation can contain
   * HTML markup. If HTML special characters need to be shown in popup, they should be properly escaped.
   */
  @Nullable
  default @Nls String getQuickNavigateInfo(PsiElement element, PsiElement originalElement) {
    return null;
  }

  /**
   * Returns the list of possible URLs to show as external documentation for the specified element.
   *
   * @param element         the element for which the documentation is requested (for example, if the mouse is over
   *                        a method reference, this will be the method to which the reference is resolved).
   * @param originalElement the element under the mouse cursor
   * @return the list of URLs to open in the browser or to use for showing documentation internally ({@link ExternalDocumentationProvider}).
   * If the list contains a single URL, it will be opened.
   * If the list contains multiple URLs, the user will be prompted to choose one of them.
   * For {@link ExternalDocumentationProvider}, first URL, yielding non-empty result in
   * {@link ExternalDocumentationProvider#fetchExternalDocumentation(Project, PsiElement, List, boolean)} will be used.
   */
  @Nullable
  default List<String> getUrlFor(PsiElement element, PsiElement originalElement) {
    return null;
  }

  /**
   * <p>Callback for asking the doc provider for the complete documentation.
   * Underlying implementation may be time-consuming, that's why this method is expected not to be called from EDT.</p>
   *
   * <p>One can use {@link DocumentationMarkup} to get proper content layout. Typical sample will look like this:
   * <pre>
   * DEFINITION_START + definition + DEFINITION_END +
   * CONTENT_START + main description + CONTENT_END +
   * SECTIONS_START +
   *   SECTION_HEADER_START + section name +
   *     SECTION_SEPARATOR + "&lt;p&gt;" + section content + SECTION_END +
   *   ... +
   * SECTIONS_END
   * </pre>
   * </p>
   * To show different content on mouse hover in editor, {@link #generateHoverDoc(PsiElement, PsiElement)} should be implemented.
   *
   * @param element         the element for which the documentation is requested (for example, if the mouse is over
   *                        a method reference, this will be the method to which the reference is resolved).
   * @param originalElement the element under the mouse cursor
   * @return target element's documentation, or {@code null} if provider is unable to generate documentation
   * for the given element
   */
  @Nullable
  default @Nls String generateDoc(PsiElement element, @Nullable PsiElement originalElement) {
    return null;
  }

  /**
   * Same as {@link #generateDoc(PsiElement, PsiElement)}, but used for documentation showed on mouse hover in editor.
   * <p>
   * At the moment it's only invoked to get initial on-hover documentation. If user navigates any link in that documentation,
   * {@link #generateDoc(PsiElement, PsiElement)} will be used to fetch corresponding content.
   */
  @Nullable
  default @Nls String generateHoverDoc(@NotNull PsiElement element, @Nullable PsiElement originalElement) {
    // By default hover documentation is identical to the full Quick Documentation.
    return generateDoc(element, originalElement);
  }

  /**
   * @deprecated Override {@link #generateRenderedDoc(PsiDocCommentBase)} instead
   */
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.2")
  default @Nls @Nullable String generateRenderedDoc(@NotNull PsiElement element) {
    return null;
  }

  /**
   * This is used to display rendered documentation in editor, in place of corresponding documentation comment's text.
   * <p>
   * The default implementation delegates to the deprecated {@link #generateRenderedDoc(PsiElement)} overload, passing the
   * comment's owner when available (and the comment itself otherwise) for backwards compatibility.
   *
   * @see #collectDocComments(PsiFile, Consumer)
   */
  @ApiStatus.Experimental
  default @Nls @Nullable String generateRenderedDoc(@NotNull PsiDocCommentBase comment) {
    PsiElement target = comment.getOwner();
    return generateRenderedDoc(target == null ? comment : target);
  }

  /**
   * This defines documentation comments in file, which can be rendered in place. HTML content to be displayed will be obtained using
   * {@link #generateRenderedDoc(PsiDocCommentBase)} method.
   * <p>
   * To support cases, when rendered fragment doesn't have representing {@code PsiDocCommentBase} element (e.g. for the sequence of line
   * comments in languages not having a block comment concept), fake elements (not existing in the {@code file}) might be returned. In such
   * a case, {@link #findDocComment(PsiFile, TextRange)} should also be implemented by the documentation provider, for the rendered
   * documentation view to work correctly.
   */
  @ApiStatus.Experimental
  default void collectDocComments(@NotNull PsiFile file, @NotNull Consumer<? super @NotNull PsiDocCommentBase> sink) {
  }

  /**
   * This method is needed to support rendered representation of documentation comments in editor. It should return doc comment located at
   * the provided text range in a file. Overriding the default implementation only makes sense for languages which use fake
   * {@code PsiDocCommentBase} implementations (e.g. in cases when rendered view is provided for a set of adjacent line comments, and
   * there's no real {@code PsiDocCommentBase} element in a file representing the range to render).
   * <p>
   * The default implementation only accepts a real comment whose text range matches {@code range} exactly.
   *
   * @see #collectDocComments(PsiFile, Consumer)
   */
  @ApiStatus.Experimental
  default @Nullable PsiDocCommentBase findDocComment(@NotNull PsiFile file, @NotNull TextRange range) {
    PsiDocCommentBase comment = PsiTreeUtil.getParentOfType(file.findElementAt(range.getStartOffset()), PsiDocCommentBase.class, false);
    return comment == null || !range.equals(comment.getTextRange()) ? null : comment;
  }

  /**
   * Returns the PSI element for which documentation should be shown when the given lookup (code-completion) item is selected.
   *
   * @param psiManager the PSI manager for the project in which documentation is requested
   * @param object     the lookup item's underlying object
   * @param element    the element at the caret position
   * @return the element to document, or {@code null} if this provider cannot map the lookup item to a PSI element
   */
  @Nullable
  default PsiElement getDocumentationElementForLookupItem(PsiManager psiManager, Object object, PsiElement element) {
    return null;
  }

  /**
   * Returns the target element for a link in a documentation comment. The link needs to use the
   * {@link DocumentationManagerProtocol#PSI_ELEMENT_PROTOCOL} protocol.
   *
   * @param psiManager the PSI manager for the project in which the documentation is requested.
   * @param link       the text of the link, not including the protocol.
   * @param context    the element from which the navigation is performed.
   * @return the navigation target, or {@code null} if the link couldn't be resolved.
   * @see DocumentationManagerUtil#createHyperlink(StringBuilder, String, String, boolean)
   */
  @Nullable
  default PsiElement getDocumentationElementForLink(PsiManager psiManager, String link, PsiElement context) {
    return null;
  }

  /**
   * Override this method if standard platform's choice for target PSI element to show documentation for (element either declared or
   * referenced at target offset) isn't suitable for your language. For example, it could be a keyword where there's no
   * {@link com.intellij.psi.PsiReference}, but for which users might benefit from context help.
   *
   * @param editor         the editor in which documentation was requested
   * @param file           the file in which documentation was requested
   * @param contextElement the leaf PSI element in {@code file} at target offset
   * @param targetOffset   equals to caret offset for 'Quick Documentation' action, and to offset under mouse cursor for documentation shown
   *                       on mouse hover
   * @return target PSI element to show documentation for, or {@code null} if it should be determined by standard platform's logic (default
   * behaviour)
   */
  @Nullable
  default PsiElement getCustomDocumentationElement(@NotNull final Editor editor,
                                                  @NotNull final PsiFile file,
                                                  @Nullable PsiElement contextElement,
                                                  int targetOffset) {
    // Delegates to the deprecated DocumentationProviderEx overload for backwards compatibility.
    //noinspection deprecation
    return (this instanceof DocumentationProviderEx)
           ? ((DocumentationProviderEx)this).getCustomDocumentationElement(editor, file, contextElement)
           : null;
  }
}
package com.heyzap.sdk; import org.json.JSONException; import org.json.JSONObject; import android.content.Context; import android.graphics.drawable.Drawable; import android.text.Html; import android.util.DisplayMetrics; import android.util.TypedValue; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.WindowManager; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.RelativeLayout; import android.widget.TextView; import com.heyzap.http.RequestParams; public class LeaderboardScoreDialogTop extends ClickableToast { private Context context; private String gamePackage; private String score; private String displayScore; private String levelId; private boolean fromSdk; private View.OnClickListener showInGameOverlayOrLaunchLeaderboardActivity; private JSONObject response; private Runnable autoHide; private View wrapper; private String bestDisplayScore = null; private long shownAt; private boolean personalBest; private Drawable gameIcon; public LeaderboardScoreDialogTop(Context context, JSONObject response, String gamePackage, String score, String displayScore, String levelId) { this(context, response, gamePackage, score, displayScore, levelId, null, null); } public LeaderboardScoreDialogTop(Context context, JSONObject response, String gamePackage, String score, String displayScore, String levelId, String personalBestFromPhone, Drawable gameIcon) { super(context); this.response = response; this.gamePackage = gamePackage; this.context = context; this.score = score; this.displayScore = displayScore; this.levelId = levelId; this.gameIcon = gameIcon; if (response == null && personalBestFromPhone != null) { // we need to submit the score silently this.bestDisplayScore = personalBestFromPhone; RequestParams requestParams = LeaderboardScoreLauncher.getNewScoreRequestParams(score, displayScore, 
levelId); SDKRestClient.post(context, "/in_game_api/leaderboard/new_score", requestParams, new SDKResponseHandler() { @Override public void onSuccess(JSONObject response) { try { String bestScore = response.getString("best_score"); String bestDisplayScore = response.getString("best_display_score"); JSONObject level = response.getJSONObject("level"); String levelId = level.getString("id"); boolean lowestScoreFirst = level.getBoolean("lowest_score_first"); LeaderboardScoreLauncher.saveLeaderboardInfoOnPhone(LeaderboardScoreDialogTop.this.context, Float.parseFloat(bestScore), bestDisplayScore, levelId, lowestScoreFirst, true); } catch (JSONException e) { e.printStackTrace(); } } @Override public void onFailure(Throwable e) { } }); } else { try { bestDisplayScore = response.getString("best_display_score"); } catch (JSONException e) { e.printStackTrace(); } } this.setContentView(Rzap.layout("leaderboard_score_dialog_top")); showInGameOverlayOrLaunchLeaderboardActivity = new View.OnClickListener() { @Override public void onClick(View v) { // This is what this class does in the Heyzap SDK: HeyzapAnalytics.trackEvent(getContext(), "score-overlay-clicked-top"); HeyzapLib.showInGameOverlay(LeaderboardScoreDialogTop.this.context, null, "top"); LeaderboardScoreDialogTop.this.hide(); } }; wrapper = findViewById(Rzap.id("wrapper")); wrapper.setOnClickListener(showInGameOverlayOrLaunchLeaderboardActivity); // Use portrait width, regardless of orientation RelativeLayout container = (RelativeLayout) findViewById(Rzap.id("container")); FrameLayout.LayoutParams p = (FrameLayout.LayoutParams) container.getLayoutParams(); WindowManager wm = (WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE); DisplayMetrics metrics = new DisplayMetrics(); wm.getDefaultDisplay().getMetrics(metrics); p.width = Math.min(metrics.widthPixels, metrics.heightPixels); container.setLayoutParams(p); final Animation slide = AnimationUtils.loadAnimation(getContext(), Rzap.anim("slide_from_top")); 
Logger.log("starting slide in"); wrapper.startAnimation(slide); autoHide = new Runnable() { @Override public void run() { LeaderboardScoreDialogTop.this.hide(); } }; wrapper.postDelayed(autoHide, 7000); } public void setFromSdk(boolean fromSdk) { this.fromSdk = fromSdk; } @Override public WindowManager.LayoutParams getWmParams() { WindowManager.LayoutParams params = super.getWmParams(); params.gravity = Gravity.TOP; params.width = WindowManager.LayoutParams.FILL_PARENT; params.verticalMargin = 0.0f; params.horizontalMargin = 0.0f; params.flags &= ~WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN; params.flags |= WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH; return params; } public void hide() { wrapper.removeCallbacks(autoHide); slideUp(new Runnable() { @Override public void run() { LeaderboardScoreDialogTop.super.hide(); } }); } public void slideUp(Runnable after) { slide(Rzap.anim("slide_up"), after); } public void slide(int anim, Runnable after) { Animation animation = AnimationUtils.loadAnimation(getContext(), anim); wrapper.startAnimation(animation); if (after != null) { wrapper.postDelayed(after, (int) animation.getDuration()); } } private void setupViews() { TextView titleView = (TextView) this.findViewById(Rzap.id("title")); TextView ctaView = (TextView) this.findViewById(Rzap.id("cta")); ImageView pictureView = (ImageView) this.findViewById(Rzap.id("picture")); titleView.setText(Html.fromHtml(String.format("You scored <font color='#52a600'>%s</font>!", displayScore))); if (personalBest) { ctaView.setText(Html.fromHtml(String.format("<font color='#52a600'>New personal best!</font> Sign in to save.", displayScore))); } else { ctaView.setText(Html.fromHtml(String.format("Personal best: <font color='#52a600'>%s</font>. 
Sign in to save.", bestDisplayScore))); } ctaView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 12); pictureView.setBackgroundResource(Rzap.drawable("icon_default_people")); if (gameIcon != null) { pictureView.setImageDrawable(gameIcon); } } @Override public void show() { HeyzapAnalytics.trackEvent(getContext(), "score-overlay-shown-top"); shownAt = System.currentTimeMillis(); setupViews(); super.show(); } @Override public boolean onTouchEvent(MotionEvent event) { if (event.getAction() == MotionEvent.ACTION_OUTSIDE && System.currentTimeMillis() > shownAt + 1000) { this.hide(); return false; } else { return super.onTouchEvent(event); } } public void setPersonalBest(boolean personalBest) { this.personalBest = personalBest; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.management.internal.web.controllers;

import org.apache.geode.internal.lang.StringUtils;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.util.CommandStringBuilder;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

/**
 * The MiscellaneousCommandsController class implements GemFire Management REST API web service
 * endpoints for the Gfsh Miscellaneous Commands. Each handler translates the HTTP request
 * parameters into an equivalent Gfsh command string and delegates to
 * {@code processCommand(String)} on the superclass.
 * <p/>
 *
 * @see org.apache.geode.management.internal.cli.commands.MiscellaneousCommands
 * @see org.apache.geode.management.internal.web.controllers.AbstractCommandsController
 * @see org.springframework.stereotype.Controller
 * @see org.springframework.web.bind.annotation.PathVariable
 * @see org.springframework.web.bind.annotation.RequestMapping
 * @see org.springframework.web.bind.annotation.RequestMethod
 * @see org.springframework.web.bind.annotation.RequestParam
 * @see org.springframework.web.bind.annotation.ResponseBody
 * @since GemFire 8.0
 */
@Controller("miscellaneousController")
@RequestMapping(AbstractCommandsController.REST_API_VERSION)
@SuppressWarnings("unused")
public class MiscellaneousCommandsController extends AbstractCommandsController {

  // NOTE: the order of addOption calls below is significant — it determines the
  // resulting Gfsh command string.

  // TODO determine whether Async functionality is required
  /** Builds and executes the Gfsh {@code export stack-traces} command. */
  @RequestMapping(method = RequestMethod.GET, value = "/stacktraces")
  @ResponseBody
  public String exportStackTraces(
      @RequestParam(value = CliStrings.EXPORT_STACKTRACE__FILE,
          required = false) final String file,
      @RequestParam(value = CliStrings.GROUP, required = false) final String groupName,
      @RequestParam(value = CliStrings.MEMBER, required = false) final String memberNameId,
      @RequestParam(value = CliStrings.EXPORT_STACKTRACE__FAIL__IF__FILE__PRESENT,
          required = false) final boolean failIfFilePresent) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.EXPORT_STACKTRACE);

    if (hasValue(file)) {
      command.addOption(CliStrings.EXPORT_STACKTRACE__FILE, decode(file));
    }

    if (hasValue(groupName)) {
      command.addOption(CliStrings.GROUP, groupName);
    }

    if (hasValue(memberNameId)) {
      command.addOption(CliStrings.MEMBER, memberNameId);
    }

    // NOTE(review): failIfFilePresent is a primitive boolean, so it is autoboxed here
    // and always "has a value" (false when the client omitted it) — verify hasValue's
    // overloads treat Boolean.FALSE as absent if this option should be conditional.
    if (hasValue(failIfFilePresent)) {
      command.addOption(CliStrings.EXPORT_STACKTRACE__FAIL__IF__FILE__PRESENT,
          String.valueOf(failIfFilePresent));
    }

    return processCommand(command.toString());
  }

  // TODO add Async functionality
  /** Builds and executes the Gfsh {@code gc} command against the given groups (or all members). */
  @RequestMapping(method = RequestMethod.POST, value = "/gc")
  @ResponseBody
  public String gc(
      @RequestParam(value = CliStrings.GROUP, required = false) final String[] groups) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.GC);

    if (hasValue(groups)) {
      command.addOption(CliStrings.GROUP, StringUtils.join(groups, StringUtils.COMMA_DELIMITER));
    }

    return processCommand(command.toString());
  }

  // TODO add Async functionality
  /** Builds and executes the Gfsh {@code gc} command against a single member. */
  @RequestMapping(method = RequestMethod.POST, value = "/members/{member}/gc")
  @ResponseBody
  public String gc(@PathVariable("member") final String memberNameId) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.GC);

    command.addOption(CliStrings.MEMBER, decode(memberNameId));

    return processCommand(command.toString());
  }

  // TODO add Async functionality
  /** Builds and executes the Gfsh {@code netstat} command. */
  @RequestMapping(method = RequestMethod.GET, value = "/netstat")
  @ResponseBody
  public String netstat(
      @RequestParam(value = CliStrings.MEMBER, required = false) final String[] members,
      @RequestParam(value = CliStrings.GROUP, required = false) final String group,
      @RequestParam(value = CliStrings.NETSTAT__FILE, required = false) final String file,
      @RequestParam(value = CliStrings.NETSTAT__WITHLSOF,
          defaultValue = "false") final Boolean withLsof) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.NETSTAT);

    addCommandOption(null, command, CliStrings.MEMBER, members);
    addCommandOption(null, command, CliStrings.GROUP, group);
    addCommandOption(null, command, CliStrings.NETSTAT__FILE, file);
    addCommandOption(null, command, CliStrings.NETSTAT__WITHLSOF, withLsof);

    return processCommand(command.toString());
  }

  // TODO determine if Async functionality is required
  /** Builds and executes the Gfsh {@code show dead-locks} command. */
  @RequestMapping(method = RequestMethod.GET, value = "/deadlocks")
  @ResponseBody
  public String showDeadLock(
      @RequestParam(CliStrings.SHOW_DEADLOCK__DEPENDENCIES__FILE) final String dependenciesFile) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.SHOW_DEADLOCK);

    command.addOption(CliStrings.SHOW_DEADLOCK__DEPENDENCIES__FILE, decode(dependenciesFile));

    return processCommand(command.toString());
  }

  // TODO determine if Async functionality is required
  /** Builds and executes the Gfsh {@code show log} command for a member. */
  @RequestMapping(method = RequestMethod.GET, value = "/members/{member}/log")
  @ResponseBody
  public String showLog(@PathVariable("member") final String memberNameId,
      @RequestParam(value = CliStrings.SHOW_LOG_LINE_NUM, defaultValue = "0") final Integer lines) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.SHOW_LOG);

    command.addOption(CliStrings.MEMBER, decode(memberNameId));
    command.addOption(CliStrings.SHOW_LOG_LINE_NUM, String.valueOf(lines));

    return processCommand(command.toString());
  }

  // TODO determine if Async functionality is required
  /** Builds and executes the Gfsh {@code show metrics} command. */
  @RequestMapping(method = RequestMethod.GET, value = "/metrics")
  @ResponseBody
  public String showMetrics(
      @RequestParam(value = CliStrings.MEMBER, required = false) final String memberNameId,
      @RequestParam(value = CliStrings.SHOW_METRICS__REGION,
          required = false) final String regionNamePath,
      @RequestParam(value = CliStrings.SHOW_METRICS__FILE, required = false) final String file,
      @RequestParam(value = CliStrings.SHOW_METRICS__CACHESERVER__PORT,
          required = false) final String cacheServerPort,
      @RequestParam(value = CliStrings.SHOW_METRICS__CATEGORY,
          required = false) final String[] categories) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.SHOW_METRICS);

    if (hasValue(memberNameId)) {
      command.addOption(CliStrings.MEMBER, memberNameId);
    }

    if (hasValue(regionNamePath)) {
      command.addOption(CliStrings.SHOW_METRICS__REGION, regionNamePath);
    }

    if (hasValue(file)) {
      command.addOption(CliStrings.SHOW_METRICS__FILE, file);
    }

    if (hasValue(cacheServerPort)) {
      command.addOption(CliStrings.SHOW_METRICS__CACHESERVER__PORT, cacheServerPort);
    }

    if (hasValue(categories)) {
      command.addOption(CliStrings.SHOW_METRICS__CATEGORY,
          StringUtils.join(categories, StringUtils.COMMA_DELIMITER));
    }

    return processCommand(command.toString());
  }

  /** Builds and executes the Gfsh {@code shutdown} command. */
  @RequestMapping(method = RequestMethod.POST, value = "/shutdown")
  @ResponseBody
  public String shutdown(
      @RequestParam(value = CliStrings.SHUTDOWN__TIMEOUT,
          defaultValue = "-1") final Integer timeout,
      @RequestParam(value = CliStrings.INCLUDE_LOCATORS,
          defaultValue = "false") final boolean includeLocators) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.SHUTDOWN);

    command.addOption(CliStrings.SHUTDOWN__TIMEOUT, String.valueOf(timeout));
    command.addOption(CliStrings.INCLUDE_LOCATORS, String.valueOf(includeLocators));

    return processCommand(command.toString());
  }

  // TODO determine whether the {groups} and {members} path variables corresponding to the --groups
  // and --members
  // command-line options in the 'change loglevel' Gfsh command actually accept multiple values,
  // and...
  // TODO if so, then change the groups and members method parameters to String[] types.
  // TODO If not, then these options should be renamed!
  /** Changes the log level on the given groups. */
  @RequestMapping(method = RequestMethod.POST, value = "/groups/{groups}/loglevel")
  @ResponseBody
  public String changeLogLevelForGroups(@PathVariable("groups") final String groups,
      @RequestParam(value = CliStrings.CHANGE_LOGLEVEL__LOGLEVEL,
          required = true) final String logLevel) {
    return internalChangeLogLevel(groups, null, logLevel);
  }

  /** Changes the log level on the given members. */
  @RequestMapping(method = RequestMethod.POST, value = "/members/{members}/loglevel")
  @ResponseBody
  public String changeLogLevelForMembers(@PathVariable("members") final String members,
      @RequestParam(value = CliStrings.CHANGE_LOGLEVEL__LOGLEVEL,
          required = true) final String logLevel) {
    return internalChangeLogLevel(null, members, logLevel);
  }

  /** Changes the log level on the given members and groups. */
  @RequestMapping(method = RequestMethod.POST,
      value = "/members/{members}/groups/{groups}/loglevel")
  @ResponseBody
  public String changeLogLevelForMembersAndGroups(@PathVariable("members") final String members,
      @PathVariable("groups") final String groups,
      @RequestParam(value = CliStrings.CHANGE_LOGLEVEL__LOGLEVEL) final String logLevel) {
    return internalChangeLogLevel(groups, members, logLevel);
  }

  // NOTE since "logLevel" is "required", then just set the option; no need to validate its value.
  /** Shared builder for the three change-loglevel endpoints above. */
  private String internalChangeLogLevel(final String groups, final String members,
      final String logLevel) {
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.CHANGE_LOGLEVEL);

    command.addOption(CliStrings.CHANGE_LOGLEVEL__LOGLEVEL, decode(logLevel));

    if (hasValue(groups)) {
      command.addOption(CliStrings.GROUP, decode(groups));
    }

    if (hasValue(members)) {
      command.addOption(CliStrings.MEMBER, decode(members));
    }

    return processCommand(command.toString());
  }
}
package net.jitix.doclayer.lib.api;

import java.net.InetAddress;
import java.sql.SQLException;
import java.sql.Savepoint;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.regex.Pattern;

import net.jitix.doclayer.lib.core.connection.ConnectionException;
import net.jitix.doclayer.lib.core.connection.ConnectionFactory;
import net.jitix.doclayer.lib.core.connection.InitializationException;
import net.jitix.doclayer.lib.core.connection.Session;
import net.jitix.doclayer.lib.core.dbaccess.DBUtil;

import org.apache.log4j.Logger;

import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSyntaxException;

/**
 * This class represents an instance of docLayer library. Each instance is
 * backed by a connection pool which is used to connect to the DB and perform
 * CRUD operations.
 *
 * An application can have multiple instances of docLayer object, each having a
 * separate connection factory.
 *
 * The methods are Thread safe, and multiple threads can share the same instance
 * of docLayer object.
 *
 * @author Soumyajit B (jit@jitix.net)
 *
 */
public class DocLayer {

	private static final Logger logger = Logger.getLogger(DocLayer.class);

	// patterns for validation
	// collection names: alphanumeric only (length limits are enforced elsewhere —
	// NOTE(review): this pattern itself does not bound the 4-64 character range
	// promised by the createCollection javadoc; verify in the validators)
	private static Pattern collectionNamePattern = Pattern.compile("[A-Za-z0-9]+");
	// index field names: identifier-like, optionally starting with $ or _
	private static Pattern indexFieldNamePattern = Pattern.compile("(^[a-zA-Z][a-zA-Z0-9\\$_\\-]*$)|(^[\\$_][a-zA-Z][a-zA-Z0-9\\$_\\-]*$)");

	//JSON parser
	private static JsonParser jsonParser=new JsonParser();

	// private instance of connection factory - one per instance of DocLayer
	private ConnectionFactory connectionFactory;

	/**
	 * Creates a docLayer instance backed by a new connection pool of the given size.
	 *
	 * @throws InitializationException if the pool cannot be initialized
	 * @throws ConnectionException if the database cannot be reached
	 */
	public DocLayer(InetAddress dbHost, int dbPort, String dbName, String userName, String password,
			int connectionPoolSize) throws InitializationException, ConnectionException {
		this.connectionFactory = new ConnectionFactory(dbHost, dbPort, dbName, userName, password,
				connectionPoolSize);
	}

	/**
	 * Creates a new collection, making an entry in the 'collections' table and
	 * creating the necessary per-collection tables.
	 *
	 * @param collectionName
	 *            A valid name for the collection. The collectionName can be
	 *            alphanumeric without special characters and be between 4 and
	 *            64 characters.
	 * @param indexFields
	 *            An array of field names which should be indexed for this
	 *            collection. The field names must be alphanumeric and be
	 *            between 1 and 64 characters.
* @return ID of the newly created collection * @throws IllegalArgumentException * If the arguments are invalid * @throws DocLayerException * If any database/system error is encountered */ public int createCollection(String collectionName, String[] indexFields) throws IllegalArgumentException, DocLayerException { logger.info("Creating new collection "+collectionName+" with index fields: "+Arrays.toString(indexFields)); // validate and if it is invalid then throw illegal argument exception logger.debug("Validating collection name"); validateCollectionName(collectionName); logger.debug("Validating index field names"); for (String indexField : indexFields) { validateIndexFieldName(indexField); } Session session = null; try { // get new session from factory logger.debug("Getting new session from factory"); session = this.connectionFactory.getNewSession(); // create savepoint for transaction String savePointName="createCollection@" + Thread.currentThread().getId(); logger.debug("Creating transaction savepoint: "+savePointName); Savepoint sp = session.getConnection().setSavepoint(savePointName); try { // check if collection already exists logger.debug("Checking if collection "+collectionName+" already exists"); if (DBUtil.getCollectionId(collectionName, session.getConnection()) != -1) { throw new DocLayerException("Collection " + collectionName + " already exists"); } // make new entry in collections table int newCollectionId = DBUtil.createNewCollectionEntry(collectionName, session.getConnection()); logger.info("New collection's Id is "+newCollectionId); // make new entries in collection indexes table logger.debug("Creating new entries in collection_index_fields table"); for (String indexField : indexFields) { DBUtil.addIndexFieldToCollection(newCollectionId, indexField, session.getConnection()); } // create collection specific tables DBUtil.createCollectionSpecificTables(newCollectionId, session.getConnection()); // commit transaction logger.debug("Committing 
transaction"); session.getConnection().commit(); return -1; } catch (SQLException e) { logger.error("Caught SQLException. Rolling back transaction",e); session.getConnection().rollback(sp); throw e; } catch (Exception e) { logger.error("Caught Exception. Rolling back transaction",e); session.getConnection().rollback(sp); throw new DocLayerException("System Exception: " + e.getMessage()); } } catch (SQLException e) { logger.error("SQLException while creating new collection '" + collectionName + "'", e); throw new DocLayerException("Database Exception: " + e.getMessage()); } catch (ConnectionException e) { logger.error("ConnectionException while creating new collection '" + collectionName + "'", e); throw new DocLayerException("Connection Exception: " + e.getMessage()); } finally { // close session if (session != null) { session.close(); } } } /** * Drops a collection and its documents based on the specified collection * name. * * @param collectionName * Name of the collection to be dropped * @throws IllegalArgumentException * If the arguments are invalid * @throws DocLayerException * If any database/system error is encountered */ public void dropCollection(String collectionName) throws IllegalArgumentException, DocLayerException { logger.info("Dropping collection "+collectionName); // validate and if it is invalid then throw illegal argument exception logger.debug("Validating collection name"); validateCollectionName(collectionName); Session session = null; try { // get new session from factory logger.debug("Getting new session from factory"); session = this.connectionFactory.getNewSession(); // create savepoint for transaction String savePointName="dropCollection@" + Thread.currentThread().getId(); logger.debug("Creating transaction savepoint: "+savePointName); Savepoint sp = session.getConnection().setSavepoint(savePointName); try { int collectionId = DBUtil.getCollectionId(collectionName, session.getConnection()); logger.debug("Collection Id is "+collectionId); 
// check if collection exists if (collectionId == -1) { throw new DocLayerException("Collection " + collectionName + " does not exist"); } // delete entry from collections table DBUtil.deleteCollectionEntry(collectionId, session.getConnection()); // drop collection specific tables DBUtil.dropCollectionSpecificTables(collectionId, session.getConnection()); // commit transaction logger.debug("Committing transaction"); session.getConnection().commit(); } catch (SQLException e) { logger.error("Caught SQLException. Rolling back transaction",e); session.getConnection().rollback(sp); throw e; } catch (Exception e) { logger.error("Caught Exception. Rolling back transaction",e); session.getConnection().rollback(sp); throw new DocLayerException("System Exception: " + e.getMessage()); } } catch (SQLException e) { logger.error("SQLException while dropping collection '" + collectionName + "'", e); throw new DocLayerException("Database Exception: " + e.getMessage()); } catch (ConnectionException e) { logger.error("ConnectionException while dropping collection '" + collectionName + "'", e); throw new DocLayerException("Connection Exception: " + e.getMessage()); } finally { // close session if (session != null) { session.close(); } } } /** * Creates a new index on the specified collection. It reindexes all the existing documents and hence it is slow. * Use of this method is not recommended when you have lots of documents already stored in this collection. 
* * @param collectionName * Name of the collection to be dropped * @param indexFieldName * * @throws IllegalArgumentException * @throws DocLayerException */ public void createIndexOnCollection(String collectionName, String indexFieldName) throws IllegalArgumentException, DocLayerException { logger.info("Creating index "+indexFieldName+" on collection "+collectionName); // validate and if it is invalid then throw illegal argument exception logger.debug("Validating collection name"); validateCollectionName(collectionName); Session session = null; try { // get new session from factory logger.debug("Getting new session from factory"); session = this.connectionFactory.getNewSession(); // create savepoint for transaction String savePointName="createIndex@" + Thread.currentThread().getId(); logger.debug("Creating transaction savepoint: "+savePointName); Savepoint sp = session.getConnection().setSavepoint(savePointName); try { int collectionId = DBUtil.getCollectionId(collectionName, session.getConnection()); // check if collection exists if (collectionId == -1) { throw new DocLayerException("Collection " + collectionName + " does not exist"); } // add new entry in collection indexes table DBUtil.addIndexFieldToCollection(collectionId, indexFieldName, session.getConnection()); // reindex documents with this field DBUtil.indexAllDocuments(collectionId, indexFieldName); // commit transaction logger.debug("Committing transaction"); session.getConnection().commit(); } catch (SQLException e) { logger.error("Caught SQLException. Rolling back transaction",e); session.getConnection().rollback(sp); throw e; } catch (Exception e) { logger.error("Caught Exception. 
Rolling back transaction",e); session.getConnection().rollback(sp); throw new DocLayerException("System Exception: " + e.getMessage()); } } catch (SQLException e) { logger.error("SQLException while creating index '"+indexFieldName+"' on collection '" + collectionName + "'", e); throw new DocLayerException("Database Exception: " + e.getMessage()); } catch (ConnectionException e) { logger.error("ConnectionException while creating index '"+indexFieldName+"' on collection '" + collectionName + "'", e); throw new DocLayerException("Connection Exception: " + e.getMessage()); } finally { // close session if (session != null) { session.close(); } } } public void dropIndexFromCollection(String collectionName, String indexFieldName) throws IllegalArgumentException, DocLayerException { logger.info("Dropping index "+indexFieldName+" from collection "+collectionName); // validate and if it is invalid then throw illegal argument exception logger.debug("Validating collection name"); validateCollectionName(collectionName); Session session = null; try { // get new session from factory logger.debug("Getting new session from factory"); session = this.connectionFactory.getNewSession(); // create savepoint for transaction String savePointName="dropIndex@" + Thread.currentThread().getId(); logger.debug("Creating transaction savepoint: "+savePointName); Savepoint sp = session.getConnection().setSavepoint(savePointName); try { int collectionId = DBUtil.getCollectionId(collectionName, session.getConnection()); // check if collection exists if (collectionId == -1) { throw new DocLayerException("Collection " + collectionName + " does not exist"); } // delete entry from collection indexes table DBUtil.removeIndexFieldFromCollection(collectionId, indexFieldName, session.getConnection()); // delete index entries in collection's document index table DBUtil.deleteCollectionDocumentIndexEntries(collectionId, indexFieldName); // commit transaction logger.debug("Committing transaction"); 
session.getConnection().commit(); } catch (SQLException e) { logger.error("Caught SQLException. Rolling back transaction",e); session.getConnection().rollback(sp); throw e; } catch (Exception e) { logger.error("Caught Exception. Rolling back transaction",e); session.getConnection().rollback(sp); throw new DocLayerException("System Exception: " + e.getMessage()); } } catch (SQLException e) { logger.error("SQLException while dropping index '"+indexFieldName+"' from collection '" + collectionName + "'", e); throw new DocLayerException("Database Exception: " + e.getMessage()); } catch (ConnectionException e) { logger.error("ConnectionException while dropping index '"+indexFieldName+"' from collection '" + collectionName + "'", e); throw new DocLayerException("Connection Exception: " + e.getMessage()); } finally { // close session if (session != null) { session.close(); } } } public String insertDocument(String collectionName, String documentJson, boolean upsert) throws IllegalArgumentException, DocLayerException { logger.info("Inserting document into collection "+collectionName); logger.debug("Document: "+documentJson); // validate and if it is invalid then throw illegal argument exception logger.debug("Validating collection name"); validateCollectionName(collectionName); //validate and parse JSON string logger.debug("Validating and parsing JSON string"); JsonObject document=getJsonDocument(documentJson); Session session = null; try { // get new session from factory logger.debug("Getting new session from factory"); session = this.connectionFactory.getNewSession(); // create savepoint for transaction String savePointName="insertDocument@" + Thread.currentThread().getId(); logger.debug("Creating transaction savepoint: "+savePointName); Savepoint sp = session.getConnection().setSavepoint(savePointName); try { boolean itsAnUpdate=false; int collectionId = DBUtil.getCollectionId(collectionName, session.getConnection()); // check if collection exists if (collectionId == -1) 
{ throw new DocLayerException("Collection " + collectionName + " does not exist"); } //check if the document contains the id field String documentId=getTopLevelFieldValue(document, "_id"); if(documentId==null){ //generate new id and add to document logger.info("Generating new Id for the document"); String newId=UUID.randomUUID().toString(); document.add("_id", new JsonPrimitive(newId)); logger.info("Auto-generated Id '"+newId+"' added to document"); } else{ //check if a document exists with the same id if(DBUtil.checkIfDocumentIdExists(collectionId, documentId)){ logger.info("A document already exists with id "+documentId); if(upsert){ //set the overwrite flag to true itsAnUpdate=true; //remove entries from collection's index table corresponding to the document id DBUtil.removeIndexEntryForDocument(collectionId, documentId); } else{ throw new DocLayerException("A document with _id="+documentId+" already exists in collection "+collectionName); } } } //get list of index fields for the collection List<String> indexFields=DBUtil.getIndexFields(collectionId, session.getConnection()); //iterate over the list of index fields and add index entries for(String indexField:indexFields){ //check if the index field is present in the document logger.info("Applying index field "+indexField+" on document"); String indexFieldValue=getTopLevelFieldValue(document, indexField); if(indexFieldValue!=null){ DBUtil.indexDocument(collectionId, documentId, indexField, indexFieldValue); } } //insert or update document entry in collection's documents table if(itsAnUpdate){ DBUtil.updateDocumentEntry(collectionId, documentId, documentJson); } else{ DBUtil.createDocumentEntry(collectionId, documentId, documentJson); } // commit transaction logger.debug("Committing transaction"); session.getConnection().commit(); return documentId; } catch (SQLException e) { logger.error("Caught SQLException. 
Rolling back transaction",e); session.getConnection().rollback(sp); throw e; } catch (Exception e) { logger.error("Caught Exception. Rolling back transaction",e); session.getConnection().rollback(sp); throw new DocLayerException("System Exception: " + e.getMessage()); } } catch (SQLException e) { logger.error("SQLException while inserting document into collection '" + collectionName + "'", e); throw new DocLayerException("Database Exception: " + e.getMessage()); } catch (ConnectionException e) { logger.error("ConnectionException while inserting document into collection '" + collectionName + "'", e); throw new DocLayerException("Connection Exception: " + e.getMessage()); } finally { // close session if (session != null) { session.close(); } } } public String updateDocument(String collectionName, String documentJson, boolean upsert) throws IllegalArgumentException, DocLayerException { logger.info("Updating document into collection "+collectionName); logger.debug("Document: "+documentJson); // validate and if it is invalid then throw illegal argument exception logger.debug("Validating collection name"); validateCollectionName(collectionName); //validate and parse JSON string logger.debug("Validating and parsing JSON string"); JsonObject document=getJsonDocument(documentJson); Session session = null; try { // get new session from factory logger.debug("Getting new session from factory"); session = this.connectionFactory.getNewSession(); // create savepoint for transaction String savePointName="updateDocument@" + Thread.currentThread().getId(); logger.debug("Creating transaction savepoint: "+savePointName); Savepoint sp = session.getConnection().setSavepoint(savePointName); try { boolean itsAnInsert=false; int collectionId = DBUtil.getCollectionId(collectionName, session.getConnection()); // check if collection exists if (collectionId == -1) { throw new DocLayerException("Collection " + collectionName + " does not exist"); } //check if the document contains the id field 
String documentId=getTopLevelFieldValue(document, "_id"); if(documentId==null){ //if the document does not have _id field if(!upsert){ throw new DocLayerException("Document to be updated does not contain _id field"); } /*else{ //upsert is set to true so it needs to be inserted itsAnInsert=true; }*/ } if(documentId==null){ //generate new id and add to document logger.info("Generating new Id for the document"); String newId=UUID.randomUUID().toString(); document.add("_id", new JsonPrimitive(newId)); logger.info("Auto-generated Id '"+newId+"' added to document"); } else{ //check if a document exists with the same id if(DBUtil.checkIfDocumentIdExists(collectionId, documentId)){ logger.info("A document already exists with id "+documentId); if(upsert){ //set the overwrite flag to true itsAnInsert=true; //remove entries from collection's index table corresponding to the document id DBUtil.removeIndexEntryForDocument(collectionId, documentId); } else{ throw new DocLayerException("A document with _id="+documentId+" already exists in collection "+collectionName); } } } //get list of index fields for the collection List<String> indexFields=DBUtil.getIndexFields(collectionId, session.getConnection()); //iterate over the list of index fields and add index entries for(String indexField:indexFields){ //check if the index field is present in the document logger.info("Applying index field "+indexField+" on document"); String indexFieldValue=getTopLevelFieldValue(document, indexField); if(indexFieldValue!=null){ DBUtil.indexDocument(collectionId, documentId, indexField, indexFieldValue); } } //insert or update document entry in collection's documents table if(itsAnInsert){ DBUtil.updateDocumentEntry(collectionId, documentId, documentJson); } else{ DBUtil.createDocumentEntry(collectionId, documentId, documentJson); } // commit transaction logger.debug("Committing transaction"); session.getConnection().commit(); return documentId; } catch (SQLException e) { logger.error("Caught 
SQLException. Rolling back transaction",e); session.getConnection().rollback(sp); throw e; } catch (Exception e) { logger.error("Caught Exception. Rolling back transaction",e); session.getConnection().rollback(sp); throw new DocLayerException("System Exception: " + e.getMessage()); } } catch (SQLException e) { logger.error("SQLException while inserting document into collection '" + collectionName + "'", e); throw new DocLayerException("Database Exception: " + e.getMessage()); } catch (ConnectionException e) { logger.error("ConnectionException while inserting document into collection '" + collectionName + "'", e); throw new DocLayerException("Connection Exception: " + e.getMessage()); } finally { // close session if (session != null) { session.close(); } } } public void deleteDocument(String documentJson) throws IllegalArgumentException, DocLayerException { } public void deleteDocumentById(String _id) throws IllegalArgumentException, DocLayerException { } public JsonObject getDocument(String _id) throws IllegalArgumentException, DocLayerException { return null; } public List<JsonObject> getDocuments(Query query) throws IllegalArgumentException, DocLayerException { return null; } /** * Searches the document object for a top level index field. * * If the field is found then it checks the type and validates whether * it is a JSON primitive or not. During validation if the type of the * field is not a JSON primitive then it checks the field name to determine * whether to throw an exception or not. If the field name is '_id' and * the field's type is not a JSON primitive then it throws an exception, * if the field name is something else then it returns null. * * If the field not found then it returns null. 
	 *
	 * @param document Instance of JsonDocument
	 * @param indexFieldName Name of the top level index field
	 * @return String value of the field if it is present and is a JSON primitive,
	 *         null if the field is not present or if the field is not named '_id'
	 *         and has a value which is not a JSON primitive
	 * @throws DocLayerException Encapsulates exceptions
	 */
	private static String getTopLevelFieldValue(JsonObject document, String indexFieldName) throws DocLayerException{
		logger.debug("Checking if the JSON document contains top level index field "+indexFieldName);
		if(document.has(indexFieldName)){
			logger.debug("JSON document contains the top level field "+indexFieldName+". Checking if the value is a JSON primitive.");
			JsonElement fieldValue=document.get(indexFieldName);
			if(fieldValue.isJsonPrimitive()){
				logger.debug("Value associated with "+indexFieldName+" is JSON primitive. Getting string value.");
				// NOTE(review): toString() yields the JSON representation, so string values
				// keep their surrounding quotes (unlike getAsString()) - confirm this is the
				// form expected by the index tables before changing it.
				return fieldValue.toString();
			}
			else{
				logger.debug("Value associated with "+indexFieldName+" is not JSON primitive.");
				// A non-primitive '_id' is a hard error; any other non-primitive field is
				// simply not indexable and reported as absent.
				if(indexFieldName.equals("_id")){
					throw new DocLayerException("Document Id field has a non primitive value");
				}
				else{
					return null;
				}
			}
		}
		else{
			logger.debug("JSON document does not contain top level field "+indexFieldName);
			return null;
		}
	}

	/**
	 * Parses a serialized JSON string into a JsonObject.
	 *
	 * @param serializedDocument the JSON text to parse
	 * @return the parsed top level JSON object
	 * @throws DocLayerException if the text is not valid JSON or is not a JSON object
	 */
	private static JsonObject getJsonDocument(String serializedDocument) throws IllegalArgumentException, DocLayerException{
		try{
			JsonElement parsedObject=jsonParser.parse(serializedDocument);
			if(!parsedObject.isJsonObject()){
				// Valid JSON, but a primitive/array at top level - not a document.
				throw new DocLayerException("JSON string is not a JSON object");
			}
			else{
				return parsedObject.getAsJsonObject();
			}
		}
		catch(JsonSyntaxException e){
			logger.error("Caught JsonSyntaxException for JSON string: "+serializedDocument,e);
			throw new DocLayerException("Invalid syntax in JSON string");
		}
		catch(JsonParseException e){
			// JsonParseException is the broader parse failure; caught after the more
			// specific JsonSyntaxException.
			logger.error("Caught JsonParseException for JSON string: "+serializedDocument,e);
			throw new DocLayerException("The JSON string could not be parsed");
		}
	}

	/**
	 * Validates a collection name: non-null, matches collectionNamePattern, and
	 * 2-64 characters long.
	 *
	 * @param collectionName name to validate
	 * @throws IllegalArgumentException if any rule is violated
	 */
	private static void validateCollectionName(String collectionName) throws IllegalArgumentException {
		if (collectionName == null) {
			throw new IllegalArgumentException("Collection name is null");
		}
		if (!collectionNamePattern.matcher(collectionName).matches()) {
			throw new IllegalArgumentException("Collection name " + collectionName + " is invalid");
		}
		if (collectionName.length() < 2 || collectionName.length() > 64) {
			throw new IllegalArgumentException("Collection name " + collectionName + " must be between 2 and 64 characters");
		}
	}

	/**
	 * Validates an index field name: non-null, not the reserved '_id', matches
	 * indexFieldNamePattern, and 1-32 characters long.
	 *
	 * @param indexFieldName name to validate
	 * @throws IllegalArgumentException if any rule is violated
	 */
	private static void validateIndexFieldName(String indexFieldName) throws IllegalArgumentException {
		if (indexFieldName == null) {
			throw new IllegalArgumentException("Index field name is null");
		}
		// '_id' is implicitly indexed and may not be declared by callers.
		if(indexFieldName.equals("_id")){
			throw new IllegalArgumentException("Index field name cannot be _id");
		}
		if (!indexFieldNamePattern.matcher(indexFieldName).matches()) {
			throw new IllegalArgumentException("Index field name " + indexFieldName + " is invalid");
		}
		if (indexFieldName.length() < 1 || indexFieldName.length() > 32) {
			throw new IllegalArgumentException("Index field name " + indexFieldName + " must be between 1 and 32 characters");
		}
	}
}
/* * Copyright (C) 2017-2019 Dremio Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dremio.service.jobs.metadata; import static com.dremio.common.utils.Protos.listNotNull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.apache.calcite.plan.RelOptCost; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelShuttleImpl; import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.tools.ValidationException; import com.dremio.common.utils.PathUtils; import com.dremio.exec.planner.StatelessRelShuttleImpl; import com.dremio.exec.planner.acceleration.ExpansionNode; import com.dremio.exec.planner.common.ContainerRel; import com.dremio.exec.planner.fragment.PlanningSet; import com.dremio.exec.planner.sql.handlers.SqlHandlerUtil; import com.dremio.exec.record.BatchSchema; import com.dremio.exec.tablefunctions.ExternalQueryRelBase; import com.dremio.exec.tablefunctions.ExternalQueryScanDrel; import com.dremio.service.job.proto.JoinInfo; import com.dremio.service.job.proto.ParentDatasetInfo; import com.dremio.service.job.proto.ScanPath; import 
com.dremio.service.namespace.NamespaceException; import com.dremio.service.namespace.NamespaceKey; import com.dremio.service.namespace.NamespaceService; import com.dremio.service.namespace.dataset.proto.DatasetConfig; import com.dremio.service.namespace.dataset.proto.DatasetType; import com.dremio.service.namespace.dataset.proto.FieldOrigin; import com.dremio.service.namespace.dataset.proto.Origin; import com.dremio.service.namespace.dataset.proto.ParentDataset; import com.dremio.service.namespace.dataset.proto.VirtualDataset; import com.dremio.service.namespace.proto.NameSpaceContainer; import com.dremio.service.namespace.proto.NameSpaceContainer.Type; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; /** * A description of information we use to better understand a query. 
 */
public class QueryMetadata {
//  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(QueryMetadata.class);

  // Parent names that are internal artifacts and must never be reported as real ancestors.
  private static final Set<String> RESERVED_PARENT_NAMES = ImmutableSet.of("dremio_limited_preview");

  // All Optional fields use Guava's Optional.fromNullable, so "absent" means the
  // corresponding value was simply not supplied to the constructor.
  private final RelDataType rowType;
  private final Optional<List<SqlIdentifier>> ancestors;
  private final Optional<List<FieldOrigin>> fieldOrigins;
  @Deprecated
  private final Optional<List<JoinInfo>> joins;
  private final Optional<List<ParentDatasetInfo>> parents;
  private final Optional<SqlNode> sqlNode;
  private final Optional<List<ParentDataset>> grandParents;
  private final Optional<RelOptCost> cost;
  private final Optional<PlanningSet> planningSet;
  private final Optional<BatchSchema> batchSchema;
  private final List<ScanPath> scanPaths;
  private final String querySql;
  private final List<String> queryContext;
  private final List<String> sourceNames;

  /**
   * Package-private: instances are built via {@link #builder(NamespaceService)}.
   * Every reference argument may be null; nullable ones are wrapped as Optional.
   */
  QueryMetadata(List<SqlIdentifier> ancestors,
                List<FieldOrigin> fieldOrigins,
                List<JoinInfo> joins,
                List<ParentDatasetInfo> parents,
                SqlNode sqlNode,
                RelDataType rowType,
                List<ParentDataset> grandParents,
                final RelOptCost cost,
                final PlanningSet planningSet,
                BatchSchema batchSchema,
                List<ScanPath> scanPaths,
                String querySql,
                List<String> queryContext,
                List<String> sourceNames) {
    this.rowType = rowType;
    this.ancestors = Optional.fromNullable(ancestors);
    this.fieldOrigins = Optional.fromNullable(fieldOrigins);
    this.joins = Optional.fromNullable(joins);
    this.parents = Optional.fromNullable(parents);
    this.sqlNode = Optional.fromNullable(sqlNode);
    this.grandParents = Optional.fromNullable(grandParents);
    this.cost = Optional.fromNullable(cost);
    this.planningSet = Optional.fromNullable(planningSet);
    this.batchSchema = Optional.fromNullable(batchSchema);
    this.scanPaths = scanPaths;
    this.querySql = querySql;
    this.queryContext = queryContext;
    this.sourceNames = sourceNames;
  }

  /**
   * Returns the deduplicated last path component of each ancestor (i.e. the bare
   * table names), or absent when no ancestors were recorded.
   */
  @VisibleForTesting
  public Optional<List<String>> getReferredTables() {
    if (!ancestors.isPresent()) {
      return Optional.absent();
    }
    Set<String> tableNames = new HashSet<>();
    for (SqlIdentifier id : ancestors.get()) {
      if (id.names.size() > 0) {
        // Keep only the final component of the qualified name.
        tableNames.add(id.names.get(id.names.size() - 1));
      }
    }
    return Optional.<List<String>>of(new ArrayList<>(tableNames));
  }

  public Optional<List<ParentDataset>> getGrandParents() {
    return grandParents;
  }

  @VisibleForTesting
  public Optional<SqlNode> getSqlNode() {
    return sqlNode;
  }

  @VisibleForTesting
  public Optional<List<SqlIdentifier>> getAncestors() {
    return ancestors;
  }

  public Optional<List<FieldOrigin>> getFieldOrigins() {
    return fieldOrigins;
  }

  public Optional<List<JoinInfo>> getJoins() {
    return joins;
  }

  public RelDataType getRowType() {
    return rowType;
  }

  public Optional<List<ParentDatasetInfo>> getParents() {
    return parents;
  }

  public Optional<BatchSchema> getBatchSchema() {
    return batchSchema;
  }

  public List<ScanPath> getScanPaths() {
    return scanPaths;
  }

  /**
   * Returns original cost of query past logical planning.
   */
  public Optional<RelOptCost> getCost() {
    return cost;
  }

  public Optional<PlanningSet> getPlanningSet() {
    return planningSet;
  }

  public String getQuerySql() {
    return querySql;
  }

  public List<String> getQueryContext() {
    return queryContext;
  }

  public List<String> getSourceNames() {
    return sourceNames;
  }

  /**
   * Create a builder for QueryMetadata.
   * @param namespace A namespace service. If provided, ParentDatasetInfo will be extracted, otherwise it won't.
   * @return The builder.
   */
  public static Builder builder(NamespaceService namespace){
    return new Builder(namespace);
  }

  /**
   * A builder to construct query metadata.
   */
  public static class Builder {
    private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Builder.class);

    private final NamespaceService namespace;

    // Inputs accumulated from the planning phases; all optional except rowType,
    // which build() requires.
    private RelDataType rowType;
    private RelNode logicalBefore;
    private RelNode logicalAfter;
    private RelNode prejoin;
    private RelNode expanded;
    private SqlNode sql;
    private RelOptCost cost;
    private PlanningSet planningSet;
    private BatchSchema batchSchema;
    private String querySql;
    private List<String> queryContext;
    private List<String> externalQuerySourceInfo;

    Builder(NamespaceService namespace){
      this.namespace = namespace;
    }

    public Builder addQuerySql(String sql) {
      this.querySql = sql;
      return this;
    }

    public Builder addQueryContext(List<String> context) {
      this.queryContext = context;
      return this;
    }

    public Builder addRowType(RelDataType rowType){
      this.rowType = rowType;
      return this;
    }

    // Records the logical plan before and after logical transformation.
    public Builder addLogicalPlan(RelNode before, RelNode after) {
      this.logicalBefore = before;
      this.logicalAfter = after;
      return this;
    }

    public Builder addBatchSchema(BatchSchema schema) {
      this.batchSchema = schema;
      return this;
    }

    public Builder addPreJoinPlan(RelNode rel) {
      this.prejoin = rel;
      return this;
    }

    public Builder addExpandedPlan(RelNode rel) {
      this.expanded = rel;
      return this;
    }

    public Builder addParsedSql(SqlNode sql) {
      this.sql = sql;
      return this;
    }

    public Builder addCost(final RelOptCost cost) {
      this.cost = cost;
      return this;
    }

    public Builder addSourceNames(final List<String> sourceNames) {
      this.externalQuerySourceInfo = sourceNames;
      return this;
    }

    /**
     * Sets parallelized query plan.
     */
    public Builder setPlanningSet(final PlanningSet planningSet) {
      this.planningSet = planningSet;
      return this;
    }

    /**
     * Builds the QueryMetadata from everything collected so far.
     *
     * Ancestors are taken from the expanded plan when available (ExpansionNode,
     * ExternalQueryRelBase and TableScan nodes), otherwise from the parsed SQL,
     * filtering out reserved internal parent names.
     *
     * @throws ValidationException if the row type contains duplicate column names
     */
    public QueryMetadata build() throws ValidationException {
      Preconditions.checkNotNull(rowType, "The validated row type must be observed before reporting metadata.");

      final List<SqlIdentifier> ancestors = new ArrayList<>();
      if (expanded != null) {
        expanded.accept(new RelShuttleImpl() {
          @Override
          public RelNode visit(RelNode other) {
            List<String> path = null;
            if (other instanceof ExpansionNode) {
              path = ((ExpansionNode) other).getPath().getPathComponents();
            } else if (other instanceof ExternalQueryRelBase) {
              path = ((ExternalQueryRelBase) other).getPath().getPathComponents();
            }
            if (path != null) {
              ancestors.add(new SqlIdentifier(path, SqlParserPos.ZERO));
              // Stop descending: children of an expansion belong to that ancestor.
              return other;
            }
            return super.visit(other);
          }

          @Override
          public RelNode visit(TableScan scan) {
            ancestors.add(new SqlIdentifier(scan.getTable().getQualifiedName(), SqlParserPos.ZERO));
            return scan;
          }
        });
      } else if (sql != null) {
        ancestors.addAll(AncestorsVisitor.extractAncestors(sql).stream()
            .filter(input -> !RESERVED_PARENT_NAMES.contains(input.toString())).collect(Collectors.toList()));
      }

      List<FieldOrigin> fieldOrigins = null;
      if (expanded != null && rowType != null) {
        try {
          fieldOrigins = ImmutableList.copyOf(FieldOriginExtractor.getFieldOrigins(expanded, rowType));
        } catch (Exception e) {
          // If we fail to extract the column origins, don't fail the query
          logger.debug("Failed to extract column origins for query: " + sql);
        }
      }

      // Make sure there are no duplicate column names
      SqlHandlerUtil.validateRowType(true, Lists.<String>newArrayList(), rowType);

      List<ScanPath> scanPaths = null;
      if (logicalAfter != null) {
        scanPaths = FluentIterable.from(getScans(logicalAfter))
            .transform(new Function<List<String>, ScanPath>() {
              @Override
              public ScanPath apply(List<String> path) {
                return new ScanPath().setPathList(path);
              }
            })
            .toList();
        // NOTE(review): this overwrites any value set via addSourceNames - confirm
        // intended precedence.
        externalQuerySourceInfo = getExternalQuerySources(logicalAfter);
      }

      return new QueryMetadata(
          ancestors, // list of parents
          fieldOrigins,
          null,
          getParentsFromSql(ancestors), // convert parent to ParentDatasetInfo
          sql,
          rowType,
          getGrandParents(ancestors), // list of all parents to be stored with dataset
          cost, // query cost past logical
          planningSet,
          batchSchema,
          scanPaths,
          querySql,
          queryContext,
          externalQuerySourceInfo);
    }

    /**
     * Return list of all parents for given dataset, i.e. ancestors at level 2 and
     * beyond, discovered by walking each direct parent's recorded parents and
     * grand parents in the namespace. Namespace failures are logged, not thrown.
     *
     * @param parents parents of dataset from sql.
     */
    private List<ParentDataset> getGrandParents(List<SqlIdentifier> parents) {
      if (parents == null) {
        return null;
      }
      // Map each known ancestor to its distance from this dataset (direct parent = 1).
      final Map<NamespaceKey, Integer> parentsToLevelMap = Maps.newHashMap();
      final List<NamespaceKey> parentKeys = Lists.newArrayList();
      final List<ParentDataset> grandParents = Lists.newArrayList();
      for (SqlIdentifier parent : parents) {
        final NamespaceKey parentKey = new NamespaceKey(parent.names);
        parentsToLevelMap.put(parentKey, 1);
        parentKeys.add(parentKey);
      }

      try {
        // add parents of parents.
        if (!parentKeys.isEmpty()) {
          for (NameSpaceContainer container : namespace.getEntities(parentKeys)) {
            if (container != null && container.getType() == Type.DATASET) { // missing parent
              if (container.getDataset() != null) {
                final VirtualDataset virtualDataset = container.getDataset().getVirtualDataset();
                if (virtualDataset != null) {
                  if (virtualDataset.getParentsList() != null) {
                    // add parents of parents
                    for (ParentDataset parentDataset : virtualDataset.getParentsList()) {
                      final NamespaceKey parentKey = new NamespaceKey(parentDataset.getDatasetPathList());
                      if (!parentsToLevelMap.containsKey(parentKey)) {
                        parentsToLevelMap.put(parentKey, parentDataset.getLevel() + 1);
                      }
                    }
                    // add grand parents of parent too
                    if (virtualDataset.getGrandParentsList() != null) {
                      for (ParentDataset grandParentDataset : virtualDataset.getGrandParentsList()) {
                        final NamespaceKey parentKey = new NamespaceKey(grandParentDataset.getDatasetPathList());
                        if (!parentsToLevelMap.containsKey(parentKey)) {
                          parentsToLevelMap.put(parentKey, grandParentDataset.getLevel() + 1);
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
      } catch (NamespaceException ne) {
        logger.error("Failed to get list of grand parents", ne);
      }

      // Only entries deeper than the direct parents qualify as grand parents.
      for (Map.Entry<NamespaceKey, Integer> entry : parentsToLevelMap.entrySet()) {
        if (entry.getValue() > 1) {
          grandParents.add(new ParentDataset().setDatasetPathList(entry.getKey().getPathComponents()).setLevel(entry.getValue()));
        }
      }
      return grandParents;
    }

    /**
     * Return lists of {@link ParentDatasetInfo} from given list of directly referred tables in the query.
     * @return The list of directly referenced virtual or physical datasets
     */
    private List<ParentDatasetInfo> getParentsFromSql(List<SqlIdentifier> ancestors) {
      if (ancestors == null) {
        return null;
      }
      try {
        final List<ParentDatasetInfo> result = new ArrayList<>();
        for (SqlIdentifier sqlIdentifier : ancestors) {
          final NamespaceKey datasetPath = new NamespaceKey(sqlIdentifier.names);
          result.add(getDataset(datasetPath));
        }
        return result;
      } catch (Throwable e) {
        // Deliberately broad: parent extraction is best-effort and must not fail the job.
        logger.warn(
            "Failure while attempting to extract parents from dataset. This is likely due to " +
            "a datasource no longer being available that was used in a past job.",
            e);
        return Collections.emptyList();
      }
    }

    /**
     * Resolves a path to a ParentDatasetInfo via the namespace. If the root path
     * component itself embeds a delimited path, a "cleaned" fully-split variant is
     * also tried. Falls back to an info carrying only the cleaned path when the
     * namespace cannot resolve the dataset.
     */
    private ParentDatasetInfo getDataset(NamespaceKey path) {
      // fallback
      String rootEntityName = path.getRoot();
      List<String> cleanedPathComponents = Lists.newArrayList();

      if (rootEntityName.indexOf(PathUtils.getPathDelimiter()) > -1) {
        // Root component is itself a delimited path (e.g. "space.folder"): split it
        // and splice the remaining components after it.
        final List<String> spacePathComponents = PathUtils.parseFullPath(path.getRoot());
        cleanedPathComponents.addAll(spacePathComponents);
        List<String> pathComponents = path.getPathComponents();
        for (String folderName : pathComponents.subList(1, pathComponents.size())) {
          cleanedPathComponents.add(folderName);
        }
        rootEntityName = spacePathComponents.get(0);
      } else {
        cleanedPathComponents.addAll(path.getPathComponents());
      }

      // try the original path and then try the cleaned path.
      for (List<String> paths : Arrays.asList(path.getPathComponents(), cleanedPathComponents)) {
        try {
          List<NameSpaceContainer> containers = namespace.getEntities(Collections.singletonList(new NamespaceKey(paths)));
          if (!containers.isEmpty()) {
            final NameSpaceContainer container = containers.get(0);
            if (container != null && container.getType() == Type.DATASET) {
              DatasetConfig config = container.getDataset();
              return new ParentDatasetInfo()
                  .setDatasetPathList(config.getFullPathList())
                  .setType(config.getType());
            }
          }
        } catch (NamespaceException | IllegalArgumentException e) {
          // Ignore
        }
      }

      //TODO we couldn't find a dataset corresponding to path, should we throw an exception instead ??
      return new ParentDatasetInfo().setDatasetPathList(cleanedPathComponents);
    }
  }

  /**
   * Retrieves a list of source names referenced in the DatasetConfig.
   *
   * @param datasetConfig the DatasetConfig to inspect.
   * @return a list of source names found referenced in the DatasetConfig.
   */
  public static List<String> getSources(DatasetConfig datasetConfig) {
    final Set<String> sources = Sets.newHashSet();
    if (datasetConfig.getType() == DatasetType.VIRTUAL_DATASET) {
      getSourcesForVds(datasetConfig.getVirtualDataset(), sources);
    } else if (datasetConfig.getFullPathList() != null && datasetConfig.getFullPathList().size() > 0) {
      // For physical datasets the source is the first path component.
      sources.add(datasetConfig.getFullPathList().get(0));
    }
    return new ArrayList<>(sources);
  }

  /**
   * Checks vds for source references. It first checks for source references in the list of FieldOrigin.
   * Then it checks for source references with external query usage in the parents and grandparents.
   *
   * @param vds the Virtual Dataset to inspect.
   * @param sources the set of source names to add found source names to.
   */
  private static void getSourcesForVds(VirtualDataset vds, Set<String> sources) {
    getSourcesForVdsWithFieldOriginList(vds, sources);
    getSourcesForVdsWithExternalQuery(vds, sources);
  }

  /**
   * Checks the vds for source references in the FieldOrigin list.
* * @param vds the Virtual Dataset to inspect. * @param sources the set of source names to add found source names to. */ private static void getSourcesForVdsWithFieldOriginList(VirtualDataset vds, Set<String> sources) { if (vds.getFieldOriginsList() != null ) { for (FieldOrigin fieldOrigin : vds.getFieldOriginsList()) { for (Origin origin : listNotNull(fieldOrigin.getOriginsList())) { sources.add(origin.getTableList().get(0)); } } } } /** * Checks the vds for references of external query. It checks for references of external query * in the parents list and grandparents list. It adds the source name referenced to the given set * of sources if a reference to an external query dataset is found. * * @param vds the Virtual Dataset to inspect. * @param sources the set of source names to add found source names to. */ private static void getSourcesForVdsWithExternalQuery(VirtualDataset vds, Set<String> sources) { // Find sources of ParentDataset(s) that are external queries. final List<ParentDataset> parentDatasets = vds.getParentsList(); final List<ParentDataset> grandParentDatasets = vds.getGrandParentsList(); if (parentDatasets != null) { getSourcesFromParentDatasetForExternalQuery(parentDatasets, sources); } if (grandParentDatasets != null) { getSourcesFromParentDatasetForExternalQuery(grandParentDatasets, sources); } } /** * Iterates through the given list of ParentDataset. It adds the source name referenced to the * given set of sources if a reference to an external query dataset is found. * * @param parentDatasets a list of parent dataset to inspect. * @param sources the set of source names to add found source names to. 
*/ private static void getSourcesFromParentDatasetForExternalQuery(List<ParentDataset> parentDatasets, Set<String> sources) { for (ParentDataset parentDataset : parentDatasets) { final List<String> pathList = parentDataset.getDatasetPathList(); if (pathList.size() > 1 && pathList.get(1).equalsIgnoreCase("external_query")) { sources.add(pathList.get(0)); } } } public static List<List<String>> getScans(RelNode logicalPlan) { final ImmutableList.Builder<List<String>> builder = ImmutableList.builder(); logicalPlan.accept(new StatelessRelShuttleImpl() { @Override public RelNode visit(final TableScan scan) { builder.add(scan.getTable().getQualifiedName()); return super.visit(scan); } @Override public RelNode visit(RelNode other) { if (other instanceof ContainerRel) { ContainerRel containerRel = (ContainerRel)other; containerRel.getSubTree().accept(this); } return super.visit(other); } }); return builder.build(); } /* * extracting external query source name, plus the sql string for * reflection dependency */ public static List<String> getExternalQuerySources(RelNode logicalAfter) { final ImmutableList.Builder<String> builder = ImmutableList.builder(); logicalAfter.accept(new StatelessRelShuttleImpl(){ @Override public RelNode visit(RelNode other) { if (other instanceof ExternalQueryScanDrel) { ExternalQueryScanDrel drel = (ExternalQueryScanDrel) other; builder.add(drel.getPluginId().getConfig().getName()); builder.add(drel.getSql()); } return super.visit(other); } }); return builder.build(); } }
/*
 * Copyright (C) 2010 Klaus Reimer <k@ailis.de>
 * See LICENSE.txt for licensing information.
 */

package de.ailis.threedee.scene;

import java.nio.FloatBuffer;
import java.nio.IntBuffer;

import de.ailis.gramath.Color4f;
import de.ailis.threedee.exceptions.LightException;
import de.ailis.threedee.rendering.GL;
import de.ailis.threedee.rendering.Viewport;
import de.ailis.threedee.utils.BufferUtils;

/**
 * A light node.
 *
 * Each light claims one of the fixed-function GL light slots
 * (GL_LIGHT0 .. GL_LIGHT0 + GL_MAX_LIGHTS - 1) when first applied and
 * releases it again when removed. Slot allocation is a simple static
 * counter, so lights are expected to be removed in reverse order of
 * application.
 *
 * @author Klaus Reimer (k@ailis.de)
 */
public abstract class Light extends SceneNode
{
    /** The currently associated light id (-1 when no slot is claimed) */
    private int lightId = -1;

    /** The next free light id */
    private static int nextLightId = GL.GL_LIGHT0;

    /** The maximum lights (-1 until queried from the GL context) */
    private static int maxLights = -1;

    /** Direction for a spot light (points down the negative Z axis) */
    private static final FloatBuffer direction = (FloatBuffer) BufferUtils
        .createDirectFloatBuffer(3).put(0).put(0).put(-1).rewind();

    /** The ambient color of the light */
    private Color4f ambientColor = Color4f.BLACK;

    /** The specular color of the light */
    private Color4f specularColor = Color4f.WHITE;

    /** The diffuse color of the light */
    private Color4f diffuseColor = Color4f.WHITE;

    /** The light position */
    protected FloatBuffer position;


    /**
     * Creates a new light with default colors (White).
     */
    public Light()
    {
        this(Color4f.BLACK, Color4f.WHITE, Color4f.WHITE);
    }


    /**
     * Constructs a new light with the specified color. The color is used
     * for both the diffuse and the specular component; ambient stays black.
     *
     * @param color
     *            The color of the light
     */
    public Light(final Color4f color)
    {
        this(Color4f.BLACK, color, color);
    }


    /**
     * Constructs a new light with the specified colors.
     *
     * @param ambientColor
     *            The ambient color
     * @param specularColor
     *            The specular color
     * @param diffuseColor
     *            The diffuse color
     */
    public Light(final Color4f ambientColor, final Color4f specularColor,
        final Color4f diffuseColor)
    {
        this.ambientColor = ambientColor.asImmutable();
        this.specularColor = specularColor.asImmutable();
        this.diffuseColor = diffuseColor.asImmutable();
    }


    /**
     * Returns the ambient color.
     *
     * @return The ambient color
     */
    public Color4f getAmbientColor()
    {
        return this.ambientColor;
    }


    /**
     * Sets the ambient color.
     *
     * @param ambientColor
     *            The ambient color to set
     */
    public void setAmbientColor(final Color4f ambientColor)
    {
        this.ambientColor = ambientColor.asImmutable();
    }


    /**
     * Returns the specular color.
     *
     * @return The specular color
     */
    public Color4f getSpecularColor()
    {
        return this.specularColor;
    }


    /**
     * Sets the specular color.
     *
     * @param specularColor
     *            The specularColor to set
     */
    public void setSpecularColor(final Color4f specularColor)
    {
        this.specularColor = specularColor.asImmutable();
    }


    /**
     * Returns the diffuse color.
     *
     * @return The diffuse color
     */
    public Color4f getDiffuseColor()
    {
        return this.diffuseColor;
    }


    /**
     * Sets the diffuse color.
     *
     * @param diffuseColor
     *            The diffuse color to set
     */
    public void setDiffuseColor(final Color4f diffuseColor)
    {
        this.diffuseColor = diffuseColor.asImmutable();
    }


    /**
     * Sets the light color. The diffuse component is set to the given color
     * while the ambient and specular components are reset to black.
     *
     * NOTE(review): this differs from the Light(Color4f) constructor, which
     * also applies the color to the specular component -- confirm which
     * behavior is intended.
     *
     * @param color
     *            The color to set
     */
    public void setColor(final Color4f color)
    {
        setAmbientColor(Color4f.BLACK);
        setDiffuseColor(color);
        setSpecularColor(Color4f.BLACK);
    }


    /**
     * Returns the cut off angle in degree. For a point light or a directional
     * light this always returns 180.
     *
     * @return The cut off angle in degree.
     */
    public float getCutOff()
    {
        return 180f;
    }


    /**
     * Applies the light. Claims a GL light slot if necessary, uploads
     * position, spot parameters and colors, and enables the light.
     *
     * @param viewport
     *            The viewport
     */
    public void apply(final Viewport viewport)
    {
        final GL gl = viewport.getGL();
        final float cutOff = getCutOff();
        final int index = getLightId(gl);
        gl.glLight(index, GL.GL_POSITION, this.position);
        if (cutOff < 180f)
        {
            // Only real spot lights get a direction and a narrow cut-off
            gl.glLight(index, GL.GL_SPOT_DIRECTION, direction);
            gl.glLightf(index, GL.GL_SPOT_CUTOFF, cutOff);
        }
        else
        {
            gl.glLightf(index, GL.GL_SPOT_CUTOFF, 180f);
        }
        gl.glLight(index, GL.GL_AMBIENT, this.ambientColor.getBuffer());
        gl.glLight(index, GL.GL_DIFFUSE, this.diffuseColor.getBuffer());
        gl.glLight(index, GL.GL_SPECULAR, this.specularColor.getBuffer());
        gl.glEnable(index);
    }


    /**
     * Removes the light and releases its GL light slot. Does nothing when
     * the light currently holds no slot (never applied or already removed),
     * which previously caused an invalid glDisable(-1) call.
     *
     * @param viewport
     *            The viewport
     */
    public void remove(final Viewport viewport)
    {
        if (this.lightId < 0) return;
        viewport.getGL().glDisable(this.lightId);
        this.lightId = -1;
        // NOTE(review): counter-based release assumes lights are removed in
        // reverse order of application; out-of-order removal recycles the
        // wrong slot -- TODO confirm this usage pattern.
        nextLightId--;
    }


    /**
     * Returns the light id for this light. If it has none yet then a new
     * one is reserved.
     *
     * @param gl
     *            The GL context
     * @return The light index
     */
    private int getLightId(final GL gl)
    {
        // If light has already a light id then return this one
        int id = this.lightId;
        if (id >= 0) return id;

        // Get the maximum number of lights once
        if (maxLights == -1)
        {
            final IntBuffer buffer = BufferUtils.createDirectIntegerBuffer(1);
            gl.glGetIntegerv(GL.GL_MAX_LIGHTS, buffer);
            maxLights = buffer.get(0);
        }

        // Check if there is a light index available. Valid slots are
        // GL_LIGHT0 .. GL_LIGHT0 + maxLights - 1; the previous check
        // (>= maxLights + GL_LIGHT0 - 1) rejected the last valid slot.
        if (nextLightId >= GL.GL_LIGHT0 + maxLights)
            throw new LightException("Too many lights active (Max is "
                + maxLights + ")");

        // Claim light index and return it
        id = nextLightId++;
        this.lightId = id;
        return id;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sling.commons.scheduler.impl;

import static org.apache.sling.commons.scheduler.Scheduler.VALUE_RUN_ON_LEADER;
import static org.apache.sling.commons.scheduler.Scheduler.VALUE_RUN_ON_SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import org.apache.sling.commons.scheduler.Job;
import org.apache.sling.commons.scheduler.JobContext;
import org.apache.sling.testing.mock.osgi.MockOsgi;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.osgi.framework.BundleContext;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobKey;
import org.quartz.SchedulerException;

/**
 * Tests for {@link QuartzJobExecutor}: verifies that Runnable and Job based
 * pseudo jobs are executed (or skipped) depending on the "run on" settings
 * stored in the job data map.
 */
@RunWith(MockitoJUnitRunner.class)
public class QuartzJobExecutorTest {

    private BundleContext context;
    private QuartzJobExecutor jobExecutor;
    private QuartzScheduler quartzScheduler;

    /** Flag flipped by the pseudo jobs so tests can observe execution. */
    private volatile boolean isRunnablePseudoJobCompleted;

    @Mock
    private JobExecutionContext executionContext;

    @Before
    public void setUp() throws Exception {
        context = MockOsgi.newBundleContext();
        jobExecutor = new QuartzJobExecutor();
        quartzScheduler = ActivatedQuartzSchedulerFactory.create(context, "testName");
    }

    @Test
    public void testRunnableJob() throws SchedulerException, InterruptedException {
        final Thread pseudoJob = new Thread(new SimpleRunnableJob());
        final String name = "testName";
        final Map<String, Serializable> config = new HashMap<String, Serializable>();

        // The job is scheduled only so that a JobDetail exists for stubbing.
        quartzScheduler.addJob(1L, 1L, name, pseudoJob, config, "0 * * * * ?", true);
        final JobDetail detail = quartzScheduler.getSchedulers().get("testName").getScheduler().getJobDetail(JobKey.jobKey(name));
        when(executionContext.getJobDetail()).thenReturn(detail);

        isRunnablePseudoJobCompleted = false;
        jobExecutor.execute(executionContext);

        // Make sure the spawned thread has finished before asserting.
        if (pseudoJob.isAlive()) {
            synchronized (pseudoJob) {
                if (pseudoJob.isAlive()) {
                    pseudoJob.join();
                }
            }
        }
        assertTrue(isRunnablePseudoJobCompleted);
    }

    @Test
    public void testJob() throws SchedulerException {
        final Job simpleJob = new SimpleJob();
        final String name = "testName";
        final Map<String, Serializable> config = new HashMap<String, Serializable>();

        // The job is scheduled only so that a JobDetail exists for stubbing.
        quartzScheduler.addJob(1L, 1L, name, simpleJob, config, "0 * * * * ?", true);
        final JobDetail detail = quartzScheduler.getSchedulers().get("testName").getScheduler().getJobDetail(JobKey.jobKey(name));
        when(executionContext.getJobDetail()).thenReturn(detail);

        isRunnablePseudoJobCompleted = false;
        jobExecutor.execute(executionContext);
        assertTrue(isRunnablePseudoJobCompleted);
    }

    @Test
    public void testJobNotExecuted() throws SchedulerException {
        final Job simpleJob = new SimpleJob();
        final String name = "testName";
        final Map<String, Serializable> config = new HashMap<String, Serializable>();

        // The job is scheduled only so that a JobDetail exists for stubbing.
        quartzScheduler.addJob(1L, 1L, name, simpleJob, config, "0 * * * * ?", true);
        final JobDetail detail = quartzScheduler.getSchedulers().get("testName").getScheduler().getJobDetail(JobKey.jobKey(name));
        when(executionContext.getJobDetail()).thenReturn(detail);

        // Restricting the job to the leader must prevent execution here.
        detail.getJobDataMap().put(QuartzScheduler.DATA_MAP_RUN_ON, new String[]{VALUE_RUN_ON_LEADER});

        isRunnablePseudoJobCompleted = false;
        jobExecutor.execute(executionContext);
        assertFalse(isRunnablePseudoJobCompleted);
    }

    @Test
    public void testJobNotExecutedWithTwoRunOnParams() throws SchedulerException {
        final Job simpleJob = new SimpleJob();
        final String name = "testName";
        final Map<String, Serializable> config = new HashMap<String, Serializable>();

        // The job is scheduled only so that a JobDetail exists for stubbing.
        quartzScheduler.addJob(1L, 1L, name, simpleJob, config, "0 * * * * ?", true);
        final JobDetail detail = quartzScheduler.getSchedulers().get("testName").getScheduler().getJobDetail(JobKey.jobKey(name));
        when(executionContext.getJobDetail()).thenReturn(detail);

        // With a sling id matching neither run-on value the job must be skipped.
        detail.getJobDataMap().put(QuartzScheduler.DATA_MAP_RUN_ON, new String[]{VALUE_RUN_ON_LEADER, VALUE_RUN_ON_SINGLE});
        QuartzJobExecutor.SLING_ID = "ANY STRING NOT EQUAL TO OF VALUE_RUN_ON_LEADER OR" +
                "VALUE_RUN_ON_SINGLE JUST A TEST CASE, NOTHING MORE";

        isRunnablePseudoJobCompleted = false;
        jobExecutor.execute(executionContext);
        assertFalse(isRunnablePseudoJobCompleted);
    }

    @Test
    public void testJobExecutedWithTwoRunOnParams() throws SchedulerException {
        final Job simpleJob = new SimpleJob();
        final String name = "testName";
        final Map<String, Serializable> config = new HashMap<String, Serializable>();

        // The job is scheduled only so that a JobDetail exists for stubbing.
        quartzScheduler.addJob(1L, 1L, name, simpleJob, config, "0 * * * * ?", true);
        final JobDetail detail = quartzScheduler.getSchedulers().get("testName").getScheduler().getJobDetail(JobKey.jobKey(name));
        when(executionContext.getJobDetail()).thenReturn(detail);

        detail.getJobDataMap().put(QuartzScheduler.DATA_MAP_RUN_ON, new String[]{VALUE_RUN_ON_LEADER, VALUE_RUN_ON_SINGLE});

        // The sling id matches one of the run-on values, so execution proceeds.
        QuartzJobExecutor.SLING_ID = VALUE_RUN_ON_SINGLE;

        isRunnablePseudoJobCompleted = false;
        jobExecutor.execute(executionContext);
        assertTrue(isRunnablePseudoJobCompleted);
    }

    @Test
    public void testReferences() {
        final String name = "testName";
        final Map<String, Serializable> config = new HashMap<String, Serializable>();
        final QuartzJobExecutor.JobContextImpl jobContext = new QuartzJobExecutor.JobContextImpl(name, config);

        // The context must hand back exactly what was passed in.
        assertTrue(jobContext.getConfiguration().equals(config));
        assertTrue(jobContext.getName().equals(name));
    }

    @Test
    public void testLazyScheduler() {
        // No scheduler instance is created until a job is actually added.
        assertTrue(quartzScheduler.getSchedulers().isEmpty());
    }

    @After
    public void deactivateScheduler() {
        quartzScheduler.deactivate(context);
    }

    /** Pseudo job implementing the Sling Job interface; records its execution. */
    private class SimpleJob implements Job {
        @Override
        public void execute(JobContext context) {
            isRunnablePseudoJobCompleted = true;
        }
    }

    /** Pseudo job implementing plain Runnable; records its execution. */
    private class SimpleRunnableJob implements Runnable {
        @Override
        public void run() {
            isRunnablePseudoJobCompleted = true;
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.metrics.cardinality;

import com.google.common.base.Preconditions;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.index.fielddata.MurmurHash3Values;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;

import java.io.IOException;

/**
 * An aggregator that computes approximate counts of unique values.
 *
 * Values are hashed (MurmurHash3) and fed into a HyperLogLog++ sketch
 * ({@code counts}). Per segment, a {@link Collector} strategy is chosen in
 * {@link #createCollector}: either hashing values directly, or — for
 * low-cardinality ordinal-backed string fields — collecting ordinals first
 * and hashing each distinct value only once at post-collection time.
 */
public class CardinalityAggregator extends MetricsAggregator.SingleValue {

    private final int precision;
    private final boolean rehash;
    private final ValuesSource valuesSource;

    // Expensive to initialize, so we only initialize it when we have an actual value source
    @Nullable
    private HyperLogLogPlusPlus counts;

    // Per-segment collector; swapped on each setNextReader() call.
    private Collector collector;

    public CardinalityAggregator(String name, long estimatedBucketsCount, ValuesSource valuesSource, boolean rehash,
            int precision, AggregationContext context, Aggregator parent) {
        super(name, estimatedBucketsCount, context, parent);
        this.valuesSource = valuesSource;
        this.rehash = rehash;
        this.precision = precision;
        this.counts = valuesSource == null ? null : new HyperLogLogPlusPlus(precision, bigArrays, estimatedBucketsCount);
    }

    @Override
    public void setNextReader(AtomicReaderContext reader) {
        // Flush the previous segment's collector before switching segments.
        postCollectLastCollector();
        collector = createCollector(reader);
    }

    /**
     * Pick the cheapest collection strategy for the new segment.
     */
    private Collector createCollector(AtomicReaderContext reader) {

        // if rehash is false then the value source is either already hashed, or the user explicitly
        // requested not to hash the values (perhaps they already hashed the values themselves before indexing the doc)
        // so we can just work with the original value source as is
        if (!rehash) {
            LongValues hashValues = ((NumericValuesSource) valuesSource).longValues();
            return new DirectCollector(counts, hashValues);
        }

        if (valuesSource instanceof NumericValuesSource) {
            // Numeric values: hash doubles or longs on the fly.
            NumericValuesSource source = (NumericValuesSource) valuesSource;
            LongValues hashValues = source.isFloatingPoint() ?
                    MurmurHash3Values.wrap(source.doubleValues()) : MurmurHash3Values.wrap(source.longValues());
            return new DirectCollector(counts, hashValues);
        }

        final BytesValues bytesValues = valuesSource.bytesValues();
        if (bytesValues instanceof BytesValues.WithOrdinals) {
            BytesValues.WithOrdinals values = (BytesValues.WithOrdinals) bytesValues;
            final long maxOrd = values.ordinals().getMaxOrd();
            final long ordinalsMemoryUsage = OrdinalsCollector.memoryOverhead(maxOrd);
            final long countsMemoryUsage = HyperLogLogPlusPlus.memoryUsage(precision);
            // only use ordinals if they don't increase memory usage by more than 25%
            if (ordinalsMemoryUsage < countsMemoryUsage / 4) {
                return new OrdinalsCollector(counts, values, bigArrays);
            }
        }

        // Fallback: hash each bytes value directly per document.
        return new DirectCollector(counts, MurmurHash3Values.wrap(bytesValues));
    }

    @Override
    public boolean shouldCollect() {
        return valuesSource != null;
    }

    @Override
    public void collect(int doc, long owningBucketOrdinal) throws IOException {
        collector.collect(doc, owningBucketOrdinal);
    }

    /**
     * Run post-collection on the current collector and release it.
     * The finally block guarantees the reference is cleared even if
     * postCollect() throws.
     */
    private void postCollectLastCollector() {
        if (collector != null) {
            try {
                collector.postCollect();
                collector.release();
            } finally {
                collector = null;
            }
        }
    }

    @Override
    protected void doPostCollection() {
        postCollectLastCollector();
    }

    @Override
    public double metric(long owningBucketOrd) {
        return counts == null ? 0 : counts.cardinality(owningBucketOrd);
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) {
        if (counts == null || owningBucketOrdinal >= counts.maxBucket() || counts.cardinality(owningBucketOrdinal) == 0) {
            return buildEmptyAggregation();
        }
        // We need to build a copy because the returned Aggregation needs remain usable after
        // this Aggregator (and its HLL++ counters) is released.
        HyperLogLogPlusPlus copy = new HyperLogLogPlusPlus(precision, BigArrays.NON_RECYCLING_INSTANCE, 1);
        copy.merge(0, counts, owningBucketOrdinal);
        return new InternalCardinality(name, copy);
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        return new InternalCardinality(name, null);
    }

    @Override
    protected void doRelease() {
        Releasables.release(counts, collector);
    }

    /**
     * Per-segment value collection strategy.
     */
    private static interface Collector extends Releasable {

        void collect(int doc, long bucketOrd);

        // Called once after the segment is fully collected, before release().
        void postCollect();

    }

    /**
     * Feeds pre-hashed (or on-the-fly hashed) long values straight into the
     * HLL++ sketch, one call per value per document.
     */
    private static class DirectCollector implements Collector {

        private final LongValues hashes;
        private final HyperLogLogPlusPlus counts;

        DirectCollector(HyperLogLogPlusPlus counts, LongValues values) {
            this.counts = counts;
            this.hashes = values;
        }

        @Override
        public void collect(int doc, long bucketOrd) {
            final int valueCount = hashes.setDocument(doc);
            for (int i = 0; i < valueCount; ++i) {
                counts.collect(bucketOrd, hashes.nextValue());
            }
        }

        @Override
        public void postCollect() {
            // no-op
        }

        @Override
        public boolean release() throws ElasticsearchException {
            return true;
        }

    }

    /**
     * Records, per bucket, which ordinals were seen (one bit per ordinal);
     * the actual values are hashed only once per distinct ordinal in
     * {@link #postCollect()}. Worthwhile only when the ordinal bitsets are
     * small relative to the HLL++ sketch (checked in createCollector).
     */
    private static class OrdinalsCollector implements Collector {

        private static final long SHALLOW_FIXEDBITSET_SIZE = RamUsageEstimator.shallowSizeOfInstance(FixedBitSet.class);

        /**
         * Return an approximate memory overhead per bucket for this collector.
         */
        public static long memoryOverhead(long maxOrd) {
            return RamUsageEstimator.NUM_BYTES_OBJECT_REF + SHALLOW_FIXEDBITSET_SIZE + (maxOrd + 7) / 8; // 1 bit per ord
        }

        private final BigArrays bigArrays;
        private final BytesValues.WithOrdinals values;
        private final Ordinals.Docs ordinals;
        private final int maxOrd;
        private final HyperLogLogPlusPlus counts;
        // One FixedBitSet of seen ordinals per bucket, grown lazily.
        private ObjectArray<FixedBitSet> visitedOrds;

        OrdinalsCollector(HyperLogLogPlusPlus counts, BytesValues.WithOrdinals values, BigArrays bigArrays) {
            ordinals = values.ordinals();
            // FixedBitSet indexes by int, so the ordinal space must fit in an int.
            Preconditions.checkArgument(ordinals.getMaxOrd() <= Integer.MAX_VALUE);
            maxOrd = (int) ordinals.getMaxOrd();
            this.bigArrays = bigArrays;
            this.counts = counts;
            this.values = values;
            visitedOrds = bigArrays.newObjectArray(1);
        }

        @Override
        public void collect(int doc, long bucketOrd) {
            visitedOrds = bigArrays.grow(visitedOrds, bucketOrd + 1);
            FixedBitSet bits = visitedOrds.get(bucketOrd);
            if (bits == null) {
                bits = new FixedBitSet(maxOrd);
                visitedOrds.set(bucketOrd, bits);
            }
            final int valueCount = ordinals.setDocument(doc);
            for (int i = 0; i < valueCount; ++i) {
                bits.set((int) ordinals.nextOrd());
            }
        }

        @Override
        public void postCollect() {
            // Union of all buckets' ordinals: each distinct value is hashed once.
            final FixedBitSet allVisitedOrds = new FixedBitSet(maxOrd);
            for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
                final FixedBitSet bits = visitedOrds.get(bucket);
                if (bits != null) {
                    allVisitedOrds.or(bits);
                }
            }

            final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash = new org.elasticsearch.common.hash.MurmurHash3.Hash128();
            final LongArray hashes = bigArrays.newLongArray(maxOrd, false);
            boolean success = false;
            try {
                // First pass: compute the hash of every visited ordinal's value.
                for (int ord = allVisitedOrds.nextSetBit(0); ord != -1; ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : -1) {
                    final BytesRef value = values.getValueByOrd(ord);
                    org.elasticsearch.common.hash.MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
                    hashes.set(ord, hash.h1);
                }

                // Second pass: feed each bucket's visited hashes into the sketch.
                for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
                    final FixedBitSet bits = visitedOrds.get(bucket);
                    if (bits != null) {
                        for (int ord = bits.nextSetBit(0); ord != -1; ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : -1) {
                            counts.collect(bucket, hashes.get(ord));
                        }
                    }
                }
                success = true;
            } finally {
                // Release the scratch array whether or not hashing succeeded.
                Releasables.release(success, hashes);
            }
        }

        @Override
        public boolean release() throws ElasticsearchException {
            Releasables.release(visitedOrds);
            return true;
        }

    }

}
/** * Copyright (c) 2003 The Apereo Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/ecl2 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.contentreview.service; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.Optional; import org.sakaiproject.content.api.ContentResource; import org.sakaiproject.contentreview.dao.ContentReviewConstants; import org.sakaiproject.contentreview.dao.ContentReviewItem; import org.sakaiproject.contentreview.dao.ContentReviewItemDao; import org.sakaiproject.contentreview.exception.QueueException; import org.sakaiproject.contentreview.exception.ReportException; import org.sakaiproject.contentreview.exception.SubmissionException; import org.springframework.transaction.annotation.Transactional; import lombok.Setter; import lombok.extern.apachecommons.CommonsLog; @CommonsLog public class ContentReviewQueueServiceImpl implements ContentReviewQueueService { @Setter private ContentReviewItemDao itemDao; /* (non-Javadoc) * @see org.sakaiproject.contentreview.common.service.ContentReviewCommonService#queueContent(java.lang.Integer, java.lang.String, java.lang.String, java.lang.String, java.util.List, int) */ @Override @Transactional public void queueContent(Integer providerId, String userId, String siteId, String taskId, List<ContentResource> content) throws QueueException { Objects.requireNonNull(providerId, "providerId cannot be null"); Objects.requireNonNull(userId, "userId cannot be null"); Objects.requireNonNull(siteId, 
"siteId cannot be null"); Objects.requireNonNull(taskId, "taskId cannot be null"); Objects.requireNonNull(content, "content cannot be null"); for (ContentResource resource : content) { String contentId = resource.getId(); /* * first check that this content has not been submitted before this may * not be the best way to do this - perhaps use contentId as the primary * key for now id is the primary key and so the database won't complain * if we put in repeats necessitating the check */ Optional<ContentReviewItem> existingItem = itemDao.findByProviderAndContentId(providerId, contentId); if (existingItem.isPresent()) { throw new QueueException("Content " + contentId + " is already queued"); } ContentReviewItem item = new ContentReviewItem(contentId, userId, siteId, taskId, new Date(), ContentReviewConstants.CONTENT_REVIEW_NOT_SUBMITTED_CODE, providerId); log.debug("Adding content: " + contentId + " from site " + siteId + " and user: " + userId + " for task: " + taskId + " to submission queue"); itemDao.create(item); } } /* (non-Javadoc) * @see org.sakaiproject.contentreview.common.service.ContentReviewCommonService#getReviewScore(java.lang.Integer, java.lang.String, java.lang.String, java.lang.String) */ @Override @Transactional(readOnly=true) public int getReviewScore(Integer providerId, String contentId) throws QueueException, ReportException, Exception { Objects.requireNonNull(providerId, "providerId cannot be null"); Objects.requireNonNull(contentId, "contentId cannot be null"); log.debug("Getting review score for providerId: " + providerId + " contentId: " + contentId); Optional<ContentReviewItem> matchingItem = itemDao.findByProviderAndContentId(providerId, contentId); if (!matchingItem.isPresent()) { log.debug("Content " + contentId + " has not been queued previously"); throw new QueueException("Content " + contentId + " has not been queued previously"); } ContentReviewItem item = matchingItem.get(); if 
(item.getStatus().compareTo(ContentReviewConstants.CONTENT_REVIEW_SUBMITTED_REPORT_AVAILABLE_CODE) != 0) { log.debug("Report not available: " + item.getStatus()); throw new ReportException("Report not available: " + item.getStatus()); } return item.getReviewScore().intValue(); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.common.service.ContentReviewCommonService#getReviewStatus(java.lang.Integer, java.lang.String) */ @Override @Transactional(readOnly=true) public Long getReviewStatus(Integer providerId, String contentId) throws QueueException { Objects.requireNonNull(providerId, "providerId cannot be null"); Objects.requireNonNull(contentId, "contentId cannot be null"); log.debug("Returning review status for content: " + contentId); Optional<ContentReviewItem> matchingItem = itemDao.findByProviderAndContentId(providerId, contentId); if (!matchingItem.isPresent()) { log.debug("Content " + contentId + " has not been queued previously"); throw new QueueException("Content " + contentId + " has not been queued previously"); } return matchingItem.get().getStatus(); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.common.service.ContentReviewCommonService#getDateQueued(java.lang.Integer, java.lang.String) */ @Override @Transactional(readOnly=true) public Date getDateQueued(Integer providerId, String contentId) throws QueueException { Objects.requireNonNull(providerId, "providerId cannot be null"); Objects.requireNonNull(contentId, "contentId cannot be null"); log.debug("Returning date queued for content: " + contentId); Optional<ContentReviewItem> matchingItem = itemDao.findByProviderAndContentId(providerId, contentId); if (!matchingItem.isPresent()) { log.debug("Content " + contentId + " has not been queued previously"); throw new QueueException("Content " + contentId + " has not been queued previously"); } return matchingItem.get().getDateQueued(); } /* (non-Javadoc) * @see 
org.sakaiproject.contentreview.common.service.ContentReviewCommonService#getDateSubmitted(java.lang.Integer, java.lang.String) */ @Override @Transactional(readOnly=true) public Date getDateSubmitted(Integer providerId, String contentId) throws QueueException, SubmissionException { Objects.requireNonNull(providerId, "providerId cannot be null"); Objects.requireNonNull(contentId, "contentId cannot be null"); log.debug("Returning date queued for content: " + contentId); Optional<ContentReviewItem> matchingItem = itemDao.findByProviderAndContentId(providerId, contentId); if (!matchingItem.isPresent()) { log.debug("Content " + contentId + " has not been queued previously"); throw new QueueException("Content " + contentId + " has not been queued previously"); } ContentReviewItem item = matchingItem.get(); if (item.getDateSubmitted() == null) { log.debug("Content not yet submitted: " + item.getStatus()); throw new SubmissionException("Content not yet submitted: " + item.getStatus()); } return item.getDateSubmitted(); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.common.service.ContentReviewCommonService#getAllContentReviewItems(java.lang.Integer, java.lang.String, java.lang.String) */ @Override @Transactional(readOnly=true) public List<ContentReviewItem> getContentReviewItems(Integer providerId, String siteId, String taskId) { Objects.requireNonNull(providerId, "providerId cannot be null"); return itemDao.findByProviderAnyMatching(providerId, null, null, siteId, taskId, null, null, null); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.common.service.ContentReviewCommonService#getAllContentReviewItems(java.lang.Integer, java.lang.String, java.lang.String) */ @Override @Transactional(readOnly=true) public List<ContentReviewItem> getAllContentReviewItemsGroupedBySiteAndTask(Integer providerId) { Objects.requireNonNull(providerId, "providerId cannot be null"); log.debug("Returning list of items grouped by site and task"); return 
itemDao.findByProviderGroupedBySiteAndTask(providerId); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.common.service.ContentReviewCommonService#resetUserDetailsLockedItems(java.lang.Integer, java.lang.String) */ @Override @Transactional public void resetUserDetailsLockedItems(Integer providerId, String userId) { Objects.requireNonNull(providerId, "providerId cannot be null"); Objects.requireNonNull(userId, "userId cannot be null"); List<ContentReviewItem> lockedItems = itemDao.findByProviderAnyMatching(providerId, null, userId, null, null, null, ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_USER_DETAILS_CODE, null); for (ContentReviewItem item : lockedItems) { item.setStatus(ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_RETRY_CODE); itemDao.save(item); } } /* (non-Javadoc) * @see org.sakaiproject.contentreview.common.service.ContentReviewCommonService#removeFromQueue(java.lang.Integer, java.lang.String) */ @Override @Transactional public void removeFromQueue(Integer providerId, String contentId) { Objects.requireNonNull(providerId, "providerId cannot be null"); Objects.requireNonNull(contentId, "contentId cannot be null"); Optional<ContentReviewItem> item = itemDao.findByProviderAndContentId(providerId, contentId); if (item.isPresent()) { itemDao.delete(item.get()); } } /* (non-Javadoc) * @see org.sakaiproject.contentreview.service.ContentReviewQueueService#getQueuedItem(java.lang.Integer, java.lang.String) */ @Override @Transactional(readOnly=true) public Optional<ContentReviewItem> getQueuedItem(Integer providerId, String contentId) { Objects.requireNonNull(providerId, "providerId cannot be null"); Objects.requireNonNull(contentId, "contentId cannot be null"); return itemDao.findByProviderAndContentId(providerId, contentId); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.service.ContentReviewQueueService#getQueuedNotSubmittedItems(java.lang.Integer) */ @Override @Transactional(readOnly=true) public List<ContentReviewItem> 
getQueuedNotSubmittedItems(Integer providerId) { return itemDao.findByProviderAnyMatching(providerId, null, null, null, null, null, ContentReviewConstants.CONTENT_REVIEW_NOT_SUBMITTED_CODE, null); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.service.ContentReviewQueueService#getNextItemInQueueToSubmit(java.lang.Integer) */ @Override @Transactional(readOnly=true) public Optional<ContentReviewItem> getNextItemInQueueToSubmit(Integer providerId) { Objects.requireNonNull(providerId, "providerId cannot be null"); return itemDao.findByProviderSingleItemToSubmit(providerId); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.service.ContentReviewQueueService#getAwaitingReports(java.lang.Integer) */ @Override @Transactional(readOnly=true) public List<ContentReviewItem> getAwaitingReports(Integer providerId) { Objects.requireNonNull(providerId, "providerId cannot be null"); return itemDao.findByProviderAwaitingReports(providerId); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.service.ContentReviewQueueService#update(org.sakaiproject.contentreview.dao.ContentReviewItem) */ @Override @Transactional public void update(ContentReviewItem item) { Objects.requireNonNull(item, "item cannot be null"); Objects.requireNonNull(item.getId(), "Id cannot be null"); Objects.requireNonNull(item.getProviderId(), "providerId cannot be null"); itemDao.save(item); } /* (non-Javadoc) * @see org.sakaiproject.contentreview.service.ContentReviewQueueService#delete(org.sakaiproject.contentreview.dao.ContentReviewItem) */ @Override @Transactional public void delete(ContentReviewItem item) { Objects.requireNonNull(item, "item cannot be null"); Objects.requireNonNull(item.getId(), "Id cannot be null"); Objects.requireNonNull(item.getProviderId(), "providerId cannot be null"); itemDao.delete(item); } }
/*
 * Copyright (c) 2015 Andrej Halaj
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.perfcake.pc4nb.ui.wizards.visuals;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JTable;
import org.perfcake.model.Scenario;
import org.perfcake.model.Scenario.Reporting.Reporter;
import org.perfcake.pc4nb.model.ModelMap;
import org.perfcake.pc4nb.model.ReporterModel;
import org.perfcake.pc4nb.model.ReportingModel;
import org.perfcake.pc4nb.ui.AbstractPC4NBView;
import org.perfcake.pc4nb.ui.tableModel.ReportersTableModel;
import org.perfcake.pc4nb.ui.actions.AddReporterAction;
import org.perfcake.pc4nb.ui.actions.DeleteReportersAction;
import org.perfcake.pc4nb.ui.actions.EditReporterAction;

/**
 * Wizard panel that lists the reporters of a PerfCake scenario in a table and
 * offers Add / Edit / Delete actions for them.
 * <p>
 * The panel registers itself as a property-change listener on each reporter's
 * PC4NB model so the table stays in sync with model mutations performed by the
 * add/edit/delete actions.
 */
public final class ReportingVisualPanel extends AbstractPC4NBView {

    /**
     * Builds the panel around a fresh, empty {@code Scenario.Reporting} model,
     * wires model listeners, initializes the generated form and attaches the
     * button action listeners.
     */
    public ReportingVisualPanel() {
        setModel(new ReportingModel(new Scenario.Reporting()));

        // Listen to every existing reporter so edits elsewhere refresh our table.
        for (Reporter reporter : ((ReportingModel) getModel()).getReporting().getReporter()) {
            ModelMap.getDefault().getPC4NBModelFor(reporter).addPropertyChangeListener(this);
        }

        // Register the reporting JAXB object -> PC4NB model mapping for lookups.
        ModelMap.getDefault().addEntry(((ReportingModel) getModel()).getReporting(), getModel());
        initComponents();

        addReporterButton.addActionListener(new AddReporterListener());
        editReporterButton.addActionListener(new EditReporterListener());
        deleteReporterButton.addActionListener(new DeleteReporterListener());
    }

    @Override
    public String getName() {
        // Name shown for this wizard step.
        return "Reporting";
    }

    /** @return the table model backing the reporters table. */
    public ReportersTableModel getReportersTableModel() {
        return reportersTableModel;
    }

    /** @return the Swing table displaying the reporters. */
    public JTable getReportersTable() {
        return reportersTable;
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jScrollPane2 = new javax.swing.JScrollPane();
        reportersTable = new javax.swing.JTable();
        addReporterButton = new javax.swing.JButton();
        editReporterButton = new javax.swing.JButton();
        deleteReporterButton = new javax.swing.JButton();
        reportersLabel = new javax.swing.JLabel();
        reportersTableModel = new ReportersTableModel();

        reportersTable.setModel(reportersTableModel);
        jScrollPane2.setViewportView(reportersTable);

        org.openide.awt.Mnemonics.setLocalizedText(addReporterButton, org.openide.util.NbBundle.getMessage(ReportingVisualPanel.class, "ReportingVisualPanel.addReporterButton.text")); // NOI18N

        org.openide.awt.Mnemonics.setLocalizedText(editReporterButton, org.openide.util.NbBundle.getMessage(ReportingVisualPanel.class, "ReportingVisualPanel.editReporterButton.text")); // NOI18N

        org.openide.awt.Mnemonics.setLocalizedText(deleteReporterButton, org.openide.util.NbBundle.getMessage(ReportingVisualPanel.class, "ReportingVisualPanel.deleteReporterButton.text")); // NOI18N

        org.openide.awt.Mnemonics.setLocalizedText(reportersLabel, org.openide.util.NbBundle.getMessage(ReportingVisualPanel.class, "ReportingVisualPanel.reportersLabel.text")); // NOI18N

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(42, 42, 42)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(reportersLabel)
                        .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(43, 43, 43)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(addReporterButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(editReporterButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(deleteReporterButton, javax.swing.GroupLayout.DEFAULT_SIZE, 97, Short.MAX_VALUE))
                        .addGap(47, 47, 47))))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(12, 12, 12)
                        .addComponent(reportersLabel)
                        .addGap(18, 18, 18)
                        .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 187, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addGroup(layout.createSequentialGroup()
                        .addGap(71, 71, 71)
                        .addComponent(addReporterButton, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(editReporterButton, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(deleteReporterButton, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addContainerGap(256, Short.MAX_VALUE))
        );
    }// </editor-fold>//GEN-END:initComponents

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton addReporterButton;
    private javax.swing.JButton deleteReporterButton;
    private javax.swing.JButton editReporterButton;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JLabel reportersLabel;
    private javax.swing.JTable reportersTable;
    private ReportersTableModel reportersTableModel;
    // End of variables declaration//GEN-END:variables

    /**
     * Keeps the table model synchronized with the underlying PC4NB models:
     * reporter additions/removals update the row set, and class/enabled edits
     * refresh the affected row.
     */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        ReportingModel model = (ReportingModel) getModel();
        List<Reporter> reportersList = model.getReporting().getReporter();
        int targetIndex;

        switch (evt.getPropertyName()) {
            case ReportingModel.PROPERTY_REPORTERS:
                // newValue set -> reporter added; oldValue set -> reporter removed.
                if (evt.getNewValue() != null) {
                    targetIndex = reportersList.indexOf(evt.getNewValue());
                    reportersTableModel.insertRow(targetIndex, (Reporter) evt.getNewValue());
                } else if (evt.getOldValue() != null) {
                    targetIndex = reportersTableModel.getReporters().indexOf(evt.getOldValue());
                    reportersTableModel.removeRow(targetIndex);
                } else {
                    // error
                    // NOTE(review): both values null is unexpected and silently
                    // ignored here — consider logging if this ever occurs.
                }
                break;
            case ReporterModel.PROPERTY_CLASS:
            case ReporterModel.PROPERTY_ENABLED:
                // A single reporter changed; repaint just its row.
                ReporterModel reporterModel = (ReporterModel) evt.getSource();
                Reporter reporter = reporterModel.getReporter();
                targetIndex = reportersTableModel.getReporters().indexOf(reporter);
                reportersTableModel.updateRow(targetIndex, reporter);
                break;
            default:
                break;
        }
    }

    /** Opens the "add reporter" dialog via {@link AddReporterAction}. */
    private class AddReporterListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent e) {
            AddReporterAction action = new AddReporterAction(getModel());
            action.execute();
        }
    }

    /** Opens the edit dialog for the single selected reporter, if any. */
    private class EditReporterListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent e) {
            int selectedRow = ReportingVisualPanel.this.getReportersTable().getSelectedRow();

            // -1 means no selection; do nothing in that case.
            if (selectedRow != -1) {
                ReportingModel reportingModel = (ReportingModel) ReportingVisualPanel.this.getModel();
                Reporter reporter = reportingModel.getReporting().getReporter().get(selectedRow);
                EditReporterAction action = new EditReporterAction((ReporterModel) ModelMap.getDefault().getPC4NBModelFor(reporter));
                action.execute();
            }
        }
    }

    /** Deletes every selected reporter in one batch action. */
    private class DeleteReporterListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent e) {
            int[] selectedRows = ReportingVisualPanel.this.getReportersTable().getSelectedRows();
            ReportingModel reportingModel = (ReportingModel) ReportingVisualPanel.this.getModel();

            // Collect first, then delete, so row indices stay valid while reading.
            List<Reporter> toRemove = new ArrayList<>();

            for (int i = 0; i < selectedRows.length; i++) {
                Reporter reporter = reportingModel.getReporting().getReporter().get(selectedRows[i]);
                toRemove.add(reporter);
            }

            DeleteReportersAction action = new DeleteReportersAction((ReportingModel) getModel(), toRemove);
            action.execute();
        }
    }
}
package edu.utah.ece.async.ibiosim.synthesis.GateGenerator;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.junit.Assert;
import org.junit.Test;
import org.sbolstandard.core2.ComponentDefinition;
import org.sbolstandard.core2.FunctionalComponent;
import org.sbolstandard.core2.ModuleDefinition;
import org.sbolstandard.core2.SBOLConversionException;
import org.sbolstandard.core2.SBOLDocument;
import org.sbolstandard.core2.SBOLReader;
import org.sbolstandard.core2.SBOLValidationException;

import edu.utah.ece.async.ibiosim.dataModels.sbol.SBOLUtility;
import edu.utah.ece.async.ibiosim.dataModels.util.exceptions.SBOLException;
import edu.utah.ece.async.ibiosim.synthesis.TestingFiles;
import edu.utah.ece.async.ibiosim.synthesis.GeneticGates.ANDGate;
import edu.utah.ece.async.ibiosim.synthesis.GeneticGates.GateIdentifier;
import edu.utah.ece.async.ibiosim.synthesis.GeneticGates.GeneticGate;
import edu.utah.ece.async.ibiosim.synthesis.GeneticGates.NANDGate;
import edu.utah.ece.async.ibiosim.synthesis.GeneticGates.NORGate;
import edu.utah.ece.async.ibiosim.synthesis.GeneticGates.NOTGate;
import edu.utah.ece.async.ibiosim.synthesis.GeneticGates.ORGate;
import edu.utah.ece.async.ibiosim.synthesis.GeneticGates.WiredORGate;

/**
 * Test gate types that are supported in GateIdentifier class.
 * <p>
 * Checked exceptions are declared on the test methods instead of being caught
 * and printed: the original try/catch blocks only called
 * {@code printStackTrace()}, which made a failing setup pass the test
 * vacuously. With {@code throws}, JUnit reports any such exception as an
 * error.
 *
 * @author Tramy Nguyen
 */
public class GateIdentifier_Test {

	/** A single-NOT library file must be identified as a NOT gate. */
	@Test
	public void Test_NOT() throws IOException, SBOLConversionException, SBOLValidationException, GateGenerationExeception {
		SBOLDocument inFile = SBOLReader.read(new File(TestingFiles.NOT_LibSize1));
		Assert.assertEquals(1, inFile.getRootModuleDefinitions().size());
		ModuleDefinition md = inFile.getRootModuleDefinitions().iterator().next();
		GateIdentifier sortInstance = new GateIdentifier(inFile, md);
		GeneticGate gate = sortInstance.getIdentifiedGate();
		Assert.assertTrue(gate instanceof NOTGate);
	}

	/** An OR design maps to ORGate with two protein inputs and one output. */
	@Test
	public void Test_OR() throws SBOLException, SBOLValidationException, GateGenerationExeception {
		SBOLDocument orGate = SyntheticGateExamples.createORGate();
		Assert.assertEquals(1, orGate.getRootModuleDefinitions().size());
		ModuleDefinition md = orGate.getRootModuleDefinitions().iterator().next();
		GateIdentifier sortInstance = new GateIdentifier(orGate, md);
		GeneticGate gate = sortInstance.getIdentifiedGate();
		Assert.assertTrue(gate instanceof ORGate);

		Assert.assertEquals(2, gate.getListOfInputs().size());
		for (FunctionalComponent fc : gate.getListOfInputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC1_x0_protein") || fc.getDisplayId().equals("FC2_x1_protein"));
		}
		Assert.assertEquals(1, gate.getListOfOutputs().size());
		for (FunctionalComponent fc : gate.getListOfOutputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC0_Y_protein"));
		}
	}

	/** First NOR variant: two protein inputs, one protein output. */
	@Test
	public void Test1_NOR() throws SBOLException, SBOLValidationException, GateGenerationExeception {
		SBOLDocument norGate = SyntheticGateExamples.createNORGate1();
		ModuleDefinition md = norGate.getRootModuleDefinitions().iterator().next();
		GateIdentifier sortInstance = new GateIdentifier(norGate, md);
		GeneticGate gate = sortInstance.getIdentifiedGate();
		Assert.assertTrue(gate instanceof NORGate);

		Assert.assertEquals(2, gate.getListOfInputs().size());
		for (FunctionalComponent fc : gate.getListOfInputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC1_x0_protein") || fc.getDisplayId().equals("FC2_x1_protein"));
		}
		Assert.assertEquals(1, gate.getListOfOutputs().size());
		for (FunctionalComponent fc : gate.getListOfOutputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC0_Y_protein"));
		}
	}

	/** Second NOR variant: same signal layout, different construction. */
	@Test
	public void Test2_NOR() throws SBOLException, SBOLValidationException, GateGenerationExeception {
		SBOLDocument norGate = SyntheticGateExamples.createNORGate2();
		ModuleDefinition md = norGate.getRootModuleDefinitions().iterator().next();
		GateIdentifier sortInstance = new GateIdentifier(norGate, md);
		GeneticGate gate = sortInstance.getIdentifiedGate();
		Assert.assertTrue(gate instanceof NORGate);

		Assert.assertEquals(2, gate.getListOfInputs().size());
		for (FunctionalComponent fc : gate.getListOfInputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC1_x0_protein") || fc.getDisplayId().equals("FC2_x1_protein"));
		}
		Assert.assertEquals(1, gate.getListOfOutputs().size());
		for (FunctionalComponent fc : gate.getListOfOutputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC0_Y_protein"));
		}
	}

	/** Third NOR variant: one input is a small molecule rather than a protein. */
	@Test
	public void Test3_NOR() throws SBOLException, SBOLValidationException, GateGenerationExeception {
		SBOLDocument norGate = SyntheticGateExamples.createNORGate3();
		ModuleDefinition md = norGate.getRootModuleDefinitions().iterator().next();
		GateIdentifier sortInstance = new GateIdentifier(norGate, md);
		GeneticGate gate = sortInstance.getIdentifiedGate();
		Assert.assertTrue(gate instanceof NORGate);

		Assert.assertEquals(2, gate.getListOfInputs().size());
		for (FunctionalComponent fc : gate.getListOfInputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC1_x0_protein") || fc.getDisplayId().equals("FC2_x1_smallMolecule"));
		}
		Assert.assertEquals(1, gate.getListOfOutputs().size());
		for (FunctionalComponent fc : gate.getListOfOutputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC0_Y_protein"));
		}
	}

	/** A NAND design maps to NANDGate with the standard two-in/one-out layout. */
	@Test
	public void Test_NAND() throws SBOLException, SBOLValidationException, GateGenerationExeception {
		SBOLDocument nandGate = SyntheticGateExamples.createNANDGate();
		ModuleDefinition md = nandGate.getRootModuleDefinitions().iterator().next();
		GateIdentifier sortInstance = new GateIdentifier(nandGate, md);
		GeneticGate gate = sortInstance.getIdentifiedGate();
		Assert.assertTrue(gate instanceof NANDGate);

		Assert.assertEquals(2, gate.getListOfInputs().size());
		for (FunctionalComponent fc : gate.getListOfInputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC1_x0_protein") || fc.getDisplayId().equals("FC2_x1_protein"));
		}
		Assert.assertEquals(1, gate.getListOfOutputs().size());
		for (FunctionalComponent fc : gate.getListOfOutputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC0_Y_protein"));
		}
	}

	/** An AND design maps to ANDGate with the standard two-in/one-out layout. */
	@Test
	public void Test_AND() throws SBOLException, SBOLValidationException, GateGenerationExeception {
		SBOLDocument nandGate = SyntheticGateExamples.createANDGate();
		ModuleDefinition md = nandGate.getRootModuleDefinitions().iterator().next();
		GateIdentifier sortInstance = new GateIdentifier(nandGate, md);
		GeneticGate gate = sortInstance.getIdentifiedGate();
		Assert.assertTrue(gate instanceof ANDGate);

		Assert.assertEquals(2, gate.getListOfInputs().size());
		for (FunctionalComponent fc : gate.getListOfInputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC1_x0_protein") || fc.getDisplayId().equals("FC2_x1_protein"));
		}
		Assert.assertEquals(1, gate.getListOfOutputs().size());
		for (FunctionalComponent fc : gate.getListOfOutputs()) {
			Assert.assertTrue(fc.getDisplayId().equals("FC0_Y_protein"));
		}
	}

	/** Wiring a single NOT gate yields one WiredORGate whose signals are all YFP. */
	@Test
	public void Test1_WiredOR() throws SBOLValidationException, IOException, SBOLConversionException, GateGenerationExeception, SBOLException {
		SBOLDocument gate = SBOLUtility.getSBOLUtility().createSBOLDocument();
		gate.read(TestingFiles.yfp1NOT_LibSize1);
		GateIdentifier gateType = new GateIdentifier(gate, gate.getRootModuleDefinitions().iterator().next());
		List<GeneticGate> listOfNot = new ArrayList<>();
		listOfNot.add(gateType.getIdentifiedGate());

		GateGeneration gateGen = new GateGeneration();
		List<GeneticGate> wiredGateList = gateGen.generateWiredORGates(listOfNot);
		Assert.assertTrue(1 == wiredGateList.size());
		Assert.assertTrue(wiredGateList.get(0) instanceof WiredORGate);
		GeneticGate g1 = wiredGateList.get(0);
		Assert.assertTrue(g1 instanceof WiredORGate);

		List<ComponentDefinition> actualSignals = new ArrayList<>();
		actualSignals.addAll(g1.getListOfInputsAsComponentDefinition());
		actualSignals.addAll(g1.getListOfOutputsAsComponentDefinition());
		URI yfpSignal = URI.create("https://synbiohub.programmingbiology.org/public/Eco1C1G1T1/YFP_protein/1");
		for (ComponentDefinition cd : actualSignals) {
			Assert.assertTrue(cd.getIdentity().equals(yfpSignal));
		}
	}

	/** Two YFP NOT libraries still produce a single wired-OR over the YFP signal. */
	@Test
	public void Test2_WiredOR() throws SBOLValidationException, IOException, SBOLConversionException, GateGenerationExeception, SBOLException {
		SBOLDocument gate = SBOLUtility.getSBOLUtility().createSBOLDocument();
		gate.read(TestingFiles.yfp1NOT_LibSize1);
		gate.read(TestingFiles.yfp2NOT_LibSize1);
		GateIdentifier gateType = new GateIdentifier(gate, gate.getRootModuleDefinitions().iterator().next());
		List<GeneticGate> listOfNot = new ArrayList<>();
		listOfNot.add(gateType.getIdentifiedGate());

		GateGeneration gateGen = new GateGeneration();
		List<GeneticGate> wiredGateList = gateGen.generateWiredORGates(listOfNot);
		Assert.assertTrue(1 == wiredGateList.size());
		Assert.assertTrue(wiredGateList.get(0) instanceof WiredORGate);
		GeneticGate g1 = wiredGateList.get(0);
		Assert.assertTrue(g1 instanceof WiredORGate);

		List<ComponentDefinition> actualSignals = new ArrayList<>();
		actualSignals.addAll(g1.getListOfInputsAsComponentDefinition());
		actualSignals.addAll(g1.getListOfOutputsAsComponentDefinition());
		URI yfpSignal = URI.create("https://synbiohub.programmingbiology.org/public/Eco1C1G1T1/YFP_protein/1");
		for (ComponentDefinition cd : actualSignals) {
			Assert.assertTrue(cd.getIdentity().equals(yfpSignal));
		}
	}

	/** A size-2 NOT library collapses to one wired-OR on the shared PsrA signal. */
	@Test
	public void Test3_WiredOR() throws SBOLValidationException, IOException, SBOLConversionException, GateGenerationExeception, SBOLException {
		SBOLDocument gate = SBOLUtility.getSBOLUtility().createSBOLDocument();
		gate.read(TestingFiles.NOT_LibSize2);
		List<GeneticGate> listOfNot = new ArrayList<>();
		for (ModuleDefinition md : gate.getRootModuleDefinitions()) {
			GateIdentifier gateType = new GateIdentifier(gate, md);
			listOfNot.add(gateType.getIdentifiedGate());
		}

		GateGeneration gateGen = new GateGeneration();
		List<GeneticGate> wiredGateList = gateGen.generateWiredORGates(listOfNot);
		Assert.assertTrue(1 == wiredGateList.size());
		Assert.assertTrue(wiredGateList.get(0) instanceof WiredORGate);
		GeneticGate g1 = wiredGateList.get(0);
		Assert.assertTrue(g1 instanceof WiredORGate);

		List<ComponentDefinition> actualSignals = new ArrayList<>();
		actualSignals.addAll(g1.getListOfInputsAsComponentDefinition());
		actualSignals.addAll(g1.getListOfOutputsAsComponentDefinition());
		URI psraSignal = URI.create("https://synbiohub.programmingbiology.org/public/Eco1C1G1T1/PsrA_protein/1");
		for (ComponentDefinition cd : actualSignals) {
			Assert.assertTrue(cd.getIdentity().equals(psraSignal));
		}
	}

	/** Mixed libraries produce two wired-OR gates, one per distinct signal. */
	@Test
	public void Test4_WiredOR() throws SBOLValidationException, IOException, SBOLConversionException, GateGenerationExeception, SBOLException {
		SBOLDocument gate = SBOLUtility.getSBOLUtility().createSBOLDocument();
		gate.read(TestingFiles.NOT_LibSize2);
		gate.read(TestingFiles.yfp1NOT_LibSize1);
		List<GeneticGate> listOfNot = new ArrayList<>();
		for (ModuleDefinition md : gate.getRootModuleDefinitions()) {
			GateIdentifier gateType = new GateIdentifier(gate, md);
			listOfNot.add(gateType.getIdentifiedGate());
		}

		GateGeneration gateGen = new GateGeneration();
		List<GeneticGate> wiredGateList = gateGen.generateWiredORGates(listOfNot);
		Assert.assertTrue(2 == wiredGateList.size());
		GeneticGate g1 = wiredGateList.get(0);
		GeneticGate g2 = wiredGateList.get(1);
		Assert.assertTrue(g1 instanceof WiredORGate);
		Assert.assertTrue(g2 instanceof WiredORGate);

		List<ComponentDefinition> actualSignals = new ArrayList<>();
		actualSignals.addAll(g1.getListOfInputsAsComponentDefinition());
		actualSignals.addAll(g1.getListOfOutputsAsComponentDefinition());
		// Bug fix: the original added g2's OUTPUTS twice and never its inputs,
		// so g2's input signals were never checked against the expected URIs.
		actualSignals.addAll(g2.getListOfInputsAsComponentDefinition());
		actualSignals.addAll(g2.getListOfOutputsAsComponentDefinition());
		URI psraSignal = URI.create("https://synbiohub.programmingbiology.org/public/Eco1C1G1T1/PsrA_protein/1");
		URI yfpSignal = URI.create("https://synbiohub.programmingbiology.org/public/Eco1C1G1T1/YFP_protein/1");
		for (ComponentDefinition cd : actualSignals) {
			Assert.assertTrue(cd.getIdentity().equals(psraSignal) || cd.getIdentity().equals(yfpSignal));
		}
	}
}
/* * Copyright (C) 2012-2015 DataStax Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datastax.driver.core; import java.nio.ByteBuffer; import java.util.List; import static java.util.concurrent.TimeUnit.NANOSECONDS; import com.google.common.annotations.Beta; import com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A configurable {@link LatencyTracker} that logs all executed statements. * <p> * Typically, client applications would instantiate one single query logger (using its {@link Builder}), * configure it and register it on the relevant {@link Cluster} instance, e.g.: * * <pre> * Cluster cluster = ... * QueryLogger queryLogger = QueryLogger.builder(cluster) * .withConstantThreshold(...) * .withMaxQueryStringLength(...) * .build(); * cluster.register(queryLogger); * </pre> * * Refer to the {@link Builder} documentation for more information on * configuration settings for the query logger. * <p> * Once registered, the query logger will log every {@link RegularStatement}, {@link BoundStatement} or {@link BatchStatement} * executed by the driver; * note that it will never log other types of statement, null statements nor any special statement used internally by the driver. 
* <p> * There is one log for each request to a Cassandra node; because the driver sometimes retries the same statement on multiple nodes, * a single statement execution (for example, a single call to {@link Session#execute(Statement)}) can produce multiple logs on * different nodes. * <p> * For more flexibility, the query logger uses 3 different {@link Logger} instances: * * <ol> * <li>{@link #NORMAL_LOGGER}: used to log normal queries, i.e., queries that completed successfully * within a configurable threshold in milliseconds.</li> * <li>{@link #SLOW_LOGGER}: used to log slow queries, i.e., queries that completed successfully * but that took longer than a configurable threshold in milliseconds to complete.</li> * <li>{@link #ERROR_LOGGER}: used to log unsuccessful queries, i.e., * queries that did not completed normally and threw an exception. * Note this this logger will also print the full stack trace of the reported exception.</li> * </ol> * * <p> * The appropriate logger is chosen according to the following algorithm: * <ol> * <li>if an exception has been thrown: use {@link #ERROR_LOGGER};</li> * <li>otherwise, if the reported latency is greater than the configured threshold in milliseconds: use {@link #SLOW_LOGGER};</li> * <li>otherwise, use {@link #NORMAL_LOGGER}.</li> * </ol> * * <p> * All loggers are activated by setting their levels to {@code DEBUG} or {@code TRACE} (including {@link #ERROR_LOGGER}). * If the level is set to {@code TRACE} and the statement being logged is a {@link BoundStatement}, * then the query parameters (if any) will be logged as well (names and actual values). * * <p> * <strong>Constant thresholds vs. Dynamic thresholds</strong> * <p> * Currently the QueryLogger can track slow queries in two different ways: * using a constant threshold in milliseconds (which is the default behavior), * or using a dynamic threshold based on per-host percentiles computed by * {@link PerHostPercentileTracker}. 
 * <p>
 * <b>Note that the dynamic threshold version is currently provided as a beta preview: it hasn't been extensively
 * tested yet, and the API is still subject to change.</b> To use it, you must first obtain and register
 * an instance of {@link PerHostPercentileTracker}, then create your QueryLogger as follows:
 *
 * <pre>
 * Cluster cluster = ...
 * // create an instance of PerHostPercentileTracker and register it
 * PerHostPercentileTracker tracker = ...;
 * cluster.register(tracker);
 * // create an instance of QueryLogger and register it
 * QueryLogger queryLogger = QueryLogger.builder(cluster)
 *     .withDynamicThreshold(tracker, ...)
 *     .withMaxQueryStringLength(...)
 *     .build();
 * cluster.register(queryLogger);
 * </pre>
 * <p>
 * This class is thread-safe.
 *
 * @since 2.0.10
 */
public abstract class QueryLogger implements LatencyTracker {

    /**
     * The default latency threshold in milliseconds beyond which queries are considered 'slow'
     * and logged as such by the driver.
     */
    public static final long DEFAULT_SLOW_QUERY_THRESHOLD_MS = 5000;

    /**
     * The default latency percentile beyond which queries are considered 'slow'
     * and logged as such by the driver.
     */
    public static final double DEFAULT_SLOW_QUERY_THRESHOLD_PERCENTILE = 99.0;

    /**
     * The default maximum length of a CQL query string that can be logged verbatim
     * by the driver. Query strings longer than this value will be truncated
     * when logged.
     */
    public static final int DEFAULT_MAX_QUERY_STRING_LENGTH = 500;

    /**
     * The default maximum length of a query parameter value that can be logged verbatim
     * by the driver. Parameter values longer than this value will be truncated
     * when logged.
     */
    public static final int DEFAULT_MAX_PARAMETER_VALUE_LENGTH = 50;

    /**
     * The default maximum number of query parameters that can be logged
     * by the driver. Queries with a number of parameters higher than this value
     * will not have all their parameters logged.
     */
    public static final int DEFAULT_MAX_LOGGED_PARAMETERS = 50;

    // Loggers

    /**
     * The logger used to log normal queries, i.e., queries that completed successfully
     * within a configurable threshold in milliseconds.
     * <p>
     * This logger is activated by setting its level to {@code DEBUG} or {@code TRACE}.
     * Additionally, if the level is set to {@code TRACE} and the statement being logged is a {@link BoundStatement},
     * then the query parameters (if any) will be logged as well (names and actual values).
     * <p>
     * The name of this logger is {@code com.datastax.driver.core.QueryLogger.NORMAL}.
     */
    public static final Logger NORMAL_LOGGER = LoggerFactory.getLogger("com.datastax.driver.core.QueryLogger.NORMAL");

    /**
     * The logger used to log slow queries, i.e., queries that completed successfully
     * but whose execution time exceeded a configurable threshold in milliseconds.
     * <p>
     * This logger is activated by setting its level to {@code DEBUG} or {@code TRACE}.
     * Additionally, if the level is set to {@code TRACE} and the statement being logged is a {@link BoundStatement},
     * then the query parameters (if any) will be logged as well (names and actual values).
     * <p>
     * The name of this logger is {@code com.datastax.driver.core.QueryLogger.SLOW}.
     */
    public static final Logger SLOW_LOGGER = LoggerFactory.getLogger("com.datastax.driver.core.QueryLogger.SLOW");

    /**
     * The logger used to log unsuccessful queries, i.e., queries that did not complete normally and threw an exception.
     * <p>
     * This logger is activated by setting its level to {@code DEBUG} or {@code TRACE}.
     * Additionally, if the level is set to {@code TRACE} and the statement being logged is a {@link BoundStatement},
     * then the query parameters (if any) will be logged as well (names and actual values).
     * Note that this logger will also print the full stack trace of the reported exception.
     * <p>
     * The name of this logger is {@code com.datastax.driver.core.QueryLogger.ERROR}.
     */
    public static final Logger ERROR_LOGGER = LoggerFactory.getLogger("com.datastax.driver.core.QueryLogger.ERROR");

    // Message templates.
    // Placeholders, in order: cluster name, host, latency in ms,
    // (for the percentile variant: percentile and threshold in ms,) then the statement text.
    private static final String NORMAL_TEMPLATE = "[%s] [%s] Query completed normally, took %s ms: %s";
    private static final String SLOW_TEMPLATE_MILLIS = "[%s] [%s] Query too slow, took %s ms: %s";
    private static final String SLOW_TEMPLATE_PERCENTILE = "[%s] [%s] Query too slow, took %s ms (%s percentile = %s ms): %s";
    private static final String ERROR_TEMPLATE = "[%s] [%s] Query error after %s ms: %s";

    // Marker appended when a query string or parameter value is cut at the configured maximum length.
    @VisibleForTesting
    static final String TRUNCATED_OUTPUT = "... [truncated output]";

    // Marker appended when a statement has more parameters than maxLoggedParameters.
    @VisibleForTesting
    static final String FURTHER_PARAMS_OMITTED = " [further parameters omitted]";

    protected final Cluster cluster;

    // Lazily initialized on first use (see protocolVersion()); a volatile field is enough
    // because mutual exclusion is not needed for the initialization.
    private volatile ProtocolVersion protocolVersion;

    // Tunable at runtime via the setters below, hence volatile.
    protected volatile int maxQueryStringLength;

    protected volatile int maxParameterValueLength;

    protected volatile int maxLoggedParameters;

    /**
     * Private constructor. Instances of QueryLogger should be obtained via the {@link #builder(Cluster)} method.
     */
    private QueryLogger(Cluster cluster, int maxQueryStringLength, int maxParameterValueLength, int maxLoggedParameters) {
        this.cluster = cluster;
        this.maxQueryStringLength = maxQueryStringLength;
        this.maxParameterValueLength = maxParameterValueLength;
        this.maxLoggedParameters = maxLoggedParameters;
    }

    /**
     * Creates a new {@link QueryLogger.Builder} instance.
     * <p>
     * This is a convenience method for {@code new QueryLogger.Builder()}.
     *
     * @param cluster the {@link Cluster} this QueryLogger will be attached to.
     * @return the new QueryLogger builder.
     * @throws NullPointerException if {@code cluster} is {@code null}.
     */
    public static QueryLogger.Builder builder(Cluster cluster) {
        if(cluster == null)
            throw new NullPointerException("QueryLogger.Builder: cluster parameter cannot be null");
        return new QueryLogger.Builder(cluster);
    }

    /**
     * A QueryLogger that uses a constant threshold in milliseconds
     * to track slow queries.
* This implementation is the default and should be preferred to {@link DynamicThresholdQueryLogger} * which is still in beta state. */ public static class ConstantThresholdQueryLogger extends QueryLogger { private volatile long slowQueryLatencyThresholdMillis; private ConstantThresholdQueryLogger(Cluster cluster, int maxQueryStringLength, int maxParameterValueLength, int maxLoggedParameters, long slowQueryLatencyThresholdMillis) { super(cluster, maxQueryStringLength, maxParameterValueLength, maxLoggedParameters); this.setSlowQueryLatencyThresholdMillis(slowQueryLatencyThresholdMillis); } /** * Return the threshold in milliseconds beyond which queries are considered 'slow' * and logged as such by the driver. * The default value is {@link #DEFAULT_SLOW_QUERY_THRESHOLD_MS}. * * @return The threshold in milliseconds beyond which queries are considered 'slow' * and logged as such by the driver. */ public long getSlowQueryLatencyThresholdMillis() { return slowQueryLatencyThresholdMillis; } /** * Set the threshold in milliseconds beyond which queries are considered 'slow' * and logged as such by the driver. * * @param slowQueryLatencyThresholdMillis Slow queries threshold in milliseconds. * It must be strictly positive. * @throws IllegalArgumentException if {@code slowQueryLatencyThresholdMillis <= 0}. 
*/ public void setSlowQueryLatencyThresholdMillis(long slowQueryLatencyThresholdMillis) { if (slowQueryLatencyThresholdMillis <= 0) throw new IllegalArgumentException("Invalid slowQueryLatencyThresholdMillis, should be > 0, got " + slowQueryLatencyThresholdMillis); this.slowQueryLatencyThresholdMillis = slowQueryLatencyThresholdMillis; } @Override protected void maybeLogNormalOrSlowQuery(Host host, Statement statement, long latencyMs) { if (latencyMs > slowQueryLatencyThresholdMillis) { maybeLogSlowQuery(host, statement, latencyMs); } else { maybeLogNormalQuery(host, statement, latencyMs); } } protected void maybeLogSlowQuery(Host host, Statement statement, long latencyMs) { if (SLOW_LOGGER.isDebugEnabled()) { String message = String.format(SLOW_TEMPLATE_MILLIS, cluster.getClusterName(), host, latencyMs, statementAsString(statement)); logQuery(statement, null, SLOW_LOGGER, message); } } } /** * A QueryLogger that uses a dynamic threshold in milliseconds * to track slow queries. * <p> * Dynamic thresholds are based on per-host latency percentiles, as computed * by {@link PerHostPercentileTracker}. 
* <p> * <b>This class is currently provided as a beta preview: it hasn't been extensively tested yet, and the API is still subject * to change.</b> */ @Beta public static class DynamicThresholdQueryLogger extends QueryLogger { private volatile double slowQueryLatencyThresholdPercentile; private volatile PerHostPercentileTracker perHostPercentileLatencyTracker; private DynamicThresholdQueryLogger(Cluster cluster, int maxQueryStringLength, int maxParameterValueLength, int maxLoggedParameters, double slowQueryLatencyThresholdPercentile, PerHostPercentileTracker perHostPercentileLatencyTracker) { super(cluster, maxQueryStringLength, maxParameterValueLength, maxLoggedParameters); this.setSlowQueryLatencyThresholdPercentile(slowQueryLatencyThresholdPercentile); this.setPerHostPercentileLatencyTracker(perHostPercentileLatencyTracker); } /** * Return the {@link PerHostPercentileTracker} instance to use for recording per-host latency histograms. * Cannot be {@code null}. * * @return the {@link PerHostPercentileTracker} instance to use. */ public PerHostPercentileTracker getPerHostPercentileLatencyTracker() { return perHostPercentileLatencyTracker; } /** * Set the {@link PerHostPercentileTracker} instance to use for recording per-host latency histograms. * Cannot be {@code null}. * * @param perHostPercentileLatencyTracker the {@link PerHostPercentileTracker} instance to use. * @throws IllegalArgumentException if {@code perHostPercentileLatencyTracker == null}. */ public void setPerHostPercentileLatencyTracker(PerHostPercentileTracker perHostPercentileLatencyTracker) { if (perHostPercentileLatencyTracker == null) throw new IllegalArgumentException("perHostPercentileLatencyTracker cannot be null"); this.perHostPercentileLatencyTracker = perHostPercentileLatencyTracker; } /** * Return the threshold percentile beyond which queries are considered 'slow' * and logged as such by the driver. * The default value is {@link #DEFAULT_SLOW_QUERY_THRESHOLD_PERCENTILE}. 
* * @return threshold percentile beyond which queries are considered 'slow' * and logged as such by the driver. */ public double getSlowQueryLatencyThresholdPercentile() { return slowQueryLatencyThresholdPercentile; } /** * Set the threshold percentile beyond which queries are considered 'slow' * and logged as such by the driver. * * @param slowQueryLatencyThresholdPercentile Slow queries threshold percentile. * It must be comprised between 0 inclusive and 100 exclusive. * @throws IllegalArgumentException if {@code slowQueryLatencyThresholdPercentile < 0 || slowQueryLatencyThresholdPercentile >= 100}. */ public void setSlowQueryLatencyThresholdPercentile(double slowQueryLatencyThresholdPercentile) { if (slowQueryLatencyThresholdPercentile < 0.0 || slowQueryLatencyThresholdPercentile >= 100.0) throw new IllegalArgumentException("Invalid slowQueryLatencyThresholdPercentile, should be >= 0 and < 100, got " + slowQueryLatencyThresholdPercentile); this.slowQueryLatencyThresholdPercentile = slowQueryLatencyThresholdPercentile; } @Override protected void maybeLogNormalOrSlowQuery(Host host, Statement statement, long latencyMs) { long threshold = perHostPercentileLatencyTracker.getLatencyAtPercentile(host, slowQueryLatencyThresholdPercentile); if (threshold >= 0 && latencyMs > threshold) { maybeLogSlowQuery(host, statement, latencyMs, threshold); } else { maybeLogNormalQuery(host, statement, latencyMs); } } protected void maybeLogSlowQuery(Host host, Statement statement, long latencyMs, long threshold) { if (SLOW_LOGGER.isDebugEnabled()) { String message = String.format(SLOW_TEMPLATE_PERCENTILE, cluster.getClusterName(), host, latencyMs, slowQueryLatencyThresholdPercentile, threshold, statementAsString(statement)); logQuery(statement, null, SLOW_LOGGER, message); } } } /** * Helper class to build {@link QueryLogger} instances with a fluent API. 
*/ public static class Builder { private final Cluster cluster; private int maxQueryStringLength = DEFAULT_MAX_QUERY_STRING_LENGTH; private int maxParameterValueLength = DEFAULT_MAX_PARAMETER_VALUE_LENGTH; private int maxLoggedParameters = DEFAULT_MAX_LOGGED_PARAMETERS; private long slowQueryLatencyThresholdMillis = DEFAULT_SLOW_QUERY_THRESHOLD_MS; private double slowQueryLatencyThresholdPercentile = DEFAULT_SLOW_QUERY_THRESHOLD_PERCENTILE; private PerHostPercentileTracker perHostPercentileLatencyTracker; private boolean constantThreshold = true; public Builder(Cluster cluster) { this.cluster = cluster; } /** * Enables slow query latency tracking based on constant thresholds. * <p> * Note: You should either use {@link #withConstantThreshold(long) constant thresholds} * or {@link #withDynamicThreshold(PerHostPercentileTracker, double) dynamic thresholds}, * not both. * * @param slowQueryLatencyThresholdMillis The threshold in milliseconds beyond which queries are considered 'slow' * and logged as such by the driver. * The default value is {@link #DEFAULT_SLOW_QUERY_THRESHOLD_MS} * @return this {@link Builder} instance (for method chaining). */ public Builder withConstantThreshold(long slowQueryLatencyThresholdMillis) { this.slowQueryLatencyThresholdMillis = slowQueryLatencyThresholdMillis; constantThreshold = true; return this; } /** * Enables slow query latency tracking based on dynamic thresholds. * <p> * Dynamic thresholds are based on per-host latency percentiles, as computed * by {@link PerHostPercentileTracker}. * <p> * Note: You should either use {@link #withConstantThreshold(long) constant thresholds} * or {@link #withDynamicThreshold(PerHostPercentileTracker, double) dynamic thresholds}, * not both. 
* <p> * <b>This feature is currently provided as a beta preview: it hasn't been extensively tested yet, and the API is still subject * to change.</b> * * @param perHostPercentileLatencyTracker the {@link PerHostPercentileTracker} instance to use for recording per-host latency histograms. * Cannot be {@code null}. * @param slowQueryLatencyThresholdPercentile Slow queries threshold percentile. * It must be comprised between 0 inclusive and 100 exclusive. * The default value is {@link #DEFAULT_SLOW_QUERY_THRESHOLD_PERCENTILE} * @return this {@link Builder} instance (for method chaining). */ @Beta public Builder withDynamicThreshold(PerHostPercentileTracker perHostPercentileLatencyTracker, double slowQueryLatencyThresholdPercentile) { this.perHostPercentileLatencyTracker = perHostPercentileLatencyTracker; this.slowQueryLatencyThresholdPercentile = slowQueryLatencyThresholdPercentile; constantThreshold = false; return this; } /** * Set the maximum length of a CQL query string that can be logged verbatim * by the driver. Query strings longer than this value will be truncated * when logged. * * @param maxQueryStringLength The maximum length of a CQL query string * that can be logged verbatim by the driver. * It must be strictly positive or {@code -1}, * in which case the query is never truncated * (use with care). * The default value is {@link #DEFAULT_MAX_QUERY_STRING_LENGTH}. * @return this {@link Builder} instance (for method chaining). */ public Builder withMaxQueryStringLength(int maxQueryStringLength) { this.maxQueryStringLength = maxQueryStringLength; return this; } /** * Set the maximum length of a query parameter value that can be logged verbatim * by the driver. Parameter values longer than this value will be truncated * when logged. * * @param maxParameterValueLength The maximum length of a query parameter value * that can be logged verbatim by the driver. 
* It must be strictly positive or {@code -1}, * in which case the parameter value is never truncated * (use with care). * The default value is {@link #DEFAULT_MAX_PARAMETER_VALUE_LENGTH}. * @return this {@link Builder} instance (for method chaining). */ public Builder withMaxParameterValueLength(int maxParameterValueLength) { this.maxParameterValueLength = maxParameterValueLength; return this; } /** * Set the maximum number of query parameters that can be logged * by the driver. Queries with a number of parameters higher than this value * will not have all their parameters logged. * * @param maxLoggedParameters The maximum number of query parameters that can be logged * by the driver. It must be strictly positive or {@code -1}, * in which case all parameters will be logged, regardless of their number * (use with care). * The default value is {@link #DEFAULT_MAX_LOGGED_PARAMETERS}. * @return this {@link Builder} instance (for method chaining). */ public Builder withMaxLoggedParameters(int maxLoggedParameters) { this.maxLoggedParameters = maxLoggedParameters; return this; } /** * Build the {@link QueryLogger} instance. * @return the {@link QueryLogger} instance. * @throws IllegalArgumentException if the builder is unable to build a valid instance due to incorrect settings. */ public QueryLogger build() { if(constantThreshold) { return new ConstantThresholdQueryLogger(cluster, maxQueryStringLength, maxParameterValueLength, maxLoggedParameters, slowQueryLatencyThresholdMillis); } else { return new DynamicThresholdQueryLogger(cluster, maxQueryStringLength, maxParameterValueLength, maxLoggedParameters, slowQueryLatencyThresholdPercentile, perHostPercentileLatencyTracker); } } } // Getters and Setters /** * Return the maximum length of a CQL query string that can be logged verbatim * by the driver. Query strings longer than this value will be truncated * when logged. * The default value is {@link #DEFAULT_MAX_QUERY_STRING_LENGTH}. 
* * @return The maximum length of a CQL query string that can be logged verbatim * by the driver. */ public int getMaxQueryStringLength() { return maxQueryStringLength; } /** * Set the maximum length of a CQL query string that can be logged verbatim * by the driver. Query strings longer than this value will be truncated * when logged. * * @param maxQueryStringLength The maximum length of a CQL query string * that can be logged verbatim by the driver. * It must be strictly positive or {@code -1}, * in which case the query is never truncated * (use with care). * @throws IllegalArgumentException if {@code maxQueryStringLength <= 0 && maxQueryStringLength != -1}. */ public void setMaxQueryStringLength(int maxQueryStringLength) { if (maxQueryStringLength <= 0 && maxQueryStringLength != -1) throw new IllegalArgumentException("Invalid maxQueryStringLength, should be > 0 or -1, got " + maxQueryStringLength); this.maxQueryStringLength = maxQueryStringLength; } /** * Return the maximum length of a query parameter value that can be logged verbatim * by the driver. Parameter values longer than this value will be truncated * when logged. * The default value is {@link #DEFAULT_MAX_PARAMETER_VALUE_LENGTH}. * * @return The maximum length of a query parameter value that can be logged verbatim * by the driver. */ public int getMaxParameterValueLength() { return maxParameterValueLength; } /** * Set the maximum length of a query parameter value that can be logged verbatim * by the driver. Parameter values longer than this value will be truncated * when logged. * * @param maxParameterValueLength The maximum length of a query parameter value * that can be logged verbatim by the driver. * It must be strictly positive or {@code -1}, * in which case the parameter value is never truncated * (use with care). * @throws IllegalArgumentException if {@code maxParameterValueLength <= 0 && maxParameterValueLength != -1}. 
*/ public void setMaxParameterValueLength(int maxParameterValueLength) { if (maxParameterValueLength <= 0 && maxParameterValueLength != -1) throw new IllegalArgumentException("Invalid maxParameterValueLength, should be > 0 or -1, got " + maxParameterValueLength); this.maxParameterValueLength = maxParameterValueLength; } /** * Return the maximum number of query parameters that can be logged * by the driver. Queries with a number of parameters higher than this value * will not have all their parameters logged. * The default value is {@link #DEFAULT_MAX_LOGGED_PARAMETERS}. * * @return The maximum number of query parameters that can be logged * by the driver. */ public int getMaxLoggedParameters() { return maxLoggedParameters; } /** * Set the maximum number of query parameters that can be logged * by the driver. Queries with a number of parameters higher than this value * will not have all their parameters logged. * * @param maxLoggedParameters the maximum number of query parameters that can be logged * by the driver. It must be strictly positive or {@code -1}, * in which case all parameters will be logged, regardless of their number * (use with care). * @throws IllegalArgumentException if {@code maxLoggedParameters <= 0 && maxLoggedParameters != -1}. 
*/ public void setMaxLoggedParameters(int maxLoggedParameters) { if (maxLoggedParameters <= 0 && maxLoggedParameters != -1) throw new IllegalArgumentException("Invalid maxLoggedParameters, should be > 0 or -1, got " + maxLoggedParameters); this.maxLoggedParameters = maxLoggedParameters; } /** * {@inheritDoc} */ @Override public void update(Host host, Statement statement, Exception exception, long newLatencyNanos) { long latencyMs = NANOSECONDS.toMillis(newLatencyNanos); if (exception == null) { maybeLogNormalOrSlowQuery(host, statement, latencyMs); } else { maybeLogErrorQuery(host, statement, exception, latencyMs); } } protected abstract void maybeLogNormalOrSlowQuery(Host host, Statement statement, long latencyMs); protected void maybeLogNormalQuery(Host host, Statement statement, long latencyMs) { if (NORMAL_LOGGER.isDebugEnabled()) { String message = String.format(NORMAL_TEMPLATE, cluster.getClusterName(), host, latencyMs, statementAsString(statement)); logQuery(statement, null, NORMAL_LOGGER, message); } } protected void maybeLogErrorQuery(Host host, Statement statement, Exception exception, long latencyMs) { if (ERROR_LOGGER.isDebugEnabled()) { String message = String.format(ERROR_TEMPLATE, cluster.getClusterName(), host, latencyMs, statementAsString(statement)); logQuery(statement, exception, ERROR_LOGGER, message); } } protected void logQuery(Statement statement, Exception exception, Logger logger, String message) { boolean showParameterValues = logger.isTraceEnabled(); if (showParameterValues) { StringBuilder params = new StringBuilder(); if (statement instanceof BoundStatement) { appendParameters((BoundStatement)statement, params, maxLoggedParameters); } else if (statement instanceof BatchStatement) { BatchStatement batchStatement = (BatchStatement)statement; int remaining = maxLoggedParameters; for (Statement inner : batchStatement.getStatements()) { if (inner instanceof BoundStatement) { remaining = appendParameters((BoundStatement)inner, params, 
remaining); } } } if (params.length() > 0) params.append("]"); logger.trace(message + params, exception); } else { logger.debug(message, exception); } } protected String statementAsString(Statement statement) { StringBuilder sb = new StringBuilder(); if (statement instanceof BatchStatement) { BatchStatement bs = (BatchStatement)statement; int statements = bs.getStatements().size(); int boundValues = countBoundValues(bs); sb.append("[" + statements + " statements, " + boundValues + " bound values] "); } else if (statement instanceof BoundStatement) { int boundValues = ((BoundStatement)statement).wrapper.values.length; sb.append("[" + boundValues + " bound values] "); } append(statement, sb, maxQueryStringLength); return sb.toString(); } protected int countBoundValues(BatchStatement bs) { int count = 0; for (Statement s : bs.getStatements()) { if (s instanceof BoundStatement) count += ((BoundStatement)s).wrapper.values.length; } return count; } protected int appendParameters(BoundStatement statement, StringBuilder buffer, int remaining) { if (remaining == 0) return 0; ColumnDefinitions metadata = statement.preparedStatement().getVariables(); int numberOfParameters = metadata.size(); if (numberOfParameters > 0) { List<ColumnDefinitions.Definition> definitions = metadata.asList(); int numberOfLoggedParameters; if (remaining == -1) { numberOfLoggedParameters = numberOfParameters; } else { numberOfLoggedParameters = remaining > numberOfParameters ? 
numberOfParameters : remaining; remaining -= numberOfLoggedParameters; } for (int i = 0; i < numberOfLoggedParameters; i++) { if (buffer.length() == 0) buffer.append(" ["); else buffer.append(", "); buffer.append(String.format("%s:%s", metadata.getName(i), parameterValueAsString(definitions.get(i), statement.wrapper.values[i]))); } if (numberOfLoggedParameters < numberOfParameters) { buffer.append(FURTHER_PARAMS_OMITTED); } } return remaining; } protected String parameterValueAsString(ColumnDefinitions.Definition definition, ByteBuffer raw) { String valueStr; if (raw == null || raw.remaining() == 0) { valueStr = "NULL"; } else { DataType type = definition.getType(); int maxParameterValueLength = this.maxParameterValueLength; if (type.equals(DataType.blob()) && maxParameterValueLength != -1) { // prevent large blobs from being converted to strings int maxBufferLength = Math.max(2, (maxParameterValueLength - 2) / 2); boolean bufferTooLarge = raw.remaining() > maxBufferLength; if (bufferTooLarge) { raw = (ByteBuffer)raw.duplicate().limit(maxBufferLength); } Object value = type.deserialize(raw, protocolVersion()); valueStr = type.format(value); if (bufferTooLarge) { valueStr = valueStr + TRUNCATED_OUTPUT; } } else { Object value = type.deserialize(raw, protocolVersion()); valueStr = type.format(value); if (maxParameterValueLength != -1 && valueStr.length() > maxParameterValueLength) { valueStr = valueStr.substring(0, maxParameterValueLength) + TRUNCATED_OUTPUT; } } } return valueStr; } private ProtocolVersion protocolVersion() { // Since the QueryLogger can be registered before the Cluster was initialized, we can't retrieve // it at construction time. Cache it field at first use (a volatile field is good enough since we // don't need mutual exclusion). 
if (protocolVersion == null) { protocolVersion = cluster.getConfiguration().getProtocolOptions().getProtocolVersionEnum(); // At least one connection was established when QueryLogger is invoked assert protocolVersion != null : "protocol version should be defined"; } return protocolVersion; } protected int append(Statement statement, StringBuilder buffer, int remaining) { if (statement instanceof StatementWrapper) statement = ((StatementWrapper)statement).getWrappedStatement(); if (statement instanceof RegularStatement) { remaining = append(((RegularStatement)statement).getQueryString().trim(), buffer, remaining); } else if (statement instanceof BoundStatement) { remaining = append(((BoundStatement)statement).preparedStatement().getQueryString().trim(), buffer, remaining); } else if (statement instanceof BatchStatement) { BatchStatement batchStatement = (BatchStatement)statement; remaining = append("BEGIN", buffer, remaining); switch (batchStatement.batchType) { case UNLOGGED: append(" UNLOGGED", buffer, remaining); break; case COUNTER: append(" COUNTER", buffer, remaining); break; } remaining = append(" BATCH", buffer, remaining); for (Statement stmt : batchStatement.getStatements()) { remaining = append(" ", buffer, remaining); remaining = append(stmt, buffer, remaining); } remaining = append(" APPLY BATCH", buffer, remaining); } else { // Unknown types of statement // Call toString() as a last resort remaining = append(statement.toString(), buffer, remaining); } if (buffer.charAt(buffer.length() - 1) != ';') { remaining = append(";", buffer, remaining); } return remaining; } protected int append(CharSequence str, StringBuilder buffer, int remaining) { if (remaining == -2) { // capacity exceeded } else if (remaining == -1) { // unlimited capacity buffer.append(str); } else if (str.length() > remaining) { buffer.append(str, 0, remaining).append(TRUNCATED_OUTPUT); remaining = -2; } else { buffer.append(str); remaining -= str.length(); } return remaining; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; /** * SimpleQueryStringParser is a query parser that acts similar to a query_string * query, but won't throw exceptions for any weird string syntax. 
 It supports
 * the following:
 * <p/>
 * <ul>
 * <li>'{@code +}' specifies {@code AND} operation: <tt>token1+token2</tt>
 * <li>'{@code |}' specifies {@code OR} operation: <tt>token1|token2</tt>
 * <li>'{@code -}' negates a single token: <tt>-token0</tt>
 * <li>'{@code "}' creates phrases of terms: <tt>"term1 term2 ..."</tt>
 * <li>'{@code *}' at the end of terms specifies prefix query: <tt>term*</tt>
 * <li>'{@code (}' and '{@code)}' specifies precedence: <tt>token1 + (token2 | token3)</tt>
 * <li>'{@code ~}N' at the end of terms specifies fuzzy query: <tt>term~1</tt>
 * <li>'{@code ~}N' at the end of phrases specifies near/slop query: <tt>"term1 term2"~5</tt>
 * </ul>
 * <p/>
 * See: {@link XSimpleQueryParser} for more information.
 * <p/>
 * This query supports these options:
 * <p/>
 * Required:
 * {@code query} - query text to be converted into other queries
 * <p/>
 * Optional:
 * {@code analyzer} - analyzer to be used for analyzing tokens to determine
 * which kind of query they should be converted into, defaults to "standard"
 * {@code default_operator} - default operator for boolean queries, defaults
 * to OR
 * {@code fields} - fields to search, defaults to _all if not set, allows
 * boosting a field with ^n
 */
public class SimpleQueryStringParser implements QueryParser {

    public static final String NAME = "simple_query_string";

    @Inject
    public SimpleQueryStringParser(Settings settings) {
    }

    @Override
    public String[] names() {
        // Register both the snake_case and camelCase spellings of the query name.
        return new String[]{NAME, Strings.toCamelCase(NAME)};
    }

    @Override
    public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        // Parsed option values; null (or -1 for flags) means "not specified in the request".
        String currentFieldName = null;
        String queryBody = null;
        String queryName = null;
        String field = null;
        String minimumShouldMatch = null;
        Map<String, Float> fieldsAndWeights = null;
        BooleanClause.Occur defaultOperator = null;
        Analyzer analyzer = null;
        int flags = -1; // stays -1 when "flags" is absent — presumably treated as ALL downstream; TODO confirm
        SimpleQueryParser.Settings sqsSettings = new SimpleQueryParser.Settings();

        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_ARRAY) {
                if ("fields".equals(currentFieldName)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        String fField = null;
                        float fBoost = 1;
                        // Split "name^boost" entries; scan the raw characters and only allocate
                        // substrings when a '^' separator is actually present.
                        char[] text = parser.textCharacters();
                        int end = parser.textOffset() + parser.textLength();
                        for (int i = parser.textOffset(); i < end; i++) {
                            if (text[i] == '^') {
                                int relativeLocation = i - parser.textOffset();
                                fField = new String(text, parser.textOffset(), relativeLocation);
                                fBoost = Float.parseFloat(new String(text, i + 1, parser.textLength() - relativeLocation - 1));
                                break;
                            }
                        }
                        if (fField == null) {
                            fField = parser.text();
                        }
                        if (fieldsAndWeights == null) {
                            fieldsAndWeights = new HashMap<>();
                        }

                        if (Regex.isSimpleMatchPattern(fField)) {
                            // Wildcard pattern: expand to every matching index field name.
                            for (String fieldName : parseContext.mapperService().simpleMatchToIndexNames(fField)) {
                                fieldsAndWeights.put(fieldName, fBoost);
                            }
                        } else {
                            // Concrete field: resolve through the mappers so the index name is used when mapped.
                            MapperService.SmartNameFieldMappers mappers = parseContext.smartFieldMappers(fField);
                            if (mappers != null && mappers.hasMapper()) {
                                fieldsAndWeights.put(mappers.mapper().names().indexName(), fBoost);
                            } else {
                                fieldsAndWeights.put(fField, fBoost);
                            }
                        }
                    }
                } else {
                    throw new QueryParsingException(parseContext.index(),
                            "[" + NAME + "] query does not support [" + currentFieldName + "]");
                }
            } else if (token.isValue()) {
                if ("query".equals(currentFieldName)) {
                    queryBody = parser.text();
                } else if ("analyzer".equals(currentFieldName)) {
                    analyzer = parseContext.analysisService().analyzer(parser.text());
                    if (analyzer == null) {
                        throw new QueryParsingException(parseContext.index(), "[" + NAME + "] analyzer [" + parser.text() + "] not found");
                    }
                } else if ("field".equals(currentFieldName)) {
                    field = parser.text();
                } else if ("default_operator".equals(currentFieldName) || "defaultOperator".equals(currentFieldName)) {
                    String op = parser.text();
                    if ("or".equalsIgnoreCase(op)) {
                        defaultOperator = BooleanClause.Occur.SHOULD;
                    } else if ("and".equalsIgnoreCase(op)) {
                        defaultOperator = BooleanClause.Occur.MUST;
                    } else {
                        throw new QueryParsingException(parseContext.index(),
                                "[" + NAME + "] default operator [" + op + "] is not allowed");
                    }
                } else if ("flags".equals(currentFieldName)) {
                    if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) {
                        // Possible options are:
                        // ALL, NONE, AND, OR, PREFIX, PHRASE, PRECEDENCE, ESCAPE, WHITESPACE, FUZZY, NEAR, SLOP
                        flags = SimpleQueryStringFlag.resolveFlags(parser.text());
                    } else {
                        flags = parser.intValue();
                        if (flags < 0) {
                            // Negative numeric flags are normalized to "all features enabled".
                            flags = SimpleQueryStringFlag.ALL.value();
                        }
                    }
                } else if ("locale".equals(currentFieldName)) {
                    String localeStr = parser.text();
                    Locale locale = LocaleUtils.parse(localeStr);
                    sqsSettings.locale(locale);
                } else if ("lowercase_expanded_terms".equals(currentFieldName)) {
                    sqsSettings.lowercaseExpandedTerms(parser.booleanValue());
                } else if ("lenient".equals(currentFieldName)) {
                    sqsSettings.lenient(parser.booleanValue());
                } else if ("analyze_wildcard".equals(currentFieldName)) {
                    sqsSettings.analyzeWildcard(parser.booleanValue());
                } else if ("_name".equals(currentFieldName)) {
                    queryName = parser.text();
                } else if ("minimum_should_match".equals(currentFieldName)) {
                    minimumShouldMatch = parser.textOrNull();
                } else {
                    throw new QueryParsingException(parseContext.index(), "[" + NAME + "] unsupported field [" + parser.currentName() + "]");
                }
            }
        }

        // Query text is required
        if (queryBody == null) {
            throw new QueryParsingException(parseContext.index(), "[" + NAME + "] query text missing");
        }

        // Support specifying only a field instead of a map
        // NOTE(review): currentFieldName here is whatever option name was parsed last in the
        // loop, which is not necessarily a field name — verify this fallback is intended.
        if (field == null) {
            field = currentFieldName;
        }

        // Use the default field (_all) if no fields specified
        if (fieldsAndWeights == null) {
            field = parseContext.defaultField();
        }

        // Use standard analyzer by default
        if (analyzer == null) {
            analyzer = parseContext.mapperService().searchAnalyzer();
        }

        if (fieldsAndWeights == null) {
            // Single-field form: weight 1.0 on the one resolved field.
            fieldsAndWeights = Collections.singletonMap(field, 1.0F);
        }

        SimpleQueryParser sqp = new SimpleQueryParser(analyzer, fieldsAndWeights, flags, sqsSettings);

        if (defaultOperator != null) {
            sqp.setDefaultOperator(defaultOperator);
        }

        Query query = sqp.parse(queryBody);
        if (queryName != null) {
            parseContext.addNamedQuery(queryName, query);
        }

        if (minimumShouldMatch != null && query instanceof BooleanQuery) {
            Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
        }
        return query;
    }
}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.storage.adapter; import org.keycloak.common.util.MultivaluedHashMap; import org.keycloak.common.util.Time; import org.keycloak.models.ClientModel; import org.keycloak.models.GroupModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.RealmModel; import org.keycloak.models.RoleModel; import org.keycloak.models.UserModel; import org.keycloak.models.UserModelDefaultMethods; import org.keycloak.models.utils.DefaultRoles; import org.keycloak.models.utils.KeycloakModelUtils; import org.keycloak.models.utils.RoleUtils; import org.keycloak.storage.ReadOnlyException; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; /** * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a> * @version $Revision: 1 $ */ public class InMemoryUserAdapter extends UserModelDefaultMethods { private Long createdTimestamp = Time.currentTimeMillis(); private boolean emailVerified; private boolean enabled; private Set<String> roleIds = new HashSet<>(); private Set<String> groupIds = new HashSet<>(); private MultivaluedHashMap<String, String> attributes = new MultivaluedHashMap<>(); private Set<String> requiredActions = new HashSet<>(); private String federationLink; private String 
serviceAccountClientLink; private KeycloakSession session; private RealmModel realm; private String id; private boolean readonly; public InMemoryUserAdapter(KeycloakSession session, RealmModel realm, String id) { this.session = session; this.realm = realm; this.id = id; } @Override public String getUsername() { return getFirstAttribute(UserModel.USERNAME); } @Override public void setUsername(String username) { username = username==null ? null : username.toLowerCase(); setSingleAttribute(UserModel.USERNAME, username); } public void addDefaults() { DefaultRoles.addDefaultRoles(realm, this); for (GroupModel g : realm.getDefaultGroups()) { joinGroup(g); } } public void setReadonly(boolean flag) { readonly = flag; } protected void checkReadonly() { if (readonly) throw new ReadOnlyException("In memory user model is not writable"); } @Override public String getId() { return id; } @Override public Long getCreatedTimestamp() { return createdTimestamp; } @Override public void setCreatedTimestamp(Long timestamp) { checkReadonly(); this.createdTimestamp = timestamp; } @Override public boolean isEnabled() { return enabled; } @Override public void setEnabled(boolean enabled) { checkReadonly(); this.enabled = enabled; } @Override public void setSingleAttribute(String name, String value) { checkReadonly(); if (UserModel.USERNAME.equals(name) || UserModel.EMAIL.equals(name)) { value = KeycloakModelUtils.toLowerCaseSafe(value); } attributes.putSingle(name, value); } @Override public void setAttribute(String name, List<String> values) { checkReadonly(); if (UserModel.USERNAME.equals(name) || UserModel.EMAIL.equals(name)) { String lowerCasedFirstValue = KeycloakModelUtils.toLowerCaseSafe((values != null && values.size() > 0) ? 
values.get(0) : null); if (lowerCasedFirstValue != null) values.set(0, lowerCasedFirstValue); } attributes.put(name, values); } @Override public void removeAttribute(String name) { checkReadonly(); attributes.remove(name); } @Override public String getFirstAttribute(String name) { return attributes.getFirst(name); } @Override public List<String> getAttribute(String name) { List<String> value = attributes.get(name); if (value == null) { return new LinkedList<>(); } return value; } @Override public Map<String, List<String>> getAttributes() { return attributes; } @Override public Set<String> getRequiredActions() { return requiredActions; } @Override public void addRequiredAction(String action) { checkReadonly(); requiredActions.add(action); } @Override public void removeRequiredAction(String action) { checkReadonly(); requiredActions.remove(action); } @Override public void addRequiredAction(RequiredAction action) { checkReadonly(); requiredActions.add(action.name()); } @Override public void removeRequiredAction(RequiredAction action) { checkReadonly(); requiredActions.remove(action.name()); } @Override public boolean isEmailVerified() { return emailVerified; } @Override public void setEmailVerified(boolean verified) { checkReadonly(); this.emailVerified = verified; } @Override public Set<GroupModel> getGroups() { if (groupIds.isEmpty()) return new HashSet<>(); Set<GroupModel> groups = new HashSet<>(); for (String id : groupIds) { groups.add(realm.getGroupById(id)); } return groups; } @Override public void joinGroup(GroupModel group) { checkReadonly(); groupIds.add(group.getId()); } @Override public void leaveGroup(GroupModel group) { checkReadonly(); groupIds.remove(group.getId()); } @Override public boolean isMemberOf(GroupModel group) { if (groupIds == null) return false; if (groupIds.contains(group.getId())) return true; Set<GroupModel> groups = getGroups(); return RoleUtils.isMember(groups, group); } @Override public String getFederationLink() { return 
federationLink; } @Override public void setFederationLink(String link) { checkReadonly(); this.federationLink = link; } @Override public String getServiceAccountClientLink() { return serviceAccountClientLink; } @Override public void setServiceAccountClientLink(String clientInternalId) { checkReadonly(); this.serviceAccountClientLink = clientInternalId; } @Override public Set<RoleModel> getRealmRoleMappings() { Set<RoleModel> allRoles = getRoleMappings(); // Filter to retrieve just realm roles Set<RoleModel> realmRoles = new HashSet<>(); for (RoleModel role : allRoles) { if (role.getContainer() instanceof RealmModel) { realmRoles.add(role); } } return realmRoles; } @Override public Set<RoleModel> getClientRoleMappings(ClientModel app) { Set<RoleModel> result = new HashSet<>(); Set<RoleModel> roles = getRoleMappings(); for (RoleModel role : roles) { if (app.equals(role.getContainer())) { result.add(role); } } return result; } @Override public boolean hasRole(RoleModel role) { Set<RoleModel> roles = getRoleMappings(); return RoleUtils.hasRole(roles, role) || RoleUtils.hasRoleFromGroup(getGroups(), role, true); } @Override public void grantRole(RoleModel role) { roleIds.add(role.getId()); } @Override public Set<RoleModel> getRoleMappings() { if (roleIds.isEmpty()) return new HashSet<>(); Set<RoleModel> roles = new HashSet<>(); for (String id : roleIds) { roles.add(realm.getRoleById(id)); } return roles; } @Override public void deleteRoleMapping(RoleModel role) { roleIds.remove(role.getId()); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || !(o instanceof UserModel)) return false; UserModel that = (UserModel) o; return that.getId().equals(getId()); } @Override public int hashCode() { return getId().hashCode(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.security.user;

import java.security.Principal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.jcr.ImportUUIDBehavior;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.nodetype.ConstraintViolationException;
import javax.jcr.nodetype.PropertyDefinition;

import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.principal.PrincipalIterator;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.AuthorizableExistsException;
import org.apache.jackrabbit.api.security.user.Group;
import org.apache.jackrabbit.api.security.user.Impersonation;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.identifier.IdentifierManager;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.plugins.value.jcr.PartialValueFactory;
import org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalImpl;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.apache.jackrabbit.oak.spi.security.user.util.UserUtil;
import org.apache.jackrabbit.oak.spi.xml.ImportBehavior;
import org.apache.jackrabbit.oak.spi.xml.NodeInfo;
import org.apache.jackrabbit.oak.spi.xml.PropInfo;
import org.apache.jackrabbit.oak.spi.xml.ProtectedNodeImporter;
import org.apache.jackrabbit.oak.spi.xml.ProtectedPropertyImporter;
import org.apache.jackrabbit.oak.spi.xml.ReferenceChangeTracker;
import org.apache.jackrabbit.oak.spi.xml.TextValue;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static org.apache.jackrabbit.oak.api.Type.STRINGS;

/**
 * {@code UserImporter} implements both {@code ProtectedPropertyImporter}
 * and {@code ProtectedNodeImporter} and provides import facilities for protected
 * user and group content defined and used by this user management implementation.<p>
 * <p>
 * The importer is intended to be used by applications that import user content
 * extracted from another repository instance and immediately persist the
 * imported content using {@link javax.jcr.Session#save()}. Omitting the
 * save call will lead to transient, semi-validated user content and eventually
 * to inconsistencies.
 * <p>
 * Note the following restrictions:
 * <ul>
 * <li>The importer will only be initialized if the user manager exposed by
 * the session is an instance of {@code UserManagerImpl}.
 * </li>
 * <li>The importer will only be initialized if the editing session starting
 * this import is the same as the UserManager's Session instance.
 * </li>
 * <li>The jcr:uuid property of user and groups is defined to represent the
 * hashed authorizable id as calculated by the UserManager. This importer
 * is therefore not able to handle imports with
 * {@link ImportUUIDBehavior#IMPORT_UUID_CREATE_NEW}.</li>
 * <li>Importing user/group nodes outside of the hierarchy defined by the two
 * configuration options
 * {@link org.apache.jackrabbit.oak.spi.security.user.UserConstants#PARAM_GROUP_PATH}
 * and {@link org.apache.jackrabbit.oak.spi.security.user.UserConstants#PARAM_USER_PATH}
 * will fail upon {@code Root#commit()}. The same may
 * be true in case of {@link ImportUUIDBehavior#IMPORT_UUID_COLLISION_REPLACE_EXISTING}
 * inserting the user/group node at some other place in the node hierarchy.</li>
 * <li>The same commit hook will make sure that authorizables are never nested
 * and are created below a hierarchy of nt:AuthorizableFolder nodes. This isn't
 * enforced by means of node type constraints but only by the API. This importer
 * itself currently doesn't perform such a validation check.</li>
 * <li>Any attempt to import conflicting data will cause the import to fail
 * either immediately or upon calling {@link javax.jcr.Session#save()} with the
 * following exceptions:
 * <ul>
 * <li>{@code rep:members} : Group membership</li>
 * <li>{@code rep:impersonators} : Impersonators of a User.</li>
 * </ul>
 * The import behavior of these two properties is defined by the {@link #PARAM_IMPORT_BEHAVIOR}
 * configuration parameter, which can be set to
 * <ul>
 * <li>{@link ImportBehavior#NAME_IGNORE ignore}: A warning is logged.</li>
 * <li>{@link ImportBehavior#NAME_BESTEFFORT best effort}: A warning is logged
 * and the importer tries to fix the problem.</li>
 * <li>{@link ImportBehavior#NAME_ABORT abort}: The import is immediately
 * aborted with a ConstraintViolationException. (<strong>default</strong>)</li>
 * </ul>
 * </li>
 * </ul>
 */
class UserImporter implements ProtectedPropertyImporter, ProtectedNodeImporter, UserConstants {

    private static final Logger log = LoggerFactory.getLogger(UserImporter.class);

    // Resolved once from configuration; one of ImportBehavior.IGNORE/BESTEFFORT/ABORT.
    private final int importBehavior;

    private Root root;
    private NamePathMapper namePathMapper;
    private ReferenceChangeTracker referenceTracker;
    private UserManagerImpl userManager;
    // Lazily created by Membership#getIdentifierManager().
    private IdentifierManager identifierManager;

    private boolean initialized = false;

    /**
     * Container used to collect group members stored in protected nodes.
     */
    private Membership currentMembership;

    /**
     * map holding the processed memberships. this is needed as both, the property and the node importer, can provide
     * memberships during processing. if both would be handled only via the reference tracker {@link Membership#process()}
     * would remove the members from the property importer.
     */
    private Map<String, Membership> memberships = new HashMap<>();

    /**
     * Temporary store for the pw an imported new user to be able to call
     * the creation actions irrespective of the order of protected properties
     */
    private String currentPw;

    /**
     * Remember all new principals for impersonation handling.
     */
    private Map<String, Principal> principals = new HashMap<>();

    UserImporter(ConfigurationParameters config) {
        importBehavior = UserUtil.getImportBehavior(config);
    }

    //----------------------------------------------< ProtectedItemImporter >---

    /**
     * Initializes this importer for the given session/root. Returns {@code false}
     * (importer stays inactive) for non-Jackrabbit sessions, for
     * IMPORT_UUID_CREATE_NEW uuid behavior, or when the user manager cannot be
     * used (see {@link #canInitUserManager}); throws if already initialized.
     */
    @Override
    public boolean init(@NotNull Session session, @NotNull Root root, @NotNull NamePathMapper namePathMapper,
            boolean isWorkspaceImport, int uuidBehavior,
            @NotNull ReferenceChangeTracker referenceTracker, @NotNull SecurityProvider securityProvider) {

        if (!(session instanceof JackrabbitSession)) {
            log.debug("Importing protected user content requires a JackrabbitSession");
            return false;
        }

        this.root = root;
        this.namePathMapper = namePathMapper;
        this.referenceTracker = referenceTracker;

        if (initialized) {
            throw new IllegalStateException("Already initialized");
        }
        if (uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW) {
            log.debug("ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW isn't supported when importing users or groups.");
            return false;
        }
        if (!canInitUserManager((JackrabbitSession) session, isWorkspaceImport)) {
            return false;
        }
        userManager = new UserManagerImpl(root, new PartialValueFactory(namePathMapper), securityProvider);
        initialized = true;
        return initialized;
    }

    // Session imports cannot work with an autosaving UserManager (intermediate
    // saves would persist semi-validated content); workspace imports are fine.
    private static boolean canInitUserManager(@NotNull JackrabbitSession session, boolean isWorkspaceImport) {
        try {
            if (!isWorkspaceImport && session.getUserManager().isAutoSave()) {
                log.warn("Session import cannot handle user content: UserManager is in autosave mode.");
                return false;
            }
        } catch (RepositoryException e) {
            // failed to access user manager or to set the autosave behavior
            // -> return false (not initialized) as importer can't operate.
            log.error("Failed to initialize UserImporter: ", e);
            return false;
        }
        return true;
    }

    // -----------------------------------------< ProtectedPropertyImporter >---

    /**
     * Dispatches a protected property to the matching import* helper depending
     * on the property name. Returns {@code true} if the property was consumed
     * by this importer, {@code false} otherwise.
     */
    @Override
    public boolean handlePropInfo(@NotNull Tree parent, @NotNull PropInfo propInfo, @NotNull PropertyDefinition def) throws RepositoryException {
        checkInitialized();

        if (isPwdNode(parent)) {
            // overwrite any properties generated underneath the rep:pwd node
            // by "UserManagerImpl#setPassword" by the properties defined by
            // the XML to be imported. see OAK-1943 for the corresponding discussion.
            return importPwdNodeProperty(parent, propInfo, def);
        } else {
            Authorizable a = userManager.getAuthorizable(parent);
            if (a == null) {
                log.debug("Cannot handle protected PropInfo {}. Node {} doesn't represent an Authorizable.", propInfo, parent);
                return false;
            }

            String propName = propInfo.getName();
            if (REP_AUTHORIZABLE_ID.equals(propName)) {
                return importAuthorizableId(parent, a, propInfo, def);
            } else if (REP_PRINCIPAL_NAME.equals(propName)) {
                return importPrincipalName(parent, a, propInfo, def);
            } else if (REP_PASSWORD.equals(propName)) {
                return importPassword(parent, a, propInfo, def);
            } else if (REP_IMPERSONATORS.equals(propName)) {
                return importImpersonators(parent, a, propInfo, def);
            } else if (REP_DISABLED.equals(propName)) {
                return importDisabled(a, propInfo, def);
            } else if (REP_MEMBERS.equals(propName)) {
                if (!a.isGroup() || !isValid(def, NT_REP_MEMBER_REFERENCES, true)) {
                    return false;
                }
                // since group-members are references to user/groups that potentially
                // are to be imported later on -> postpone processing to the end.
                // see -> process References
                getMembership(a.getPath()).addMembers(propInfo.getTextValues());
                return true;
            }
            // another protected property -> return false
        }

        // neither rep:pwd nor authorizable node -> not covered by this importer.
        return false;
    }

    /**
     * Post-processing after all protected properties of {@code protectedParent}
     * have been handled: drops any imported rep:cache node, backfills
     * rep:authorizableID, and triggers onCreate actions for NEW users/groups.
     */
    @Override
    public void propertiesCompleted(@NotNull Tree protectedParent) throws RepositoryException {
        if (isCacheNode(protectedParent)) {
            // remove the cache if present
            protectedParent.remove();
        } else {
            Authorizable a = userManager.getAuthorizable(protectedParent);
            if (a == null) {
                // not an authorizable
                return;
            }

            // make sure the authorizable ID property is always set even if the
            // authorizable defined by the imported XML didn't provide rep:authorizableID
            if (!protectedParent.hasProperty(REP_AUTHORIZABLE_ID)) {
                protectedParent.setProperty(REP_AUTHORIZABLE_ID, a.getID(), Type.STRING);
            }

            /*
            Execute authorizable actions for a NEW user at this point after
            having set the password and the principal name (all protected properties
            have been processed now).
            */
            if (protectedParent.getStatus() == Tree.Status.NEW) {
                if (a.isGroup()) {
                    userManager.onCreate((Group) a);
                } else {
                    userManager.onCreate((User) a, currentPw);
                }
            }
            currentPw = null;
        }
    }

    /**
     * Resolves all postponed references (group memberships and impersonators)
     * collected during the import and removes the successfully processed
     * entries from the reference tracker.
     */
    @Override
    public void processReferences() throws RepositoryException {
        checkInitialized();

        // add all collected memberships to the reference tracker.
        for (Membership m: memberships.values()) {
            referenceTracker.processedReference(m);
        }
        memberships.clear();

        List<Object> processed = new ArrayList<>();
        for (Iterator<Object> it = referenceTracker.getProcessedReferences(); it.hasNext(); ) {
            Object reference = it.next();
            if (reference instanceof Membership) {
                ((Membership) reference).process();
                processed.add(reference);
            } else if (reference instanceof Impersonators) {
                ((Impersonators) reference).process();
                processed.add(reference);
            }
        }
        // successfully processed this entry of the reference tracker
        // -> remove from the reference tracker.
        referenceTracker.removeReferences(processed);
    }

    // ---------------------------------------------< ProtectedNodeImporter >---

    /**
     * Starts handling a protected member node/list. Walks up from rep:Members
     * nodes (or takes the parent of a rep:MemberReferencesList) to find the
     * owning group; only returns {@code true} when a valid group was found.
     */
    @Override
    public boolean start(@NotNull Tree protectedParent) throws RepositoryException {
        Authorizable auth = null;
        if (isMemberNode(protectedParent)) {
            Tree groupTree = protectedParent;
            while (isMemberNode(groupTree)) {
                groupTree = groupTree.getParent();
            }
            auth = userManager.getAuthorizable(groupTree);
        } else if (isMemberReferencesListNode(protectedParent)) {
            auth = userManager.getAuthorizable(protectedParent.getParent());
        } // else: parent node is not of type rep:Members or rep:MemberReferencesList

        if (auth == null || !auth.isGroup()) {
            log.debug("Cannot handle protected node {}. It doesn't represent a valid Group, nor does any of its parents.", protectedParent);
            return false;
        } else {
            currentMembership = getMembership(auth.getPath());
            return true;
        }
    }

    /**
     * Collects member ids from a child node of the currently processed group:
     * legacy rep:Members nodes contribute every property value, while
     * rep:MemberReferences nodes contribute only the rep:members property.
     */
    @Override
    public void startChildInfo(@NotNull NodeInfo childInfo, @NotNull List<PropInfo> propInfos) {
        checkState(currentMembership != null);

        String ntName = childInfo.getPrimaryTypeName();
        //noinspection deprecation
        if (NT_REP_MEMBERS.equals(ntName)) {
            for (PropInfo prop : propInfos) {
                for (TextValue tv : prop.getTextValues()) {
                    currentMembership.addMember(tv.getString());
                }
            }
        } else if (NT_REP_MEMBER_REFERENCES.equals(ntName)) {
            for (PropInfo prop : propInfos) {
                if (REP_MEMBERS.equals(prop.getName())) {
                    currentMembership.addMembers(prop.getTextValues());
                }
            }
        } else {
            //noinspection deprecation
            log.warn("{} is not of type " + NT_REP_MEMBERS + " or " + NT_REP_MEMBER_REFERENCES, childInfo.getName());
        }
    }

    @Override
    public void endChildInfo() {
        // nothing to do
    }

    @Override
    public void end(@NotNull Tree protectedParent) {
        currentMembership = null;
    }

    //------------------------------------------------------------< private >---

    // Returns the (possibly new) Membership container collecting member ids
    // for the group at the given path.
    @NotNull
    private Membership getMembership(@NotNull String authId) {
        return memberships.computeIfAbsent(authId, k -> new Membership(authId));
    }

    private void checkInitialized() {
        if (!initialized) {
            throw new IllegalStateException("Not initialized");
        }
    }

    // True when the definition's multi-value status matches and its declaring
    // node type is (a subtype of) the given Oak node type.
    private boolean isValid(@NotNull PropertyDefinition definition, @NotNull String oakNodeTypeName, boolean multipleStatus) {
        return multipleStatus == definition.isMultiple() &&
                definition.getDeclaringNodeType().isNodeType(namePathMapper.getJcrName(oakNodeTypeName));
    }

    /**
     * Imports rep:authorizableId. The id must resolve to an existing
     * authorizable at the same path; otherwise the import fails with
     * ConstraintViolationException (invalid id) or AuthorizableExistsException
     * (id taken by a different authorizable).
     */
    private boolean importAuthorizableId(@NotNull Tree parent, @NotNull Authorizable a, @NotNull PropInfo propInfo, @NotNull PropertyDefinition def) throws RepositoryException {
        if (!isValid(def, NT_REP_AUTHORIZABLE, false)) {
            return false;
        }
        String id = propInfo.getTextValue().getString();
        Authorizable existing = userManager.getAuthorizable(id);
        if (existing == null) {
            String msg = "Cannot handle protected PropInfo " + propInfo + ". Invalid rep:authorizableId.";
            log.warn(msg);
            throw new ConstraintViolationException(msg);
        }

        if (a.getPath().equals(existing.getPath())) {
            parent.setProperty(REP_AUTHORIZABLE_ID, id);
        } else {
            throw new AuthorizableExistsException(id);
        }
        return true;
    }

    /**
     * Imports rep:principalName, validating and setting the principal, and
     * remembers the principal for later impersonation processing.
     */
    private boolean importPrincipalName(@NotNull Tree parent, @NotNull Authorizable a, @NotNull PropInfo propInfo, @NotNull PropertyDefinition def) throws RepositoryException {
        if (!isValid(def, NT_REP_AUTHORIZABLE, false)) {
            return false;
        }
        String principalName = propInfo.getTextValue().getString();
        Principal principal = new PrincipalImpl(principalName);
        userManager.checkValidPrincipal(principal, a.isGroup());
        userManager.setPrincipal(parent, principal);

        /*
         Remember principal of new user/group for further processing
         of impersonators
         */
        principals.put(principalName, a.getPrincipal());
        return true;
    }

    /**
     * Imports rep:password for a regular (non-system) user and caches the pw
     * so onCreate actions can be invoked later (see propertiesCompleted).
     */
    private boolean importPassword(@NotNull Tree parent, @NotNull Authorizable a, @NotNull PropInfo propInfo, @NotNull PropertyDefinition def) throws RepositoryException {
        if (a.isGroup() || !isValid(def, NT_REP_USER, false)) {
            log.warn("Unexpected authorizable or definition for property rep:password");
            return false;
        }
        if (((User) a).isSystemUser()) {
            log.warn("System users may not have a password set.");
            return false;
        }
        String pw = propInfo.getTextValue().getString();
        userManager.setPassword(parent, a.getID(), pw, true);
        currentPw = pw;
        return true;
    }

    /**
     * Records rep:impersonators for deferred processing (impersonators may
     * only be created later in the same import).
     */
    private boolean importImpersonators(@NotNull Tree parent, @NotNull Authorizable a, @NotNull PropInfo propInfo, @NotNull PropertyDefinition def) {
        if (a.isGroup() || !isValid(def, MIX_REP_IMPERSONATABLE, true)) {
            log.warn("Unexpected authorizable or definition for property rep:impersonators");
            return false;
        }
        // since impersonators may be imported later on, postpone processing
        // to the end.
        // see -> process References
        referenceTracker.processedReference(new Impersonators(parent.getPath(), propInfo.getTextValues()));
        return true;
    }

    // Imports rep:disabled by disabling the user with the given reason string.
    private boolean importDisabled(@NotNull Authorizable a, @NotNull PropInfo propInfo, @NotNull PropertyDefinition def) throws RepositoryException {
        if (a.isGroup() || !isValid(def, NT_REP_USER, false)) {
            log.warn("Unexpected authorizable or definition for property rep:disabled");
            return false;
        }
        ((User) a).disable(propInfo.getTextValue().getString());
        return true;
    }

    private static boolean isMemberNode(@NotNull Tree tree) {
        //noinspection deprecation
        return tree.exists() && !tree.isRoot() && NT_REP_MEMBERS.equals(TreeUtil.getPrimaryTypeName(tree));
    }

    private static boolean isMemberReferencesListNode(@NotNull Tree tree) {
        return tree.exists() && NT_REP_MEMBER_REFERENCES_LIST.equals(TreeUtil.getPrimaryTypeName(tree));
    }

    private static boolean isPwdNode(@NotNull Tree tree) {
        return REP_PWD.equals(tree.getName()) && NT_REP_PASSWORD.equals(TreeUtil.getPrimaryTypeName(tree));
    }

    /**
     * Writes a property below the rep:pwd node, falling back to the definition
     * name for residual definitions and defaulting the type to LONG for
     * rep:passwordLastModified and STRING otherwise.
     */
    private static boolean importPwdNodeProperty(@NotNull Tree parent, @NotNull PropInfo propInfo, @NotNull PropertyDefinition def) throws RepositoryException {
        String propName = propInfo.getName();
        if (propName == null) {
            propName = def.getName();
            if (propName == null || NodeTypeConstants.RESIDUAL_NAME.equals(propName)) {
                return false;
            }
        }
        // overwrite any properties generated underneath the rep:pwd node
        // by "UserManagerImpl#setPassword" by the properties defined by
        // the XML to be imported. see OAK-1943 for the corresponding discussion.
        int targetType = def.getRequiredType();
        if (targetType == PropertyType.UNDEFINED) {
            targetType = (REP_PASSWORD_LAST_MODIFIED.equals(propName)) ? PropertyType.LONG : PropertyType.STRING;
        }
        PropertyState property;
        if (def.isMultiple()) {
            property = PropertyStates.createProperty(propName, propInfo.getValues(targetType));
        } else {
            property = PropertyStates.createProperty(propName, propInfo.getValue(targetType));
        }
        parent.setProperty(property);
        return true;
    }

    private static boolean isCacheNode(@NotNull Tree tree) {
        return tree.exists() && CacheConstants.REP_CACHE.equals(tree.getName()) && CacheConstants.NT_REP_CACHE.equals(TreeUtil.getPrimaryTypeName(tree));
    }

    /**
     * Handling the import behavior
     *
     * @param msg The message to log a warning in case of {@link ImportBehavior#IGNORE}
     *            or {@link ImportBehavior#BESTEFFORT}
     * @throws javax.jcr.nodetype.ConstraintViolationException If the import
     *                                                         behavior is {@link ImportBehavior#ABORT}.
     */
    private void handleFailure(String msg) throws ConstraintViolationException {
        switch (importBehavior) {
            case ImportBehavior.ABORT:
                throw new ConstraintViolationException(msg);
            case ImportBehavior.IGNORE:
            case ImportBehavior.BESTEFFORT:
            default:
                log.warn(msg);
                break;
        }
    }

    //------------------------------------------------------< inner classes >---

    /**
     * Inner class used to postpone import of group membership to the very end
     * of the import. This allows to import membership of user/groups that
     * are only being created during this import.
     *
     * @see ImportBehavior For additional configuration options.
     */
    private final class Membership {

        private final String authorizablePath;
        // TreeSet -> members are de-duplicated and processed in stable order.
        private final Set<String> members = new TreeSet<>();

        Membership(String authorizablePath) {
            this.authorizablePath = authorizablePath;
        }

        void addMember(String id) {
            members.add(id);
        }

        void addMembers(List<? extends TextValue> tvs) {
            for (TextValue tv : tvs) {
                addMember(tv.getString());
            }
        }

        /**
         * Reconciles the group's declared members with the imported member set:
         * removes members not present in the import, adds the new ones, and —
         * for BESTEFFORT — writes references to non-existing members directly
         * via the membership provider.
         */
        void process() throws RepositoryException {
            Authorizable a = userManager.getAuthorizableByPath(authorizablePath);
            if (a == null || !a.isGroup()) {
                throw new RepositoryException(authorizablePath + " does not represent a valid group.");
            }

            Group gr = (Group) a;

            // 1. collect members to add and to remove.
            Map<String, Authorizable> toRemove = new HashMap<>();
            for (Iterator<Authorizable> declMembers = gr.getDeclaredMembers(); declMembers.hasNext(); ) {
                Authorizable dm = declMembers.next();
                toRemove.put(dm.getID(), dm);
            }

            Map<String, String> nonExisting = Maps.newHashMap();
            Map<String, Authorizable> toAdd = getAuthorizablesToAdd(gr, toRemove, nonExisting);

            // 2. adjust members of the group
            if (!toRemove.isEmpty()) {
                Set<String> failed = gr.removeMembers(toRemove.keySet().toArray(new String[0]));
                if (!failed.isEmpty()) {
                    handleFailure("Failed removing members " + Iterables.toString(failed) + " to " + gr);
                }
            }

            if (!toAdd.isEmpty()) {
                Set<String> failed = gr.addMembers(toAdd.keySet().toArray(new String[0]));
                if (!failed.isEmpty()) {
                    handleFailure("Failed add members " + Iterables.toString(failed) + " to " + gr);
                }
            }

            // handling non-existing members in case of best-effort
            if (!nonExisting.isEmpty()) {
                log.debug("ImportBehavior.BESTEFFORT: Found {} entries of rep:members pointing to non-existing authorizables. Adding to rep:members.", nonExisting.size());
                Tree groupTree = root.getTree(gr.getPath());

                MembershipProvider membershipProvider = userManager.getMembershipProvider();

                Set<String> memberContentIds = Sets.newHashSet(nonExisting.keySet());
                Set<String> failedContentIds = membershipProvider.addMembers(groupTree, nonExisting);
                memberContentIds.removeAll(failedContentIds);

                userManager.onGroupUpdate(gr, false, true, memberContentIds, failedContentIds);
            }
        }

        /**
         * Resolves the collected member content ids to authorizables, moving
         * already-declared members out of {@code toRemove} and recording
         * unresolvable ids in {@code nonExisting} (BESTEFFORT only).
         */
        @NotNull
        Map<String, Authorizable> getAuthorizablesToAdd(@NotNull Group gr, @NotNull Map<String, Authorizable> toRemove,
                                                        @NotNull Map<String, String> nonExisting) throws RepositoryException {
            Map<String, Authorizable> toAdd = Maps.newHashMapWithExpectedSize(members.size());
            for (String contentId : members) {
                // NOTE: no need to check for re-mapped uuids with the referenceTracker because
                // ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW is not supported for user/group imports (see line 189)
                Authorizable member = null;
                try {
                    Tree n = getIdentifierManager().getTree(contentId);
                    member = userManager.getAuthorizable(n);
                } catch (RepositoryException e) {
                    // no such node or failed to retrieve authorizable
                    // warning is logged below.
                }
                if (member != null) {
                    if (toRemove.remove(member.getID()) == null) {
                        toAdd.put(member.getID(), member);
                    } // else: no need to remove from rep:members
                } else {
                    handleFailure("New member of " + gr + ": No such authorizable (NodeID = " + contentId + ')');
                    if (importBehavior == ImportBehavior.BESTEFFORT) {
                        log.debug("ImportBehavior.BESTEFFORT: Remember non-existing member for processing.");
                        /* since we ignore the set of failed ids later on and
                           don't know the real memberId => use fake memberId as
                           value in the map */
                        nonExisting.put(contentId, "-");
                    }
                }
            }
            return toAdd;
        }

        @NotNull
        private IdentifierManager getIdentifierManager() {
            if (identifierManager == null) {
                identifierManager = new IdentifierManager(root);
            }
            return identifierManager;
        }
    }

    /**
     * Inner class used to postpone import of impersonators to the very end
     * of the import. This allows to import impersonation values pointing
     * to user that are only being created during this import.
     *
     * @see ImportBehavior For additional configuration options.
     */
    private final class Impersonators {

        private final String userPath;
        private final Set<String> principalNames = new HashSet<>();

        private Impersonators(String userPath, List<? extends TextValue> values) {
            this.userPath = userPath;
            for (TextValue v : values) {
                principalNames.add(v.getString());
            }
        }

        /**
         * Reconciles the user's impersonators with the imported set; for
         * BESTEFFORT, unknown principal names are written back directly into
         * the rep:impersonators property.
         */
        private void process() throws RepositoryException {
            Authorizable a = userManager.getAuthorizableByOakPath(userPath);
            if (a == null || a.isGroup()) {
                throw new RepositoryException(userPath + " does not represent a valid user.");
            }

            Impersonation imp = checkNotNull(((User) a).getImpersonation());

            // 1. collect principals to add and to remove.
            Map<String, Principal> toRemove = new HashMap<>();
            for (PrincipalIterator pit = imp.getImpersonators(); pit.hasNext(); ) {
                Principal p = pit.nextPrincipal();
                toRemove.put(p.getName(), p);
            }

            List<String> toAdd = new ArrayList<>();
            for (final String principalName : principalNames) {
                if (toRemove.remove(principalName) == null) {
                    // add it to the list of new impersonators to be added.
                    toAdd.add(principalName);
                } // else: no need to revoke impersonation for the given principal.
            }

            // 2. adjust set of impersonators
            List<String> nonExisting = updateImpersonators(a, imp, toRemove, toAdd);
            if (!nonExisting.isEmpty()) {
                Tree userTree = checkNotNull(root.getTree(a.getPath()));
                // copy over all existing impersonators to the nonExisting list
                PropertyState impersonators = userTree.getProperty(REP_IMPERSONATORS);
                if (impersonators != null) {
                    for (String existing : impersonators.getValue(STRINGS)) {
                        nonExisting.add(existing);
                    }
                }
                // and write back the complete list including those principal
                // names that are unknown to principal provider.
                userTree.setProperty(REP_IMPERSONATORS, nonExisting, Type.STRINGS);
            }
        }

        /**
         * Revokes/grants impersonation as computed by {@link #process()} and
         * returns the principal names that could not be granted and are
         * unknown to the principal manager (BESTEFFORT only).
         */
        @NotNull
        private List<String> updateImpersonators(@NotNull Authorizable a, @NotNull Impersonation imp,
                                                 @NotNull Map<String, Principal> toRemove, @NotNull List<String> toAdd) throws RepositoryException {
            for (Principal p : toRemove.values()) {
                if (!imp.revokeImpersonation(p)) {
                    String principalName = p.getName();
                    handleFailure("Failed to revoke impersonation for " + principalName + " on " + a);
                }
            }
            List<String> nonExisting = new ArrayList<>();
            for (String principalName : toAdd) {
                Principal principal = (principals.containsKey(principalName)) ?
                        principals.get(principalName) :
                        new PrincipalImpl(principalName);
                if (!imp.grantImpersonation(principal)) {
                    handleFailure("Failed to grant impersonation for " + principalName + " on " + a);
                    if (importBehavior == ImportBehavior.BESTEFFORT &&
                            getPrincipalManager().getPrincipal(principalName) == null) {
                        log.debug("ImportBehavior.BESTEFFORT: Remember non-existing impersonator for special processing.");
                        nonExisting.add(principalName);
                    }
                }
            }
            return nonExisting;
        }

        @NotNull
        private PrincipalManager getPrincipalManager() {
            return userManager.getPrincipalManager();
        }
    }
}
/** * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.pricer.rate.swap; import static com.opengamma.strata.basics.BuySell.BUY; import static com.opengamma.strata.basics.PayReceive.RECEIVE; import static com.opengamma.strata.basics.currency.Currency.GBP; import static com.opengamma.strata.basics.currency.Currency.USD; import static com.opengamma.strata.basics.date.BusinessDayConventions.MODIFIED_FOLLOWING; import static com.opengamma.strata.basics.date.HolidayCalendars.GBLO; import static com.opengamma.strata.basics.date.Tenor.TENOR_5Y; import static com.opengamma.strata.basics.index.IborIndices.GBP_LIBOR_3M; import static com.opengamma.strata.basics.index.IborIndices.USD_LIBOR_3M; import static com.opengamma.strata.basics.index.IborIndices.USD_LIBOR_6M; import static com.opengamma.strata.basics.index.PriceIndices.GB_RPI; import static com.opengamma.strata.collect.TestHelper.assertThrowsIllegalArg; import static com.opengamma.strata.collect.TestHelper.date; import static com.opengamma.strata.finance.rate.swap.type.FixedIborSwapConventions.USD_FIXED_6M_LIBOR_3M; import static com.opengamma.strata.pricer.datasets.RatesProviderDataSets.MULTI_USD; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.FIXED_EXPANDED_SWAP_LEG_PAY; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.FIXED_EXPANDED_SWAP_LEG_PAY_USD; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.FIXED_RATE_PAYMENT_PERIOD_PAY_GBP; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.FIXED_RATE_PAYMENT_PERIOD_PAY_USD; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.IBOR_EXPANDED_SWAP_LEG_REC_GBP; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.IBOR_RATE_OBSERVATION; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.IBOR_RATE_PAYMENT_PERIOD_REC_GBP; import static 
com.opengamma.strata.pricer.rate.swap.SwapDummyData.INFLATION_FIXED_SWAP_LEG_PAY_GBP; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.INFLATION_MONTHLY_SWAP_LEG_REC_GBP; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.NOTIONAL; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.NOTIONAL_EXCHANGE_PAY_GBP; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.NOTIONAL_EXCHANGE_PAY_USD; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.NOTIONAL_EXCHANGE_REC_GBP; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.SWAP; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.SWAP_CROSS_CURRENCY; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.SWAP_INFLATION; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.SWAP_TRADE; import static com.opengamma.strata.pricer.rate.swap.SwapDummyData.SWAP_TRADE_CROSS_CURRENCY; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; import java.time.LocalDate; import java.time.Period; import java.time.YearMonth; import org.testng.annotations.Test; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.opengamma.analytics.math.interpolation.Interpolator1DFactory; import com.opengamma.strata.basics.currency.Currency; import com.opengamma.strata.basics.currency.CurrencyAmount; import com.opengamma.strata.basics.currency.MultiCurrencyAmount; import com.opengamma.strata.basics.date.BusinessDayAdjustment; import com.opengamma.strata.basics.date.DayCounts; import com.opengamma.strata.basics.date.DaysAdjustment; import com.opengamma.strata.basics.index.PriceIndex; import com.opengamma.strata.basics.interpolator.CurveInterpolator; import com.opengamma.strata.basics.schedule.Frequency; import com.opengamma.strata.basics.schedule.PeriodicSchedule; 
import com.opengamma.strata.basics.value.ValueSchedule; import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries; import com.opengamma.strata.finance.rate.swap.CompoundingMethod; import com.opengamma.strata.finance.rate.swap.ExpandedSwap; import com.opengamma.strata.finance.rate.swap.ExpandedSwapLeg; import com.opengamma.strata.finance.rate.swap.FixedRateCalculation; import com.opengamma.strata.finance.rate.swap.NotionalSchedule; import com.opengamma.strata.finance.rate.swap.PaymentEvent; import com.opengamma.strata.finance.rate.swap.PaymentPeriod; import com.opengamma.strata.finance.rate.swap.PaymentSchedule; import com.opengamma.strata.finance.rate.swap.RateCalculationSwapLeg; import com.opengamma.strata.finance.rate.swap.Swap; import com.opengamma.strata.finance.rate.swap.SwapLeg; import com.opengamma.strata.finance.rate.swap.SwapTrade; import com.opengamma.strata.finance.rate.swap.type.FixedIborSwapTemplate; import com.opengamma.strata.finance.rate.swap.type.IborIborSwapConvention; import com.opengamma.strata.finance.rate.swap.type.IborIborSwapTemplate; import com.opengamma.strata.finance.rate.swap.type.IborRateSwapLegConvention; import com.opengamma.strata.market.amount.CashFlow; import com.opengamma.strata.market.amount.CashFlows; import com.opengamma.strata.market.curve.Curves; import com.opengamma.strata.market.curve.InterpolatedNodalCurve; import com.opengamma.strata.market.explain.ExplainKey; import com.opengamma.strata.market.explain.ExplainMap; import com.opengamma.strata.market.sensitivity.CurveCurrencyParameterSensitivities; import com.opengamma.strata.market.sensitivity.IborRateSensitivity; import com.opengamma.strata.market.sensitivity.PointSensitivities; import com.opengamma.strata.market.sensitivity.PointSensitivityBuilder; import com.opengamma.strata.market.sensitivity.ZeroRateSensitivity; import com.opengamma.strata.market.value.ForwardPriceIndexValues; import com.opengamma.strata.market.value.PriceIndexValues; import 
com.opengamma.strata.pricer.datasets.RatesProviderDataSets;
import com.opengamma.strata.pricer.impl.MockRatesProvider;
import com.opengamma.strata.pricer.rate.ImmutableRatesProvider;
import com.opengamma.strata.pricer.rate.RatesProvider;
import com.opengamma.strata.pricer.sensitivity.RatesFiniteDifferenceSensitivityCalculator;

/**
 * Tests {@link DiscountingSwapProductPricer}.
 */
@Test
public class DiscountingSwapProductPricerTest {

  // provider carrying only a valuation date, used where curve lookups are mocked out
  private static final RatesProvider MOCK_PROV = new MockRatesProvider(RatesProviderDataSets.VAL_DATE_2014_01_22);
  private static final DiscountingSwapProductPricer PRICER_SWAP = DiscountingSwapProductPricer.DEFAULT;
  private static final ImmutableRatesProvider RATES_GBP = RatesProviderDataSets.MULTI_GBP;
  private static final ImmutableRatesProvider RATES_GBP_USD = RatesProviderDataSets.MULTI_GBP_USD;
  // bump size and tolerances for the finite-difference sensitivity checks
  private static final double FD_SHIFT = 1.0E-7;
  private static final RatesFiniteDifferenceSensitivityCalculator FINITE_DIFFERENCE_CALCULATOR =
      new RatesFiniteDifferenceSensitivityCalculator(FD_SHIFT);
  private static final double TOLERANCE_RATE = 1.0e-12;
  private static final double TOLERANCE_RATE_DELTA = 1.0E-6;
  private static final double TOLERANCE_PV = 1.0e-2;
  private static final CurveInterpolator INTERPOLATOR = Interpolator1DFactory.LINEAR_INSTANCE;
  // flat GB RPI forward curve at CONSTANT_INDEX with one historic fixing at START_INDEX
  private static final LocalDate VAL_DATE_INFLATION = date(2014, 7, 8);
  private static final YearMonth VAL_MONTH_INFLATION = YearMonth.of(2014, 7);
  private static final double CONSTANT_INDEX = 242d;
  private static final double START_INDEX = 218d;
  private static final PriceIndexValues PRICE_CURVE = ForwardPriceIndexValues.of(
      GB_RPI,
      VAL_MONTH_INFLATION,
      LocalDateDoubleTimeSeries.of(date(2014, 3, 31), START_INDEX),
      InterpolatedNodalCurve.of(
          Curves.prices("GB_RPI_CURVE_FLAT"),
          new double[] {1, 1000},
          new double[] {CONSTANT_INDEX, CONSTANT_INDEX},
          INTERPOLATOR));
  private static final IborIborSwapConvention CONV_USD_LIBOR3M_LIBOR6M = // No compounding
      IborIborSwapConvention.of(IborRateSwapLegConvention.of(USD_LIBOR_3M), IborRateSwapLegConvention.of(USD_LIBOR_6M));
  private static final double FIXED_RATE = 0.01;
  private static final double SPREAD = 0.0015;
  private static final double NOTIONAL_SWAP = 100_000_000;
  private static final SwapTrade SWAP_USD_FIXED_6M_LIBOR_3M_5Y = FixedIborSwapTemplate
      .of(Period.ZERO, TENOR_5Y, USD_FIXED_6M_LIBOR_3M)
      .toTrade(MULTI_USD.getValuationDate(), BUY, NOTIONAL_SWAP, FIXED_RATE);
  private static final SwapTrade SWAP_USD_LIBOR_3M_LIBOR_6M_5Y = IborIborSwapTemplate
      .of(Period.ZERO, TENOR_5Y, CONV_USD_LIBOR3M_LIBOR6M)
      .toTrade(MULTI_USD.getValuationDate(), BUY, NOTIONAL_SWAP, SPREAD);
  private static final DiscountingSwapProductPricer SWAP_PRODUCT_PRICER = DiscountingSwapProductPricer.DEFAULT;

  //-------------------------------------------------------------------------
  // the product pricer must expose the leg pricer it was constructed with
  public void test_legPricer() {
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    assertEquals(pricerSwap.getLegPricer(), pricerLeg);
  }

  //-------------------------------------------------------------------------
  public void test_parRate_singleCurrency() {
    RatesProvider mockProv = mock(RatesProvider.class);
    when(mockProv.discountFactor(GBP, FIXED_RATE_PAYMENT_PERIOD_PAY_GBP.getPaymentDate()))
        .thenReturn(0.99d);
    when(mockProv.getValuationDate()).thenReturn(RatesProviderDataSets.VAL_DATE_2014_01_22);
    when(mockProv.fxRate(GBP, GBP)).thenReturn(1.0);
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    double fwdRate = 0.01;
    double pvCpnIbor = 0.99 * fwdRate * 0.25 * 1_000_000;
    when(mockPeriod.presentValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, mockProv))
        .thenReturn(pvCpnIbor);
    double pvCpnFixed = -0.99 * 0.0123d * 0.25 * 1_000_000;
    when(mockPeriod.presentValue(FIXED_RATE_PAYMENT_PERIOD_PAY_GBP, mockProv))
        .thenReturn(pvCpnFixed);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    double pvNotional = 980_000d;
    when(mockEvent.presentValue(NOTIONAL_EXCHANGE_REC_GBP, mockProv))
        .thenReturn(pvNotional);
    when(mockEvent.presentValue(NOTIONAL_EXCHANGE_PAY_GBP, mockProv))
        .thenReturn(-pvNotional);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ExpandedSwap expanded = SWAP.expand();
    double pvbp = pricerLeg.pvbp(FIXED_EXPANDED_SWAP_LEG_PAY, mockProv);
    // par rate = -PV(other leg periods + events) / PVBP(fixed leg); notional exchanges cancel here
    double parRateExpected1 = -(pvCpnIbor + -pvNotional + pvNotional) / pvbp;
    double parRateExpected2 = fwdRate;
    double parRateComputed = pricerSwap.parRate(expanded, mockProv);
    assertEquals(parRateComputed, parRateExpected1, TOLERANCE_RATE);
    assertEquals(parRateComputed, parRateExpected2, TOLERANCE_RATE);
  }

  public void test_parRate_crossCurrency() {
    RatesProvider mockProv = mock(RatesProvider.class);
    when(mockProv.discountFactor(USD, FIXED_RATE_PAYMENT_PERIOD_PAY_USD.getPaymentDate()))
        .thenReturn(0.99d);
    when(mockProv.getValuationDate()).thenReturn(RatesProviderDataSets.VAL_DATE_2014_01_22);
    when(mockProv.fxRate(GBP, GBP)).thenReturn(1.0);
    when(mockProv.fxRate(USD, USD)).thenReturn(1.0);
    double fxGbpUsd = 1.51d;
    when(mockProv.fxRate(GBP, USD)).thenReturn(fxGbpUsd);
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    double fwdRate = 0.01;
    double pvCpnIborGbp = 0.99 * fwdRate * 0.25 * 1_000_000;
    when(mockPeriod.presentValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, mockProv))
        .thenReturn(pvCpnIborGbp);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    double pvNotionalGbp = 980_000d;
    when(mockEvent.presentValue(NOTIONAL_EXCHANGE_REC_GBP, mockProv))
        .thenReturn(pvNotionalGbp);
    double pvNotionalUsd = -fxGbpUsd * 981_000d;
    when(mockEvent.presentValue(NOTIONAL_EXCHANGE_PAY_USD, mockProv))
        .thenReturn(pvNotionalUsd);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ExpandedSwap expanded = SWAP_CROSS_CURRENCY.expand();
    double pvbp = pricerLeg.pvbp(FIXED_EXPANDED_SWAP_LEG_PAY_USD, mockProv);
    // GBP leg PV is converted at fxGbpUsd before dividing by the USD fixed-leg PVBP
    double parRateExpected = -((pvCpnIborGbp + pvNotionalGbp) * fxGbpUsd + pvNotionalUsd) / pvbp;
    double parRateComputed = pricerSwap.parRate(expanded, mockProv);
    assertEquals(parRateComputed, parRateExpected, TOLERANCE_RATE);
  }

  // par rate is undefined when neither leg is a fixed leg
  public void test_parRate_bothLegFloating() {
    Swap swap = Swap.builder()
        .legs(IBOR_EXPANDED_SWAP_LEG_REC_GBP, IBOR_EXPANDED_SWAP_LEG_REC_GBP)
        .build();
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    assertThrowsIllegalArg(() -> pricerSwap.parRate(swap, MOCK_PROV));
  }

  public void test_parRate_inflation() {
    DiscountingSwapLegPricer pricerLeg = DiscountingSwapLegPricer.DEFAULT;
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ImmutableMap<PriceIndex, PriceIndexValues> map = ImmutableMap.of(GB_RPI, PRICE_CURVE);
    ImmutableRatesProvider prov = ImmutableRatesProvider.builder()
        .valuationDate(VAL_DATE_INFLATION)
        .priceIndexValues(map)
        .discountCurves(RATES_GBP.getDiscountCurves())
        .build();
    double parRateComputed = pricerSwap.parRate(SWAP_INFLATION, prov);
    // rebuild the fixed leg at the computed par rate; the swap must then price to ~0
    RateCalculationSwapLeg fixedLeg = RateCalculationSwapLeg.builder()
        .payReceive(RECEIVE)
        .accrualSchedule(PeriodicSchedule.builder()
            .startDate(date(2014, 6, 9))
            .endDate(date(2019, 6, 9))
            .frequency(Frequency.P12M)
            .businessDayAdjustment(BusinessDayAdjustment.of(MODIFIED_FOLLOWING, GBLO))
.build())
        .paymentSchedule(PaymentSchedule.builder()
            .paymentFrequency(Frequency.ofYears(5))
            .paymentDateOffset(DaysAdjustment.ofBusinessDays(2, GBLO))
            .compoundingMethod(CompoundingMethod.STRAIGHT)
            .build())
        .notionalSchedule(NotionalSchedule.of(GBP, NOTIONAL))
        .calculation(FixedRateCalculation.builder()
            .rate(ValueSchedule.of(parRateComputed))
            .dayCount(DayCounts.ONE_ONE) // year fraction is always 1.
            .build())
        .build();
    Swap swapWithParRate = Swap.builder().legs(INFLATION_MONTHLY_SWAP_LEG_REC_GBP, fixedLeg).build();
    double pvWithParRate = pricerSwap.presentValue(swapWithParRate, prov).getAmount(GBP).getAmount();
    assertEquals(pvWithParRate, 0.0d, NOTIONAL * TOLERANCE_RATE);
  }

  public void test_parRate_inflation_periodic() {
    // fixed leg with semi-annual periodic payments against the monthly inflation leg
    SwapLeg fixedLeg = RateCalculationSwapLeg.builder()
        .payReceive(RECEIVE)
        .accrualSchedule(PeriodicSchedule.builder()
            .startDate(date(2014, 6, 9))
            .endDate(date(2019, 6, 9))
            .frequency(Frequency.P6M)
            .businessDayAdjustment(BusinessDayAdjustment.of(MODIFIED_FOLLOWING, GBLO))
            .build())
        .paymentSchedule(PaymentSchedule.builder()
            .paymentFrequency(Frequency.P6M)
            .paymentDateOffset(DaysAdjustment.ofBusinessDays(2, GBLO))
            .build())
        .notionalSchedule(NotionalSchedule.of(GBP, NOTIONAL))
        .calculation(FixedRateCalculation.builder()
            .rate(ValueSchedule.of(0.04))
            .dayCount(DayCounts.ACT_365F)
            .build())
        .build();
    Swap swap = Swap.builder().legs(INFLATION_MONTHLY_SWAP_LEG_REC_GBP, fixedLeg).build();
    DiscountingSwapLegPricer pricerLeg = DiscountingSwapLegPricer.DEFAULT;
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ImmutableMap<PriceIndex, PriceIndexValues> map = ImmutableMap.of(GB_RPI, PRICE_CURVE);
    ImmutableRatesProvider prov = ImmutableRatesProvider.builder()
        .valuationDate(VAL_DATE_INFLATION)
        .priceIndexValues(map)
        .discountCurves(RATES_GBP.getDiscountCurves())
        .build();
    double parRateComputed = pricerSwap.parRate(swap, prov);
    // re-price with the par rate substituted into the fixed leg; PV must be ~0
    SwapLeg fixedLegWithParRate = RateCalculationSwapLeg.builder()
        .payReceive(RECEIVE)
        .accrualSchedule(PeriodicSchedule.builder()
            .startDate(date(2014, 6, 9))
            .endDate(date(2019, 6, 9))
            .frequency(Frequency.P6M)
            .businessDayAdjustment(BusinessDayAdjustment.of(MODIFIED_FOLLOWING, GBLO))
            .build())
        .paymentSchedule(PaymentSchedule.builder()
            .paymentFrequency(Frequency.P6M)
            .paymentDateOffset(DaysAdjustment.ofBusinessDays(2, GBLO))
            .build())
        .notionalSchedule(NotionalSchedule.of(GBP, NOTIONAL))
        .calculation(FixedRateCalculation.builder()
            .rate(ValueSchedule.of(parRateComputed))
            .dayCount(DayCounts.ACT_365F)
            .build())
        .build();
    Swap swapWithParRate = Swap.builder().legs(INFLATION_MONTHLY_SWAP_LEG_REC_GBP, fixedLegWithParRate).build();
    double pvWithParRate = pricerSwap.presentValue(swapWithParRate, prov).getAmount(GBP).getAmount();
    assertEquals(pvWithParRate, 0.0d, NOTIONAL * TOLERANCE_RATE);
  }

  //-------------------------------------------------------------------------
  public void test_presentValue_singleCurrency() {
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    when(mockPeriod.presentValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, MOCK_PROV))
        .thenReturn(1000d);
    when(mockPeriod.presentValue(FIXED_RATE_PAYMENT_PERIOD_PAY_GBP, MOCK_PROV))
        .thenReturn(-500d);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    when(mockEvent.presentValue(NOTIONAL_EXCHANGE_REC_GBP, MOCK_PROV))
        .thenReturn(35d);
    when(mockEvent.presentValue(NOTIONAL_EXCHANGE_PAY_GBP, MOCK_PROV))
        .thenReturn(-30d);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ExpandedSwap expanded = SWAP.expand();
    // 1000 - 500 + 35 - 30 = 505
    assertEquals(pricerSwap.presentValue(expanded, MOCK_PROV), MultiCurrencyAmount.of(GBP, 505d));
    // test via SwapTrade
    DiscountingSwapTradePricer pricerTrade = new DiscountingSwapTradePricer(pricerSwap);
    assertEquals(
        pricerTrade.presentValue(SWAP_TRADE, MOCK_PROV),
        pricerSwap.presentValue(expanded, MOCK_PROV));
  }

  public void test_presentValue_crossCurrency() {
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    when(mockPeriod.presentValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, MOCK_PROV))
        .thenReturn(1000d);
    when(mockPeriod.presentValue(FIXED_RATE_PAYMENT_PERIOD_PAY_USD, MOCK_PROV))
        .thenReturn(-500d);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    // each leg is reported in its own currency; no FX conversion
    MultiCurrencyAmount expected = MultiCurrencyAmount.of(CurrencyAmount.of(GBP, 1000d), CurrencyAmount.of(USD, -500d));
    ExpandedSwap expanded = SWAP_CROSS_CURRENCY.expand();
    assertEquals(pricerSwap.presentValue(expanded, MOCK_PROV), expected);
    // test via SwapTrade
    DiscountingSwapTradePricer pricerTrade = new DiscountingSwapTradePricer(pricerSwap);
    assertEquals(
        pricerTrade.presentValue(SWAP_TRADE_CROSS_CURRENCY, MOCK_PROV),
        pricerSwap.presentValue(expanded, MOCK_PROV));
  }

  public void test_presentValue_withCurrency_crossCurrency() {
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    when(mockPeriod.presentValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, MOCK_PROV))
        .thenReturn(1000d);
    when(mockPeriod.presentValue(FIXED_RATE_PAYMENT_PERIOD_PAY_USD, MOCK_PROV))
        .thenReturn(-500d);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    // GBP leg converted to USD at MockRatesProvider.RATE, then both legs summed in USD
    CurrencyAmount expected = CurrencyAmount.of(USD, 1000d * MockRatesProvider.RATE - 500d);
    ExpandedSwap expanded = SWAP_CROSS_CURRENCY.expand();
    assertEquals(pricerSwap.presentValue(expanded, USD, MOCK_PROV), expected);
    // test via SwapTrade
    DiscountingSwapTradePricer pricerTrade = new DiscountingSwapTradePricer(pricerSwap);
    assertEquals(
        pricerTrade.presentValue(SWAP_TRADE_CROSS_CURRENCY, USD, MOCK_PROV),
        pricerSwap.presentValue(expanded, USD, MOCK_PROV));
  }

  public void test_presentValue_inflation() {
    DiscountingSwapLegPricer pricerLeg = DiscountingSwapLegPricer.DEFAULT;
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ImmutableMap<PriceIndex, PriceIndexValues> map = ImmutableMap.of(GB_RPI, PRICE_CURVE);
    ImmutableRatesProvider prov = ImmutableRatesProvider.builder()
        .valuationDate(VAL_DATE_INFLATION)
        .priceIndexValues(map)
        .discountCurves(RATES_GBP.getDiscountCurves())
        .build();
    LocalDate paymentDate = SWAP_INFLATION.getLegs().get(0).expand().getPaymentPeriods().get(0).getPaymentDate();
    double fixedRate = ((FixedRateCalculation) INFLATION_FIXED_SWAP_LEG_PAY_GBP.getCalculation())
        .getRate().getInitialValue();
    MultiCurrencyAmount pvComputed = pricerSwap.presentValue(SWAP_INFLATION, prov);
    // indexation ratio minus the 5y-compounded fixed rate, discounted to the payment date
    double pvExpected = (-(CONSTANT_INDEX / START_INDEX - 1.0) + Math.pow(1 + fixedRate, 5) - 1.0)
        * NOTIONAL * prov.discountFactor(GBP, paymentDate);
    assertTrue(pvComputed.getCurrencies().size() == 1);
    assertEquals(pvComputed.getAmount(GBP).getAmount(), pvExpected, NOTIONAL * TOLERANCE_RATE);
  }

  //-------------------------------------------------------------------------
  public void test_futureValue_singleCurrency() {
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    when(mockPeriod.futureValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, MOCK_PROV))
        .thenReturn(1000d);
    when(mockPeriod.futureValue(FIXED_RATE_PAYMENT_PERIOD_PAY_GBP, MOCK_PROV))
        .thenReturn(-500d);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ExpandedSwap expanded = SWAP.expand();
    // 1000 - 500 = 500 (no event stubbing here)
    assertEquals(pricerSwap.futureValue(expanded, MOCK_PROV), MultiCurrencyAmount.of(GBP, 500d));
    // test via SwapTrade
DiscountingSwapTradePricer pricerTrade = new DiscountingSwapTradePricer(pricerSwap);
    assertEquals(
        pricerTrade.futureValue(SWAP_TRADE, MOCK_PROV),
        pricerSwap.futureValue(expanded, MOCK_PROV));
  }

  public void test_futureValue_crossCurrency() {
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    when(mockPeriod.futureValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, MOCK_PROV))
        .thenReturn(1000d);
    when(mockPeriod.futureValue(FIXED_RATE_PAYMENT_PERIOD_PAY_USD, MOCK_PROV))
        .thenReturn(-500d);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    // each leg is reported in its own currency
    MultiCurrencyAmount expected = MultiCurrencyAmount.of(CurrencyAmount.of(GBP, 1000d), CurrencyAmount.of(USD, -500d));
    ExpandedSwap expanded = SWAP_CROSS_CURRENCY.expand();
    assertEquals(pricerSwap.futureValue(expanded, MOCK_PROV), expected);
    // test via SwapTrade
    DiscountingSwapTradePricer pricerTrade = new DiscountingSwapTradePricer(pricerSwap);
    assertEquals(
        pricerTrade.futureValue(SWAP_TRADE_CROSS_CURRENCY, MOCK_PROV),
        pricerSwap.futureValue(expanded, MOCK_PROV));
  }

  public void test_futureValue_inflation() {
    DiscountingSwapLegPricer pricerLeg = DiscountingSwapLegPricer.DEFAULT;
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ImmutableMap<PriceIndex, PriceIndexValues> map = ImmutableMap.of(GB_RPI, PRICE_CURVE);
    ImmutableRatesProvider prov = ImmutableRatesProvider.builder()
        .valuationDate(VAL_DATE_INFLATION)
        .priceIndexValues(map)
        .discountCurves(RATES_GBP.getDiscountCurves())
        .build();
    MultiCurrencyAmount fvComputed = pricerSwap.futureValue(SWAP_INFLATION, prov);
    double fixedRate = ((FixedRateCalculation) INFLATION_FIXED_SWAP_LEG_PAY_GBP.getCalculation())
        .getRate().getInitialValue();
    // same expectation as the PV test, but without discounting
    double fvExpected = (-(CONSTANT_INDEX / START_INDEX - 1.0) + Math.pow(1.0 + fixedRate, 5) - 1.0) * NOTIONAL;
    assertTrue(fvComputed.getCurrencies().size() == 1);
    assertEquals(fvComputed.getAmount(GBP).getAmount(), fvExpected, NOTIONAL * TOLERANCE_RATE);
  }

  //-------------------------------------------------------------------------
  public void test_accruedInterest_firstAccrualPeriod() {
    // valuation date lies inside the first accrual period
    RatesProvider prov = new MockRatesProvider(IBOR_RATE_PAYMENT_PERIOD_REC_GBP.getStartDate().plusDays(7));
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    when(mockPeriod.accruedInterest(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, prov))
        .thenReturn(1000d);
    when(mockPeriod.accruedInterest(FIXED_RATE_PAYMENT_PERIOD_PAY_GBP, prov))
        .thenReturn(-500d);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    assertEquals(pricerSwap.accruedInterest(SWAP, prov), MultiCurrencyAmount.of(GBP, 500d));
  }

  public void test_accruedInterest_valDateBeforePeriod() {
    // valuation date exactly at the period start -> nothing accrued
    RatesProvider prov = new MockRatesProvider(IBOR_RATE_PAYMENT_PERIOD_REC_GBP.getStartDate());
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    assertEquals(pricerSwap.accruedInterest(SWAP, prov), MultiCurrencyAmount.of(GBP, 0d));
  }

  public void test_accruedInterest_valDateAfterPeriod() {
    // valuation date after the period end -> nothing accrued
    RatesProvider prov = new MockRatesProvider(IBOR_RATE_PAYMENT_PERIOD_REC_GBP.getEndDate().plusDays(1));
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    assertEquals(pricerSwap.accruedInterest(SWAP, prov), MultiCurrencyAmount.of(GBP, 0d));
  }

  //-------------------------------------------------------------------------
  public void test_parRateSensitivity_singleCurrency() {
    ExpandedSwap expanded = SWAP.expand();
    PointSensitivities point = PRICER_SWAP.parRateSensitivity(expanded, RATES_GBP).build();
    CurveCurrencyParameterSensitivities prAd = RATES_GBP.curveParameterSensitivity(point);
    // analytic sensitivity must agree with a finite-difference bump of the provider
    CurveCurrencyParameterSensitivities prFd = FINITE_DIFFERENCE_CALCULATOR.sensitivity(
        RATES_GBP, p -> CurrencyAmount.of(GBP, PRICER_SWAP.parRate(expanded, p)));
    assertTrue(prAd.equalWithTolerance(prFd, TOLERANCE_RATE_DELTA));
  }

  public void test_parRateSensitivity_crossCurrency() {
    ExpandedSwap expanded = SWAP_CROSS_CURRENCY.expand();
    PointSensitivities point = PRICER_SWAP.parRateSensitivity(expanded, RATES_GBP_USD).build();
    CurveCurrencyParameterSensitivities prAd = RATES_GBP_USD.curveParameterSensitivity(point);
    CurveCurrencyParameterSensitivities prFd = FINITE_DIFFERENCE_CALCULATOR.sensitivity(
        RATES_GBP_USD, p -> CurrencyAmount.of(USD, PRICER_SWAP.parRate(expanded, p)));
    assertTrue(prAd.equalWithTolerance(prFd, TOLERANCE_RATE_DELTA));
  }

  //-------------------------------------------------------------------------
  public void test_presentValueSensitivity() {
    // ibor leg
    IborRateSensitivity fwdSense =
        IborRateSensitivity.of(GBP_LIBOR_3M, IBOR_RATE_OBSERVATION.getFixingDate(), GBP, 140.0);
    ZeroRateSensitivity dscSense =
        ZeroRateSensitivity.of(GBP, IBOR_RATE_PAYMENT_PERIOD_REC_GBP.getPaymentDate(), -162.0);
    PointSensitivityBuilder sensiFloating = fwdSense.combinedWith(dscSense);
    // fixed leg
    PointSensitivityBuilder sensiFixed =
        ZeroRateSensitivity.of(GBP, IBOR_RATE_PAYMENT_PERIOD_REC_GBP.getPaymentDate(), 152.0);
    // events
    Currency ccy = IBOR_EXPANDED_SWAP_LEG_REC_GBP.getCurrency();
    LocalDate paymentDateEvent = NOTIONAL_EXCHANGE_REC_GBP.getPaymentDate();
PointSensitivityBuilder sensiEvent = ZeroRateSensitivity.of(ccy, paymentDateEvent, -134.0);
    // expected = floating periods + one event per leg + fixed periods
    PointSensitivities expected = sensiFloating.build()
        .combinedWith(sensiEvent.build())
        .combinedWith(sensiFixed.build())
        .combinedWith(sensiEvent.build());
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    // thenAnswer so every invocation yields a fresh mutable sensitivity instance
    when(mockPeriod.presentValueSensitivity(IBOR_EXPANDED_SWAP_LEG_REC_GBP.getPaymentPeriods().get(0), MOCK_PROV))
        .thenAnswer(t -> sensiFloating.build().toMutable());
    when(mockPeriod.presentValueSensitivity(FIXED_EXPANDED_SWAP_LEG_PAY.getPaymentPeriods().get(0), MOCK_PROV))
        .thenAnswer(t -> sensiFixed.build().toMutable());
    when(mockEvent.presentValueSensitivity(IBOR_EXPANDED_SWAP_LEG_REC_GBP.getPaymentEvents().get(0), MOCK_PROV))
        .thenAnswer(t -> sensiEvent.build().toMutable());
    when(mockEvent.presentValueSensitivity(FIXED_EXPANDED_SWAP_LEG_PAY.getPaymentEvents().get(0), MOCK_PROV))
        .thenAnswer(t -> sensiEvent.build().toMutable());
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    PointSensitivities res = pricerSwap.presentValueSensitivity(SWAP, MOCK_PROV).build();
    assertTrue(res.equalWithTolerance(expected, TOLERANCE_RATE));
    // test via SwapTrade
    DiscountingSwapTradePricer pricerTrade = new DiscountingSwapTradePricer(pricerSwap);
    assertEquals(
        pricerTrade.presentValueSensitivity(SWAP_TRADE, MOCK_PROV),
        pricerSwap.presentValueSensitivity(SWAP, MOCK_PROV).build());
  }

  public void test_presentValueSensitivity_inflation() {
    DiscountingSwapLegPricer pricerLeg = DiscountingSwapLegPricer.DEFAULT;
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ImmutableMap<PriceIndex, PriceIndexValues> map = ImmutableMap.of(GB_RPI, PRICE_CURVE);
    ImmutableRatesProvider prov = ImmutableRatesProvider.builder()
        .valuationDate(VAL_DATE_INFLATION)
        .priceIndexValues(map)
        .discountCurves(RATES_GBP.getDiscountCurves())
        .build();
    PointSensitivityBuilder pvSensiComputed = pricerSwap.presentValueSensitivity(SWAP_INFLATION, prov);
    // swap sensitivity must equal the combination of its two legs' sensitivities
    PointSensitivityBuilder pvSensiInflationLeg =
        pricerLeg.presentValueSensitivity(INFLATION_MONTHLY_SWAP_LEG_REC_GBP, prov);
    PointSensitivityBuilder pvSensiFixedLeg =
        pricerLeg.presentValueSensitivity(INFLATION_FIXED_SWAP_LEG_PAY_GBP, prov);
    PointSensitivityBuilder pvSensiExpected = pvSensiFixedLeg.combinedWith(pvSensiInflationLeg);
    assertTrue(pvSensiComputed.build().normalized()
        .equalWithTolerance(pvSensiExpected.build().normalized(), TOLERANCE_RATE * NOTIONAL));
  }

  //-------------------------------------------------------------------------
  public void test_futureValueSensitivity() {
    // ibor leg
    PointSensitivityBuilder sensiFloating =
        IborRateSensitivity.of(GBP_LIBOR_3M, IBOR_RATE_OBSERVATION.getFixingDate(), GBP, 140.0);
    // fixed leg
    PointSensitivityBuilder sensiFixed = PointSensitivityBuilder.none();
    // events
    PointSensitivityBuilder sensiEvent = PointSensitivityBuilder.none();
    PointSensitivities expected = sensiFloating.build();
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class);
    when(mockPeriod.futureValueSensitivity(IBOR_EXPANDED_SWAP_LEG_REC_GBP.getPaymentPeriods().get(0), MOCK_PROV))
        .thenAnswer(t -> sensiFloating.build().toMutable());
    when(mockPeriod.futureValueSensitivity(FIXED_EXPANDED_SWAP_LEG_PAY.getPaymentPeriods().get(0), MOCK_PROV))
        .thenAnswer(t -> sensiFixed.build().toMutable());
    when(mockEvent.futureValueSensitivity(IBOR_EXPANDED_SWAP_LEG_REC_GBP.getPaymentEvents().get(0), MOCK_PROV))
        .thenAnswer(t -> sensiEvent.build().toMutable());
    when(mockEvent.futureValueSensitivity(FIXED_EXPANDED_SWAP_LEG_PAY.getPaymentEvents().get(0), MOCK_PROV))
        .thenAnswer(t -> sensiEvent.build().toMutable());
    DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent);
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    PointSensitivities res = pricerSwap.futureValueSensitivity(SWAP, MOCK_PROV).build();
    assertTrue(res.equalWithTolerance(expected, TOLERANCE_RATE));
    // test via SwapTrade
    DiscountingSwapTradePricer pricerTrade = new DiscountingSwapTradePricer(pricerSwap);
    assertEquals(
        pricerTrade.futureValueSensitivity(SWAP_TRADE, MOCK_PROV),
        pricerSwap.futureValueSensitivity(SWAP, MOCK_PROV).build());
  }

  public void test_futureValueSensitivity_inflation() {
    DiscountingSwapLegPricer pricerLeg = DiscountingSwapLegPricer.DEFAULT;
    DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg);
    ImmutableMap<PriceIndex, PriceIndexValues> map = ImmutableMap.of(GB_RPI, PRICE_CURVE);
    ImmutableRatesProvider prov = ImmutableRatesProvider.builder()
        .valuationDate(VAL_DATE_INFLATION)
        .priceIndexValues(map)
        .discountCurves(RATES_GBP.getDiscountCurves())
        .build();
    PointSensitivityBuilder fvSensiComputed = pricerSwap.futureValueSensitivity(SWAP_INFLATION, prov);
    PointSensitivityBuilder fvSensiInflationLeg =
        pricerLeg.futureValueSensitivity(INFLATION_MONTHLY_SWAP_LEG_REC_GBP, prov);
    PointSensitivityBuilder fvSensiFixedLeg =
        pricerLeg.futureValueSensitivity(INFLATION_FIXED_SWAP_LEG_PAY_GBP, prov);
    PointSensitivityBuilder fvSensiExpected = fvSensiFixedLeg.combinedWith(fvSensiInflationLeg);
    assertTrue(fvSensiComputed.build().normalized()
        .equalWithTolerance(fvSensiExpected.build().normalized(), TOLERANCE_RATE * NOTIONAL));
  }

  //-------------------------------------------------------------------------
  public void test_cashFlows() {
    RatesProvider mockProv = mock(RatesProvider.class);
    PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class);
    double df1 = 0.98;
    double df2 = 0.93;
    double fvGBP = 1000d;
    double fvUSD = -500d;
    when(mockPeriod.futureValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, mockProv)).thenReturn(fvGBP);
when(mockPeriod.futureValue(FIXED_RATE_PAYMENT_PERIOD_PAY_USD, mockProv)).thenReturn(fvUSD); when(mockProv.getValuationDate()).thenReturn(LocalDate.of(2014, 7, 1)); when(mockProv.discountFactor(IBOR_RATE_PAYMENT_PERIOD_REC_GBP.getCurrency(), IBOR_RATE_PAYMENT_PERIOD_REC_GBP.getPaymentDate())).thenReturn(df1); when(mockProv.discountFactor(FIXED_RATE_PAYMENT_PERIOD_PAY_USD.getCurrency(), FIXED_RATE_PAYMENT_PERIOD_PAY_USD.getPaymentDate())).thenReturn(df2); PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class); DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent); DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg); ExpandedSwap expanded = SWAP_CROSS_CURRENCY.expand(); CashFlows computed = pricerSwap.cashFlows(expanded, mockProv); CashFlow flowGBP = CashFlow.ofFutureValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP.getPaymentDate(), GBP, fvGBP, df1); CashFlow flowUSD = CashFlow.ofFutureValue(FIXED_RATE_PAYMENT_PERIOD_PAY_USD.getPaymentDate(), USD, fvUSD, df2); CashFlows expected = CashFlows.of(ImmutableList.of(flowGBP, flowUSD)); assertEquals(computed, expected); // test via SwapTrade DiscountingSwapTradePricer pricerTrade = new DiscountingSwapTradePricer(pricerSwap); assertEquals(pricerTrade.cashFlows(SWAP_TRADE, MOCK_PROV), pricerSwap.cashFlows(expanded, MOCK_PROV)); } //------------------------------------------------------------------------- public void test_explainPresentValue_singleCurrency() { PaymentPeriodPricer<PaymentPeriod> mockPeriod = mock(PaymentPeriodPricer.class); when(mockPeriod.presentValue(IBOR_RATE_PAYMENT_PERIOD_REC_GBP, MOCK_PROV)) .thenReturn(1000d); when(mockPeriod.presentValue(FIXED_RATE_PAYMENT_PERIOD_PAY_GBP, MOCK_PROV)) .thenReturn(-500d); PaymentEventPricer<PaymentEvent> mockEvent = mock(PaymentEventPricer.class); when(mockEvent.presentValue(NOTIONAL_EXCHANGE_REC_GBP, MOCK_PROV)) .thenReturn(35d); when(mockEvent.presentValue(NOTIONAL_EXCHANGE_PAY_GBP, 
MOCK_PROV)) .thenReturn(-30d); DiscountingSwapLegPricer pricerLeg = new DiscountingSwapLegPricer(mockPeriod, mockEvent); DiscountingSwapProductPricer pricerSwap = new DiscountingSwapProductPricer(pricerLeg); assertEquals(pricerSwap.presentValue(SWAP, MOCK_PROV), MultiCurrencyAmount.of(GBP, 505d)); ExplainMap explain = pricerSwap.explainPresentValue(SWAP, MOCK_PROV); assertEquals(explain.get(ExplainKey.ENTRY_TYPE).get(), "Swap"); assertEquals(explain.get(ExplainKey.LEGS).get().size(), 2); ExplainMap explainLeg0 = explain.get(ExplainKey.LEGS).get().get(0); ExpandedSwapLeg leg0 = (ExpandedSwapLeg) SWAP.getLegs().get(0); double fv0 = pricerLeg.futureValue(leg0, MOCK_PROV).getAmount(); assertEquals(explainLeg0.get(ExplainKey.ENTRY_TYPE).get(), "Leg"); assertEquals(explainLeg0.get(ExplainKey.ENTRY_INDEX).get().intValue(), 0); assertEquals(explainLeg0.get(ExplainKey.PAY_RECEIVE).get(), leg0.getPayReceive()); assertEquals(explainLeg0.get(ExplainKey.LEG_TYPE).get(), leg0.getType().toString()); assertEquals(explainLeg0.get(ExplainKey.PAYMENT_PERIODS).get().size(), 1); assertEquals(explainLeg0.get(ExplainKey.PAYMENT_EVENTS).get().size(), 1); assertEquals(explainLeg0.get(ExplainKey.FUTURE_VALUE).get().getCurrency(), leg0.getCurrency()); assertEquals(explainLeg0.get(ExplainKey.FUTURE_VALUE).get().getAmount(), fv0, TOLERANCE_RATE); ExplainMap explainLeg1 = explain.get(ExplainKey.LEGS).get().get(1); ExpandedSwapLeg leg1 = (ExpandedSwapLeg) SWAP.getLegs().get(0); double fv1 = pricerLeg.futureValue(leg1, MOCK_PROV).getAmount(); assertEquals(explainLeg1.get(ExplainKey.ENTRY_TYPE).get(), "Leg"); assertEquals(explainLeg1.get(ExplainKey.ENTRY_INDEX).get().intValue(), 1); assertEquals(explainLeg1.get(ExplainKey.PAYMENT_PERIODS).get().size(), 1); assertEquals(explainLeg1.get(ExplainKey.PAYMENT_EVENTS).get().size(), 1); assertEquals(explainLeg1.get(ExplainKey.FUTURE_VALUE).get().getCurrency(), leg1.getCurrency()); assertEquals(explainLeg1.get(ExplainKey.FUTURE_VALUE).get().getAmount(), 
fv1, TOLERANCE_RATE); } //------------------------------------------------------------------------- public void par_spread_fixed_ibor() { double ps = SWAP_PRODUCT_PRICER.parSpread(SWAP_USD_FIXED_6M_LIBOR_3M_5Y.getProduct(), MULTI_USD); SwapTrade swap0 = FixedIborSwapTemplate .of(Period.ZERO, TENOR_5Y, USD_FIXED_6M_LIBOR_3M) .toTrade(MULTI_USD.getValuationDate(), BUY, NOTIONAL_SWAP, FIXED_RATE + ps); CurrencyAmount pv0 = SWAP_PRODUCT_PRICER.presentValue(swap0.getProduct(), USD, MULTI_USD); assertEquals(pv0.getAmount(), 0, TOLERANCE_PV); } public void par_spread_ibor_ibor() { double ps = SWAP_PRODUCT_PRICER.parSpread(SWAP_USD_LIBOR_3M_LIBOR_6M_5Y.getProduct(), MULTI_USD); SwapTrade swap0 = IborIborSwapTemplate .of(Period.ZERO, TENOR_5Y, CONV_USD_LIBOR3M_LIBOR6M) .toTrade(MULTI_USD.getValuationDate(), BUY, NOTIONAL_SWAP, SPREAD + ps); CurrencyAmount pv0 = SWAP_PRODUCT_PRICER.presentValue(swap0.getProduct(), USD, MULTI_USD); assertEquals(pv0.getAmount(), 0, TOLERANCE_PV); } //------------------------------------------------------------------------- public void par_spread_sensitivity_fixed_ibor() { ExpandedSwap expanded = SWAP_USD_FIXED_6M_LIBOR_3M_5Y.getProduct().expand(); PointSensitivities point = PRICER_SWAP.parSpreadSensitivity(expanded, MULTI_USD).build(); CurveCurrencyParameterSensitivities prAd = MULTI_USD.curveParameterSensitivity(point); CurveCurrencyParameterSensitivities prFd = FINITE_DIFFERENCE_CALCULATOR.sensitivity( MULTI_USD, p -> CurrencyAmount.of(USD, PRICER_SWAP.parSpread(expanded, p))); assertTrue(prAd.equalWithTolerance(prFd, TOLERANCE_RATE_DELTA)); } public void par_spread_sensitivity_ibor_ibor() { ExpandedSwap expanded = SWAP_USD_LIBOR_3M_LIBOR_6M_5Y.getProduct().expand(); PointSensitivities point = PRICER_SWAP.parSpreadSensitivity(expanded, MULTI_USD).build(); CurveCurrencyParameterSensitivities prAd = MULTI_USD.curveParameterSensitivity(point); CurveCurrencyParameterSensitivities prFd = FINITE_DIFFERENCE_CALCULATOR.sensitivity( MULTI_USD, p -> 
CurrencyAmount.of(USD, PRICER_SWAP.parSpread(expanded, p))); assertTrue(prAd.equalWithTolerance(prFd, TOLERANCE_RATE_DELTA)); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.zookeeper.policy; import java.net.InetAddress; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import static java.lang.String.format; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.ExchangePattern; import org.apache.camel.Processor; import org.apache.camel.ProducerTemplate; import org.apache.camel.Route; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.zookeeper.SequenceComparator; import org.apache.camel.component.zookeeper.ZooKeeperEndpoint; import org.apache.camel.component.zookeeper.ZooKeeperMessage; import org.apache.camel.impl.JavaUuidGenerator; import org.apache.camel.impl.RoutePolicySupport; import org.apache.camel.spi.UuidGenerator; import org.apache.camel.util.ExchangeHelper; import org.apache.zookeeper.CreateMode; /** * <code>ZooKeeperRoutePolicy</code> uses the leader election capabilities of a * ZooKeeper cluster to 
control how routes are enabled. It is typically used in
 * fail-over scenarios controlling identical instances of a route across a
 * cluster of Camel based servers.
 * <p>
 * The policy is configured with a 'top n' number of routes that should be
 * allowed to start, for a master/slave scenario this would be 1. Each instance
 * of the policy will execute the election algorithm to obtain its position in
 * the hierarchy of servers, if it is within the 'top n' servers then the policy
 * is enabled and exchanges can be processed by the route. If not it waits for a
 * change in the leader hierarchy and then reruns this scenario to see if it is
 * now in the top n.
 * <p>
 * All instances of the policy must also be configured with the same path on the
 * ZooKeeper cluster where the election will be carried out. It is good practice
 * for this to indicate the application e.g. <tt>/someapplication/someroute/</tt> note
 * that these nodes should exist before using the policy.
 * <p>
 * See <a href="http://hadoop.apache.org/zookeeper/docs/current/recipes.html#sc_leaderElection">
 * for more on how Leader election</a> is achieved with ZooKeeper.
*/ public class ZooKeeperRoutePolicy extends RoutePolicySupport { private String uri; private int enabledCount; private String candidateName; private final Lock lock = new ReentrantLock(); private final CountDownLatch electionComplete = new CountDownLatch(1); private Set<Route> suspendedRoutes = new CopyOnWriteArraySet<Route>(); private AtomicBoolean shouldProcessExchanges = new AtomicBoolean(); private ProducerTemplate template; private boolean shouldStopConsumer = true; private UuidGenerator uuidGenerator = new JavaUuidGenerator(); private boolean isCandidateCreated; public ZooKeeperRoutePolicy(String uri, int enabledCount) throws Exception { this.uri = uri; this.enabledCount = enabledCount; createCandidateName(); } private void createCandidateName() throws Exception { /** UUID would be enough, also using hostname for human readability */ StringBuilder b = new StringBuilder(InetAddress.getLocalHost().getCanonicalHostName()); b.append("-").append(uuidGenerator.generateUuid()); this.candidateName = b.toString(); } @Override public void onExchangeBegin(Route route, Exchange exchange) { testAndCreateCandidateNode(route); awaitElectionResults(); if (!shouldProcessExchanges.get()) { if (shouldStopConsumer) { stopConsumer(route); } IllegalStateException e = new IllegalStateException("Zookeeper based route policy prohibits processing exchanges, stopping route and failing the exchange"); exchange.setException(e); } else { if (shouldStopConsumer) { startConsumer(route); } } } private void testAndCreateCandidateNode(Route route) { try { lock.lock(); if (!isCandidateCreated) { createCandidateNode(route.getRouteContext().getCamelContext()); isCandidateCreated = true; } } catch (Exception e) { handleException(e); } finally { lock.unlock(); } } private void awaitElectionResults() { while (electionComplete.getCount() > 0) { try { electionComplete.await(); } catch (InterruptedException e1) { } } } private void startConsumer(Route route) { try { lock.lock(); if 
(suspendedRoutes.contains(route)) { startConsumer(route.getConsumer()); suspendedRoutes.remove(route); } } catch (Exception e) { handleException(e); } finally { lock.unlock(); } } private void stopConsumer(Route route) { try { lock.lock(); // check that we should still suspend once the lock is acquired if (!suspendedRoutes.contains(route) && !shouldProcessExchanges.get()) { stopConsumer(route.getConsumer()); suspendedRoutes.add(route); } } catch (Exception e) { handleException(e); } finally { lock.unlock(); } } private void startAllStoppedConsumers() { try { lock.lock(); if (!suspendedRoutes.isEmpty()) { if (log.isDebugEnabled()) { log.debug(format("'%d' have been stopped previously by poilcy, restarting.", suspendedRoutes.size())); } for (Route suspended : suspendedRoutes) { startConsumer(suspended.getConsumer()); } suspendedRoutes.clear(); } } catch (Exception e) { handleException(e); } finally { lock.unlock(); } } public boolean isShouldStopConsumer() { return shouldStopConsumer; } public void setShouldStopConsumer(boolean shouldStopConsumer) { this.shouldStopConsumer = shouldStopConsumer; } private ZooKeeperEndpoint createCandidateNode(CamelContext camelContext) { this.template = camelContext.createProducerTemplate(); if (log.isInfoEnabled()) { log.info(format("Initializing ZookeeperRoutePolicy with uri '%s'", uri)); } ZooKeeperEndpoint zep = (ZooKeeperEndpoint)camelContext.getEndpoint(uri); zep.getConfiguration().setCreate(true); String fullpath = createFullPathToCandidate(zep); Exchange e = zep.createExchange(); e.setPattern(ExchangePattern.InOut); e.getIn().setHeader(ZooKeeperMessage.ZOOKEEPER_NODE, fullpath); e.getIn().setHeader(ZooKeeperMessage.ZOOKEEPER_CREATE_MODE, CreateMode.EPHEMERAL_SEQUENTIAL); template.send(zep, e); if (e.isFailed()) { log.error("Error setting up election node " + fullpath, e.getException()); } else { if (log.isInfoEnabled()) { log.info(format("Candidate node '%s' has been created", fullpath)); } try { if (zep != null) { 
camelContext.addRoutes(new ElectoralMonitorRoute(zep)); } } catch (Exception ex) { log.error("Error configuring ZookeeperRoutePolicy", ex); } } return zep; } private String createFullPathToCandidate(ZooKeeperEndpoint zep) { String fullpath = zep.getConfiguration().getPath(); if (!fullpath.endsWith("/")) { fullpath += "/"; } fullpath += candidateName; return fullpath; } private class ElectoralMonitorRoute extends RouteBuilder { private SequenceComparator comparator = new SequenceComparator(); private ZooKeeperEndpoint zep; public ElectoralMonitorRoute(ZooKeeperEndpoint zep) { this.zep = zep; zep.getConfiguration().setListChildren(true); zep.getConfiguration().setRepeat(true); } @Override public void configure() throws Exception { /** * TODO: this is cheap cheerful but suboptimal; it suffers from the * 'herd effect' that on any change to the candidates list every * policy instance will ask for the entire candidate list again. * This is fine for small numbers of nodes (for scenarios * like Master-Slave it is perfect) but could get noisy if * large numbers of nodes were involved. * <p> * Better would be to find the position of this node in the list and * watch the node in the position ahead node ahead of this and only * request the candidate list when its status changes. This will * require enhancing the consumer to allow custom operation lists. 
*/ from(zep).sort(body(), comparator).process(new Processor() { @SuppressWarnings("unchecked") public void process(Exchange e) throws Exception { List<String> candidates = (List<String>)ExchangeHelper.getMandatoryInBody(e); int location = Math.abs(Collections.binarySearch(candidates, candidateName)); /** * check if the item at this location starts with this nodes * candidate name */ if (isOurCandidateAtLocationInCandidatesList(candidates, location)) { shouldProcessExchanges.set(location <= enabledCount); if (log.isDebugEnabled()) { log.debug(format("This node is number '%d' on the candidate list, route is configured for the top '%d'. Exchange processing will be %s", location, enabledCount, shouldProcessExchanges.get() ? "enabled" : "disabled")); } startAllStoppedConsumers(); } electionComplete.countDown(); } private boolean isOurCandidateAtLocationInCandidatesList(List<String> candidates, int location) { return location <= candidates.size() && candidates.get(location - 1).startsWith(candidateName); } }); } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.util.concurrent;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import io.crate.common.unit.TimeValue;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Tests for {@code AbstractAsyncTask}: scheduling, auto/manual rescheduling,
 * closing, and interval changes. The tests synchronize with the background
 * task via {@link CyclicBarrier}s/{@link CountDownLatch}es, so the exact
 * order of await/reset calls below is significant.
 */
public class AbstractAsyncTaskTests extends ESTestCase {

    // shared pool for all tests; created/terminated once per class
    private static ThreadPool threadPool;

    @BeforeClass
    public static void setUpThreadPool() {
        threadPool = new TestThreadPool(AbstractAsyncTaskTests.class.getSimpleName());
    }

    @AfterClass
    public static void tearDownThreadPool() {
        terminate(threadPool);
    }

    /**
     * With autoReschedule=true the task must run again after each completed run,
     * and must stop being rescheduled once closed — even if runInternal throws.
     */
    @Test
    public void testAutoRepeat() throws Exception {

        // randomly let runInternal throw after each run to verify that an
        // exception does not break auto-rescheduling
        boolean shouldRunThrowException = randomBoolean();
        final CyclicBarrier barrier1 = new CyclicBarrier(2); // 1 for runInternal plus 1 for the test sequence
        final CyclicBarrier barrier2 = new CyclicBarrier(2); // 1 for runInternal plus 1 for the test sequence
        final AtomicInteger count = new AtomicInteger();
        AbstractAsyncTask task = new AbstractAsyncTask(logger, threadPool, TimeValue.timeValueMillis(1), true) {

            @Override
            protected boolean mustReschedule() {
                return true;
            }

            @Override
            protected void runInternal() {
                assertTrue("generic threadpool is configured",
                    Thread.currentThread().getName().contains("[generic]"));
                try {
                    barrier1.await();
                } catch (Exception e) {
                    fail("interrupted");
                }
                count.incrementAndGet();
                try {
                    barrier2.await();
                } catch (Exception e) {
                    fail("interrupted");
                }
                if (shouldRunThrowException) {
                    throw new RuntimeException("foo");
                }
            }

            @Override
            protected String getThreadPool() {
                return ThreadPool.Names.GENERIC;
            }
        };
        assertFalse(task.isScheduled());
        task.rescheduleIfNecessary();
        assertTrue(task.isScheduled());
        // first run: rendezvous before and after the counter increment
        barrier1.await();
        assertTrue(task.isScheduled());
        barrier2.await();
        assertEquals(1, count.get());
        // second run was auto-scheduled; close the task while it is mid-run
        barrier1.reset();
        barrier2.reset();
        barrier1.await();
        assertTrue(task.isScheduled());
        task.close();
        barrier2.await();
        assertEquals(2, count.get());
        assertTrue(task.isClosed());
        assertFalse(task.isScheduled());
        // no third run may happen after close
        assertEquals(2, count.get());
    }

    /**
     * With autoReschedule=false the task runs once per explicit
     * rescheduleIfNecessary() call and never re-arms itself.
     */
    @Test
    public void testManualRepeat() throws Exception {

        // randomly let runInternal throw to verify manual mode tolerates it too
        boolean shouldRunThrowException = randomBoolean();
        final CyclicBarrier barrier = new CyclicBarrier(2); // 1 for runInternal plus 1 for the test sequence
        final AtomicInteger count = new AtomicInteger();
        AbstractAsyncTask task = new AbstractAsyncTask(logger, threadPool, TimeValue.timeValueMillis(1), false) {

            @Override
            protected boolean mustReschedule() {
                return true;
            }

            @Override
            protected void runInternal() {
                assertTrue("generic threadpool is configured",
                    Thread.currentThread().getName().contains("[generic]"));
                count.incrementAndGet();
                try {
                    barrier.await();
                } catch (Exception e) {
                    fail("interrupted");
                }
                if (shouldRunThrowException) {
                    throw new RuntimeException("foo");
                }
            }

            @Override
            protected String getThreadPool() {
                return ThreadPool.Names.GENERIC;
            }
        };
        assertFalse(task.isScheduled());
        task.rescheduleIfNecessary();
        barrier.await();
        assertEquals(1, count.get());
        assertFalse(task.isScheduled());
        // without another reschedule call, no second run arrives within the timeout
        barrier.reset();
        expectThrows(TimeoutException.class, () -> barrier.await(10, TimeUnit.MILLISECONDS));
        assertEquals(1, count.get());
        barrier.reset();
        task.rescheduleIfNecessary();
        barrier.await();
        assertEquals(2, count.get());
        assertFalse(task.isScheduled());
        assertFalse(task.isClosed());
        task.close();
        assertTrue(task.isClosed());
    }

    /**
     * Closing a scheduled task before its (long) interval elapses cancels the
     * pending run.
     */
    @Test
    public void testCloseWithNoRun() {

        // 10-minute interval guarantees the task cannot fire before close()
        AbstractAsyncTask task = new AbstractAsyncTask(logger, threadPool, TimeValue.timeValueMinutes(10), true) {

            @Override
            protected boolean mustReschedule() {
                return true;
            }

            @Override
            protected void runInternal() {
            }
        };
        assertFalse(task.isScheduled());
        task.rescheduleIfNecessary();
        assertTrue(task.isScheduled());
        task.close();
        assertTrue(task.isClosed());
        assertFalse(task.isScheduled());
    }

    /**
     * Shrinking the interval of an already-scheduled task must take effect:
     * two runs complete quickly even though the original interval was 1 hour,
     * and the task stops rescheduling once mustReschedule() turns false.
     */
    @Test
    public void testChangeInterval() throws Exception {

        final CountDownLatch latch = new CountDownLatch(2);
        AbstractAsyncTask task = new AbstractAsyncTask(logger, threadPool, TimeValue.timeValueHours(1), true) {

            @Override
            protected boolean mustReschedule() {
                // stop rescheduling after the two expected runs
                return latch.getCount() > 0;
            }

            @Override
            protected void runInternal() {
                latch.countDown();
            }
        };
        assertFalse(task.isScheduled());
        task.rescheduleIfNecessary();
        assertTrue(task.isScheduled());
        task.setInterval(TimeValue.timeValueMillis(1));
        assertTrue(task.isScheduled());
        // This should only take 2 milliseconds in ideal conditions, but allow 10 seconds in case of VM stalls
        assertTrue(latch.await(10, TimeUnit.SECONDS));
        assertBusy(() -> assertFalse(task.isScheduled()));
        task.close();
        assertFalse(task.isScheduled());
        assertTrue(task.isClosed());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package groovy.xml.slurpersupport;

import groovy.lang.Buildable;
import groovy.lang.Closure;
import groovy.lang.GroovyObject;
import groovy.lang.GroovyRuntimeException;
import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;

import java.io.IOException;
import java.io.Writer;
import java.util.Iterator;
import java.util.Map;

/**
 * Lazy evaluated representation of child nodes.
 */
public class NodeChildren extends GPathResult {
    // cached element count; -1 until first computed by size()
    private int size = -1;

    /**
     * @param parent the GPathResult prior to the application of the expression creating this GPathResult
     * @param name if the GPathResult corresponds to something with a name, e.g. a node
     * @param namespacePrefix the namespace prefix if any
     * @param namespaceTagHints the known tag to namespace mappings
     */
    public NodeChildren(final GPathResult parent, final String name, final String namespacePrefix, final Map<String, String> namespaceTagHints) {
        super(parent, name, namespacePrefix, namespaceTagHints);
    }

    /**
     * @param parent the GPathResult prior to the application of the expression creating this GPathResult
     * @param name if the GPathResult corresponds to something with a name, e.g. a node
     * @param namespaceTagHints the known tag to namespace mappings
     */
    public NodeChildren(final GPathResult parent, final String name, final Map<String, String> namespaceTagHints) {
        // "*" namespace prefix means "match any namespace"
        this(parent, name, "*", namespaceTagHints);
    }

    /**
     * @param parent the GPathResult prior to the application of the expression creating this GPathResult
     * @param namespaceTagHints the known tag to namespace mappings
     */
    public NodeChildren(final GPathResult parent, final Map<String, String> namespaceTagHints) {
        // "*" name means "match any child element"
        this(parent, "*", namespaceTagHints);
    }

    /**
     * Returns a lazy iterator over the grandchildren of this result: for each node
     * produced by {@link #nodeIterator()}, its child nodes are flattened into one
     * sequence. Nodes without children are skipped entirely.
     */
    @Override
    public Iterator childNodes() {
        return new Iterator() {
            private final Iterator iter = nodeIterator();
            // iterator over the current node's children; null signals exhaustion
            private Iterator childIter = nextChildIter();

            @Override
            public boolean hasNext() {
                return childIter != null;
            }

            @Override
            public Object next() {
                while (childIter != null) {
                    try {
                        if (childIter.hasNext()) {
                            return childIter.next();
                        }
                    } finally {
                        // advance to the next non-empty child iterator as a side effect,
                        // even when the try block returned a value above
                        if (!childIter.hasNext()) {
                            childIter = nextChildIter();
                        }
                    }
                }
                return null;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }

            // finds the next node (from the outer iterator) that actually has children
            private Iterator nextChildIter() {
                while (iter.hasNext()) {
                    final Node node = (Node)iter.next();
                    final Iterator result = node.childNodes();
                    if (result.hasNext()) return result;
                }
                return null;
            }
        };
    }

    /**
     * Iterates the matched nodes, wrapping each raw {@link Node} in a
     * {@link NodeChild} so GPath expressions can continue from it.
     */
    @Override
    public Iterator iterator() {
        return new Iterator() {
            final Iterator iter = nodeIterator();

            @Override
            public boolean hasNext() {
                return iter.hasNext();
            }

            @Override
            public Object next() {
                // pop() supplies the parent GPathResult for the wrapped child
                return new NodeChild((Node) iter.next(), pop(), namespaceTagHints);
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /**
     * Returns an iterator over the raw nodes selected by this result: all of the
     * parent's children for the wildcard name "*", otherwise only children whose
     * name matches {@code name} and whose namespace satisfies the prefix rules.
     */
    @Override
    public Iterator nodeIterator() {
        if ("*".equals(name)) {
            return parent.childNodes();
        } else {
            return new NodeIterator(parent.childNodes()) {
                @Override
                protected Object getNextNode(Iterator iter) {
                    while (iter.hasNext()) {
                        final Node node = (Node) iter.next();
                        if (name.equals(node.name())) {
                            // accept when: prefix is the wildcard "*"; or prefix is empty and the
                            // node has no namespace; or the node's URI matches the prefix's mapping
                            if ("*".equals(namespacePrefix) ||
                                    ("".equals(namespacePrefix) && "".equals(node.namespaceURI())) ||
                                    node.namespaceURI().equals(namespaceMap.get(namespacePrefix))) {
                                return node;
                            }
                        }
                    }
                    return null;
                }
            };
        }
    }

    /**
     * Throws a <code>GroovyRuntimeException</code>, because it is not implemented yet.
     */
    @Override
    public GPathResult parents() {
        // TODO Auto-generated method stub
        throw new GroovyRuntimeException("parents() not implemented yet");
    }

    /**
     * Counts the matched nodes by exhausting {@link #iterator()}; the result is
     * cached, and the method is synchronized to guard the lazy computation.
     */
    @Override
    public synchronized int size() {
        if (this.size == -1) {
            final Iterator iter = iterator();
            this.size = 0;
            while (iter.hasNext()) {
                iter.next();
                this.size++;
            }
        }
        return this.size;
    }

    /**
     * Concatenates the text of every matched node, in iteration order.
     */
    @Override
    public String text() {
        final StringBuilder buf = new StringBuilder();
        final Iterator iter = nodeIterator();
        while (iter.hasNext()) {
            buf.append(((Node) iter.next()).text());
        }
        return buf.toString();
    }

    /**
     * Returns the first child for which the closure evaluates to a truthy value,
     * or a {@link NoChildren} placeholder when nothing matches.
     */
    @Override
    public GPathResult find(final Closure closure) {
        for (Object node : this) {
            if (DefaultTypeTransformation.castToBoolean(closure.call(new Object[]{node}))) {
                return (GPathResult) node;
            }
        }
        return new NoChildren(this, this.name, namespaceTagHints);
    }

    /**
     * Returns a lazily filtered view containing the children accepted by the closure.
     */
    @Override
    public GPathResult findAll(final Closure closure) {
        return new FilteredNodeChildren(this, closure, namespaceTagHints);
    }

    /**
     * Replays every matched node into the given builder; nodes that are themselves
     * {@link Buildable} delegate to their own build method.
     */
    @Override
    public void build(final GroovyObject builder) {
        final Iterator iter = nodeIterator();
        while (iter.hasNext()) {
            final Object next = iter.next();
            if (next instanceof Buildable) {
                ((Buildable) next).build(builder);
            } else {
                ((Node) next).build(builder, namespaceMap, namespaceTagHints);
            }
        }
    }

    /* (non-Javadoc)
     * @see groovy.lang.Writable#writeTo(java.io.Writer)
     */
    @Override
    public Writer writeTo(final Writer out) throws IOException {
        // serializes each matched node in turn to the supplied writer
        final Iterator iter = nodeIterator();
        while (iter.hasNext()) {
            ((Node) iter.next()).writeTo(out);
        }
        return out;
    }

    /**
     * Applies {@code replaceNode} to every matched child.
     */
    @Override
    protected void replaceNode(final Closure newValue) {
        for (Object o : this) {
            final NodeChild result = (NodeChild) o;
            result.replaceNode(newValue);
        }
    }

    /**
     * Applies {@code replaceBody} to every matched child.
     */
    @Override
    protected void replaceBody(final Object newValue) {
        for (Object o : this) {
            final NodeChild result = (NodeChild) o;
            result.replaceBody(newValue);
        }
    }

    /**
     * Appends the given value to every matched child.
     */
    @Override
    protected void appendNode(final Object newValue) {
        for (Object o : this) {
            final NodeChild result = (NodeChild) o;
            result.appendNode(newValue);
        }
    }
}
/*
 * Copyright 2006-2008 Sun Microsystems, Inc. All rights reserved.
 * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package com.sun.tools.javac.main;

import com.sun.tools.javac.util.Log;
import com.sun.tools.javac.util.Options;

import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Collection;

/**
 * TODO: describe com.sun.tools.javac.main.JavacOption
 *
 * <p>Models a single command-line option recognized by javac: whether it takes
 * an operand, how a raw argument string is matched against it, and how the
 * matched option/argument pair is recorded into an {@link Options} table.
 *
 * <p><b>This is NOT part of any API supported by Sun Microsystems.
 * If you write code that depends on this, you do so at your own
 * risk. This code and its internal interfaces are subject to change
 * or deletion without notice.</b></p>
 */
public interface JavacOption {

    OptionKind getKind();

    /** Does this option take a (separate) operand?
     *  @return true if this option takes a separate operand
     */
    boolean hasArg();

    /** Does argument string match option pattern?
     *  @param arg the command line argument string
     *  @return true if {@code arg} matches this option
     */
    boolean matches(String arg);

    /** Process an option with an argument.
     *  @param options the accumulated set of analyzed options
     *  @param option the option to be processed
     *  @param arg the arg for the option to be processed
     *  @return true if an error was detected
     */
    boolean process(Options options, String option, String arg);

    /** Process the option with no argument.
     *  @param options the accumulated set of analyzed options
     *  @param option the option to be processed
     *  @return true if an error was detected
     */
    boolean process(Options options, String option);

    OptionName getName();

    /** Visibility class of an option: shown in -help, shown in -X, or undocumented. */
    enum OptionKind {
        NORMAL,
        EXTENDED,
        HIDDEN,
    }

    /** How multiple choice values combine: exactly one (ONEOF) or a comma-set (ANYOF). */
    enum ChoiceKind {
        ONEOF,
        ANYOF
    }

    /** This class represents an option recognized by the main program
     */
    static class Option implements JavacOption {

        /** Option string. */
        OptionName name;

        /** Documentation key for arguments. */
        String argsNameKey;

        /** Documentation key for description.
         */
        String descrKey;

        /** Suffix option (-foo=bar or -foo:bar) */
        boolean hasSuffix;

        /** The kind of choices for this option, if any. */
        ChoiceKind choiceKind;

        /** The choices for this option, if any. */
        Collection<String> choices;

        Option(OptionName name, String argsNameKey, String descrKey) {
            this.name = name;
            this.argsNameKey = argsNameKey;
            this.descrKey = descrKey;
            // An option whose canonical name ends in ':' or '=' carries its
            // argument glued on as a suffix (e.g. -foo:bar) rather than as a
            // separate command-line word.
            char lastChar = name.optionName.charAt(name.optionName.length()-1);
            hasSuffix = lastChar == ':' || lastChar == '=';
        }

        Option(OptionName name, String descrKey) {
            this(name, null, descrKey);
        }

        Option(OptionName name, String descrKey, ChoiceKind choiceKind, String... choices) {
            this(name, descrKey, choiceKind, Arrays.asList(choices));
        }

        Option(OptionName name, String descrKey, ChoiceKind choiceKind, Collection<String> choices) {
            this(name, null, descrKey);
            if (choiceKind == null || choices == null)
                throw new NullPointerException();
            this.choiceKind = choiceKind;
            this.choices = choices;
        }

        @Override
        public String toString() {
            return name.optionName;
        }

        // A suffix option carries its operand inline, so it never takes a
        // separate operand word even when argsNameKey is set.
        public boolean hasArg() {
            return argsNameKey != null && !hasSuffix;
        }

        public boolean matches(String option) {
            // Non-suffix options must match exactly; suffix options match by
            // prefix, with the remainder validated against choices when present.
            if (!hasSuffix)
                return option.equals(name.optionName);
            if (!option.startsWith(name.optionName))
                return false;
            if (choices != null) {
                String arg = option.substring(name.optionName.length());
                if (choiceKind == ChoiceKind.ONEOF)
                    return choices.contains(arg);
                else {
                    // ANYOF: every comma-separated token must be a known choice.
                    for (String a: arg.split(",+")) {
                        if (!choices.contains(a))
                            return false;
                    }
                }
            }
            return true;
        }

        /** Print a line of documentation describing this option, if standard.
         *  @param out the stream to which to write the documentation
         */
        void help(PrintWriter out) {
            String s = " " + helpSynopsis();
            out.print(s);
            // Pad the synopsis column out to a fixed width before the description.
            for (int j = Math.min(s.length(), 28); j < 29; j++)
                out.print(" ");
            Log.printLines(out, Main.getLocalizedString(descrKey));
        }

        /** Build the left-hand synopsis shown in help output: the option name
         *  followed by either its {brace,separated,choices} or its localized
         *  argument placeholder.
         */
        String helpSynopsis() {
            StringBuilder sb = new StringBuilder();
            sb.append(name);
            if (argsNameKey == null) {
                if (choices != null) {
                    String sep = "{";
                    for (String c: choices) {
                        sb.append(sep);
                        sb.append(c);
                        sep = ",";
                    }
                    sb.append("}");
                }
            } else {
                if (!hasSuffix)
                    sb.append(" ");
                sb.append(Main.getLocalizedString(argsNameKey));
            }
            return sb.toString();
        }

        /** Print a line of documentation describing this option, if non-standard.
         *  @param out the stream to which to write the documentation
         */
        void xhelp(PrintWriter out) {}

        /** Process the option (with arg). Return true if error detected.
         */
        public boolean process(Options options, String option, String arg) {
            if (options != null) {
                if (choices != null) {
                    if (choiceKind == ChoiceKind.ONEOF) {
                        // some clients like to see just one of option+choice set
                        for (String c: choices)
                            options.remove(option + c);
                        String opt = option + arg;
                        options.put(opt, opt);
                        // some clients like to see option (without trailing ":")
                        // set to arg
                        String nm = option.substring(0, option.length() - 1);
                        options.put(nm, arg);
                    } else {
                        // set option+word for each word in arg
                        for (String a: arg.split(",+")) {
                            String opt = option + a;
                            options.put(opt, opt);
                        }
                    }
                }
                options.put(option, arg);
            }
            // This base implementation never reports an error.
            return false;
        }

        /** Process the option (without arg). Return true if error detected.
         */
        public boolean process(Options options, String option) {
            // Suffix options re-dispatch with the inline operand split off;
            // plain options record the option string as its own value.
            if (hasSuffix)
                return process(options, name.optionName, option.substring(name.optionName.length()));
            else
                return process(options, option, option);
        }

        public OptionKind getKind() {
            return OptionKind.NORMAL;
        }

        public OptionName getName() {
            return name;
        }
    };

    /** A nonstandard or extended (-X) option */
    static class XOption extends Option {
        XOption(OptionName name, String argsNameKey, String descrKey) {
            super(name, argsNameKey, descrKey);
        }
        XOption(OptionName name, String descrKey) {
            this(name, null, descrKey);
        }
        XOption(OptionName name, String descrKey, ChoiceKind kind, String... choices) {
            super(name, descrKey, kind, choices);
        }
        XOption(OptionName name, String descrKey, ChoiceKind kind, Collection<String> choices) {
            super(name, descrKey, kind, choices);
        }
        // Extended options print nothing under -help and print their
        // standard help text under -X instead.
        @Override
        void help(PrintWriter out) {}
        @Override
        void xhelp(PrintWriter out) {
            super.help(out);
        }
        @Override
        public OptionKind getKind() {
            return OptionKind.EXTENDED;
        }
    };

    /** A hidden (implementor) option */
    static class HiddenOption extends Option {
        HiddenOption(OptionName name) {
            super(name, null, null);
        }
        HiddenOption(OptionName name, String argsNameKey) {
            super(name, argsNameKey, null);
        }
        // Hidden options are documented nowhere: neither -help nor -X.
        @Override
        void help(PrintWriter out) {}
        @Override
        void xhelp(PrintWriter out) {}
        @Override
        public OptionKind getKind() {
            return OptionKind.HIDDEN;
        }
    };
}
package io.callstats.sdk;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.commons.lang3.StringUtils;

import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;

import io.callstats.sdk.data.BridgeStatusInfo;
import io.callstats.sdk.data.ConferenceInfo;
import io.callstats.sdk.data.ConferenceStats;
import io.callstats.sdk.data.ServerInfo;
import io.callstats.sdk.data.UserInfo;
import io.callstats.sdk.httpclient.CallStatsHttp2Client;
import io.callstats.sdk.internal.BridgeStatusInfoQueue;
import io.callstats.sdk.internal.CallStatsAuthenticator;
import io.callstats.sdk.internal.CallStatsBridgeKeepAliveManager;
import io.callstats.sdk.internal.CallStatsBridgeKeepAliveStatusListener;
import io.callstats.sdk.internal.CallStatsConferenceAliveManager;
import io.callstats.sdk.internal.CallStatsConfigProvider;
import io.callstats.sdk.internal.CallStatsConst;
import io.callstats.sdk.internal.CallStatsResponseStatus;
import io.callstats.sdk.internal.TokenGeneratorHs256;
import io.callstats.sdk.internal.listeners.CallStatsHttp2ResponseListener;
import io.callstats.sdk.listeners.CallStatsInitListener;
import io.callstats.sdk.listeners.CallStatsStartConferenceListener;
import io.callstats.sdk.messages.BridgeStatusUpdateMessage;
import io.callstats.sdk.messages.BridgeStatusUpdateResponse;
import io.callstats.sdk.messages.CallStatsEventResponse;
import io.callstats.sdk.messages.ConferenceSetupEvent;
import io.callstats.sdk.messages.ConferenceStatsEvent;
import io.callstats.sdk.messages.FabricSetupEvent;
import okhttp3.Response;

/**
 * The Class CallStats.
 *
 * <p>Entry point of the callstats.io bridge SDK: authenticates against the
 * service, keeps the session alive, and ships bridge status updates,
 * conference events and per-user conference statistics over HTTP/2.
 * Status updates posted before authentication completes are buffered in
 * {@link BridgeStatusInfoQueue} and flushed once initialized.
 *
 * @author Karthik Budigere
 */
public class CallStats {

	/** The http client. Used for status updates, conference events and stats. */
	private CallStatsHttp2Client httpClient;

	/** The app id. */
	private int appId;

	/** The bridge id. */
	private String bridgeId;

	/** The listener. Notified once on init success/failure. */
	private CallStatsInitListener listener;

	/** The authenticator. Owns the auth token lifecycle. */
	private CallStatsAuthenticator authenticator;

	/** The logger. */
	private static final Logger logger = Logger.getLogger("CallStats");

	/** The gson. Serializes outgoing messages and parses responses. */
	private Gson gson;

	/** The server info. */
	private ServerInfo serverInfo;

	/** The is initialized. */
	private boolean isInitialized;

	// Buffers BridgeStatusInfo submitted before authentication finishes.
	private BridgeStatusInfoQueue bridgeStatusInfoQueue;

	// Accumulated stats per "userID:confID" key, flushed by
	// stopStatsReportingForUser(). NOTE(review): the map itself is a plain
	// HashMap guarded only by the synchronized methods that touch it.
	private Map<String, List<ConferenceStats>> conferenceStatsMap = new HashMap<String, List<ConferenceStats>>();

	/** The bridge keep alive manager. */
	private CallStatsBridgeKeepAliveManager bridgeKeepAliveManager;

	private CallStatsConferenceAliveManager conferenceKeepAliveManager;

	private ICallStatsTokenGenerator tokenGenerator;

	// Separate clients so auth and conference keep-alive traffic do not share
	// the main client's disrupted state.
	private CallStatsHttp2Client authHttpClient;
	private CallStatsHttp2Client conferenceAliveHttpClient;

	/**
	 * Checks if is initialized.
	 *
	 * @return true, if is initialized
	 */
	public boolean isInitialized() {
		return isInitialized;
	}

	/**
	 * Sets the initialized.
	 *
	 * @param isInitialized the new initialized
	 */
	private void setInitialized(boolean isInitialized) {
		this.isInitialized = isInitialized;
	}

	/**
	 * Instantiates a new callstats.
	 *
	 * <p>The system property {@code callstats.configurationFile}, when set,
	 * overrides the default SDK configuration file path.
	 */
	public CallStats() {
		gson = new Gson();
		bridgeStatusInfoQueue = new BridgeStatusInfoQueue();
		if (System.getProperty("callstats.configurationFile") != null) {
			CallStatsConst.CallStatsJavaSDKPropertyFileName = System.getProperty("callstats.configurationFile");
		}
		logger.info("config file path is " + System.getProperty("callstats.configurationFile") + ":"
				+ CallStatsConst.CallStatsJavaSDKPropertyFileName);
		// SDK version is taken from the jar manifest; may be null when run
		// outside a packaged jar.
		CallStatsConst.CS_VERSION = getClass().getPackage().getImplementationVersion();
	}

	// Current auth token; null until authentication has succeeded.
	private String getToken() {
		return authenticator.getToken();
	}

	/**
	 * Initialize callstats.
	 *
	 * <p>Convenience overload that builds an HS256 token generator from the
	 * shared app secret and delegates to the generator-based overload.
	 *
	 * @param appId the app id
	 * @param appSecret the app secret
	 * @param bridgeId the bridge id
	 * @param serverInfo the server info
	 * @param callStatsInitListener the call stats init listener
	 * @throws IllegalArgumentException if {@code appSecret} is blank
	 */
	public void initialize(final int appId, final String appSecret, final String bridgeId,
			final ServerInfo serverInfo, final CallStatsInitListener callStatsInitListener) {
		if (StringUtils.isBlank(appSecret)) {
			logger.severe("intialize: Arguments cannot be null ");
			throw new IllegalArgumentException("intialize: Arguments cannot be null");
		}
		initialize(appId, new TokenGeneratorHs256(appSecret.toCharArray(), appId, bridgeId), bridgeId,
				serverInfo, callStatsInitListener);
	}

	/**
	 * Initialize callstats.
	 *
	 * <p>Builds the three HTTP/2 clients, kicks off asynchronous
	 * authentication, and — once authentication succeeds — starts the bridge
	 * keep-alive sender and notifies {@code callStatsInitListener}.
	 *
	 * @param appId the app id
	 * @param tokenGenerator token generator
	 * @param bridgeId the bridge id
	 * @param serverInfo the server info
	 * @param callStatsInitListener the call stats init listener
	 * @throws IllegalArgumentException if any argument is missing/invalid
	 */
	public void initialize(final int appId, ICallStatsTokenGenerator tokenGenerator,
			final String bridgeId, final ServerInfo serverInfo,
			final CallStatsInitListener callStatsInitListener) {
		if (appId <= 0 || StringUtils.isBlank(bridgeId) || serverInfo == null || callStatsInitListener == null) {
			logger.severe("intialize: Arguments cannot be null ");
			throw new IllegalArgumentException("intialize: Arguments cannot be null");
		}
		this.appId = appId;
		this.tokenGenerator = tokenGenerator;
		this.bridgeId = bridgeId;
		this.listener = callStatsInitListener;
		this.serverInfo = serverInfo;

		CallStatsConfigProvider.init();

		httpClient = new CallStatsHttp2Client(CallStatsConfigProvider.connectionTimeOut);
		authHttpClient = new CallStatsHttp2Client(CallStatsConfigProvider.connectionTimeOut);
		conferenceAliveHttpClient = new CallStatsHttp2Client(CallStatsConfigProvider.connectionTimeOut);

		authenticator = new CallStatsAuthenticator(appId, this.tokenGenerator, bridgeId, authHttpClient,
				new CallStatsInitListener() {
					public void onInitialized(String msg) {
						setInitialized(true);
						logger.info("SDK Initialized " + msg);
						// Keep-alive also flushes any queued status updates.
						startKeepAliveThread();
						listener.onInitialized(msg);
					}

					public void onError(CallStatsErrors error, String errMsg) {
						logger.info("SDK Initialization Failed " + errMsg);
						listener.onError(error, errMsg);;
					}
				});
		authenticator.doAuthentication();
	}

	/**
	 * Start the conference Alive sender
	 *
	 * <p>Lazily creates the conference keep-alive manager, posts a fabric
	 * setup event for this conference, then starts the periodic alive sender.
	 *
	 * @param originID initiator identifier
	 * @param confID conference identifier
	 * @param ucID ucID obtained from conference creation
	 */
	public void startConferenceAliveSender(String originID, String confID, String ucID) {
		if (conferenceKeepAliveManager == null) {
			conferenceKeepAliveManager = new CallStatsConferenceAliveManager(appId, bridgeId,
					authenticator.getToken(), conferenceAliveHttpClient,
					new CallStatsBridgeKeepAliveStatusListener() {
						public void onKeepAliveError(CallStatsErrors error, String errMsg) {
							// Token rejected: re-authenticate and let the
							// manager pick up the fresh token.
							if (error == CallStatsErrors.AUTH_ERROR) {
								authenticator.doAuthentication();
							}
						}

						public void onSuccess() {
						}
					});
		}
		long apiTS = System.currentTimeMillis();
		FabricSetupEvent eventMessage = new FabricSetupEvent(bridgeId, originID, "jitsi", apiTS);
		String requestMessageString = gson.toJson(eventMessage);
		String url = "";
		try {
			url = "/" + appId + "/conferences/" + URLEncoder.encode(confID, "utf-8") + "/" + ucID + "/events/fabric/setup";
		} catch (UnsupportedEncodingException e) {
			// TODO Auto-generated catch block
			// NOTE(review): "utf-8" is always supported, so this branch is
			// effectively unreachable; url stays "" if it ever fired.
			e.printStackTrace();
		}
		sendCallStatsConferenceEventMessage(url, requestMessageString, null);
		conferenceKeepAliveManager.startConferenceAliveSender(confID, ucID, authenticator.getToken());
	}

	/**
	 * Stop the conference Alive sender
	 *
	 * @param ucID ucID obtained from conference creation
	 */
	public void stopConferenceAliveSender(String ucID) {
		// No-op if the keep-alive manager was never created.
		if (conferenceKeepAliveManager == null) {
			return;
		}
		conferenceKeepAliveManager.stopConferenceAliveSender(ucID);
	}

	/**
	 * Send call stats bridge status update.
	 *
	 * <p>If the SDK is not yet authenticated the update is queued and sent
	 * later. On a successful response the queue is drained; on an invalid
	 * token the keep-alive sender is stopped and re-authentication starts.
	 *
	 * @param bridgeStatusInfo the bridge status info
	 */
	public void sendCallStatsBridgeStatusUpdate(BridgeStatusInfo bridgeStatusInfo) {
		if (!isInitialized()) {
			bridgeStatusInfoQueue.push(bridgeStatusInfo);
			return;
		}
		long epoch = System.currentTimeMillis();
		String token = getToken();
		BridgeStatusUpdateMessage eventMessage = new BridgeStatusUpdateMessage(bridgeId, epoch, bridgeStatusInfo);
		String requestMessageString = gson.toJson(eventMessage);
		String url = "/" + appId + "/stats/bridge/status";
		httpClient.sendBridgeStatistics(url, token, requestMessageString, new CallStatsHttp2ResponseListener() {
			public void onResponse(Response response) {
				int responseStatus = response.code();
				BridgeStatusUpdateResponse eventResponseMessage;
				try {
					String responseString = response.body().string();
					eventResponseMessage = gson.fromJson(responseString, BridgeStatusUpdateResponse.class);
				} catch (IOException e) {
					logger.log(Level.SEVERE, "IO Exception " + e.getMessage(), e);
					throw new RuntimeException(e);
				} catch (JsonSyntaxException e) {
					logger.log(Level.SEVERE, "Json Syntax Exception " + e.getMessage(), e);
					e.printStackTrace();
					throw new RuntimeException(e);
				}
				logger.fine("BridgeStatusUpdate Response " + eventResponseMessage.getStatus() + ":" + eventResponseMessage.getMsg());
				httpClient.setDisrupted(false);
				if (responseStatus == CallStatsResponseStatus.RESPONSE_STATUS_SUCCESS) {
					sendCallStatsBridgeStatusUpdateFromQueue();
				} else if (responseStatus == CallStatsResponseStatus.INVALID_AUTHENTICATION_TOKEN) {
					// NOTE(review): assumes the keep-alive manager exists by
					// the time a token error arrives — TODO confirm ordering.
					bridgeKeepAliveManager.stopKeepAliveSender();
					authenticator.doAuthentication();
				} else {
					httpClient.setDisrupted(true);
				}
			}

			public void onFailure(Exception e) {
				logger.log(Level.SEVERE, "Response exception" + e.getMessage(), e);
				httpClient.setDisrupted(true);
			}
		});
	}

	// Drains the pre-initialization queue by re-submitting each buffered
	// status update through the normal send path.
	private synchronized void sendCallStatsBridgeStatusUpdateFromQueue() {
		if (bridgeStatusInfoQueue.getLength() < 1)
			return;
		while (bridgeStatusInfoQueue.getLength() > 0) {
			BridgeStatusInfo bridgeStatusInfo = bridgeStatusInfoQueue.pop();
			sendCallStatsBridgeStatusUpdate(bridgeStatusInfo);
		}
	}

	/**
	 * Send a conference lifecycle event.
	 *
	 * <p>Only {@code CONFERENCE_SETUP} is currently handled; any other event
	 * type is reported back to the listener as a protocol error.
	 *
	 * @param eventType the event type
	 * @param conferenceInfo the conference info
	 * @param listener receives the ucID on success or an error
	 */
	public synchronized void sendCallStatsConferenceEvent(CallStatsConferenceEvents eventType,
			ConferenceInfo conferenceInfo, final CallStatsStartConferenceListener listener) {
		if (eventType == null || conferenceInfo == null || listener == null) {
			logger.severe("sendCallStatsConferenceEvent: Arguments cannot be null ");
			throw new IllegalArgumentException("sendCallStatsConferenceEvent: Arguments cannot be null");
		}
		long apiTS = System.currentTimeMillis();
		if (eventType == CallStatsConferenceEvents.CONFERENCE_SETUP) {
			ConferenceSetupEvent eventMessage = new ConferenceSetupEvent(bridgeId,
					conferenceInfo.getInitiatorID(), conferenceInfo.getInitiatorSiteID(), apiTS, serverInfo);
			String requestMessageString = gson.toJson(eventMessage);
			String url = "";
			try {
				url = "/" + appId + "/conferences/" + URLEncoder.encode(conferenceInfo.getConfID(), "utf-8");
			} catch (UnsupportedEncodingException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
			sendCallStatsConferenceEventMessage(url, requestMessageString, listener);
		} else {
			// NOTE(review): "Invaid" typo is in the wire/listener-facing
			// message; left byte-identical here.
			listener.onError(CallStatsErrors.CS_PROTO_ERROR, "Invaid Message type");
		}
	}

	/**
	 * Send a per-user conference event.
	 *
	 * <p>Currently a stub: all event categories are recognized but none are
	 * transmitted (see the TODOs below).
	 *
	 * @param eventType the event type
	 * @param userInfo the user info
	 */
	public synchronized void sendCallStatsConferenceEvent(CallStatsConferenceEvents eventType, UserInfo userInfo) {
		if (eventType == null || userInfo == null) {
			logger.severe("sendCallStatsConferenceEvent: Arguments cannot be null ");
			throw new IllegalArgumentException("sendCallStatsConferenceEvent: Arguments cannot be null");
		}
		// long apiTS = System.currentTimeMillis();
		// String token = getToken();
		if (eventType == CallStatsConferenceEvents.AUDIO_MUTE
				|| eventType == CallStatsConferenceEvents.AUDIO_UNMUTE
				|| eventType == CallStatsConferenceEvents.VIDEO_PAUSE
				|| eventType == CallStatsConferenceEvents.VIDEO_RESUME) {
			// TODO send media action event
		} else if (eventType == CallStatsConferenceEvents.USER_JOINED
				|| eventType == CallStatsConferenceEvents.USER_LEFT) {
			// TODO send user action event
		} else if (eventType == CallStatsConferenceEvents.FABRIC_HOLD
				|| eventType == CallStatsConferenceEvents.FABRIC_RESUME) {
			// TODO send fabric action event
		} else if (eventType == CallStatsConferenceEvents.CONFERENCE_SETUP_FAILED
				|| eventType == CallStatsConferenceEvents.CONFERENCE_TERMINATED) {
			// TODO send conference action event
		}
		// sendCallStatsConferenceEventMessage(userInfo.getConfID(), requestMessageString, null);
	}

	// POSTs a pre-serialized conference event; silently returns when no auth
	// token is available yet. listener may be null (fire-and-forget).
	private synchronized void sendCallStatsConferenceEventMessage(String url, String reqMsg,
			final CallStatsStartConferenceListener listener) {
		String token = getToken();
		if (token == null) {
			logger.severe("sendCallStatsConferenceEvent: Not Initialized/Token Unavaialble");
			return;
		}
		httpClient.sendBridgeEvents(url, token, reqMsg, new CallStatsHttp2ResponseListener() {
			public void onResponse(Response response) {
				int responseStatus = response.code();
				String responseString = "";
				try {
					responseString = response.body().string();
				} catch (IOException e1) {
					// TODO Auto-generated catch block
					e1.printStackTrace();
				}
				logger.fine("received response " + responseString);
				if (responseStatus == CallStatsResponseStatus.RESPONSE_STATUS_SUCCESS) {
					CallStatsEventResponse eventResponseMessage;
					try {
						eventResponseMessage = gson.fromJson(responseString, CallStatsEventResponse.class);
					} catch (JsonSyntaxException e) {
						logger.log(Level.SEVERE, "Json Syntax Exception " + e.getMessage(), e);
						e.printStackTrace();
						throw new RuntimeException(e);
					}
					logger.fine("conference event Response status is " + eventResponseMessage.getStatus() + ":" + eventResponseMessage.getUcID());
					if (listener != null) {
						listener.onResponse(eventResponseMessage.getUcID());
					}
					httpClient.setDisrupted(false);
				} else {
					httpClient.setDisrupted(true);
				}
			}

			public void onFailure(Exception e) {
				logger.log(Level.SEVERE, "Response exception" + e.getMessage(), e);
				httpClient.setDisrupted(true);
			}
		});
	}

	/**
	 * Begin collecting stats for a user in a conference.
	 *
	 * <p>Creates an empty accumulation list keyed by "userID:confID"; it is a
	 * no-op when reporting was already started for that pair.
	 *
	 * @param userID the user id
	 * @param confID the conference id
	 */
	public synchronized void startStatsReportingForUser(String userID, String confID) {
		if (userID == null || confID == null) {
			logger.severe("startStatsReportingForUser: Arguments cannot be null ");
			throw new IllegalArgumentException("startStatsReportingForUser: Arguments cannot be null");
		}
		String key = userID + ":" + confID;
		List<ConferenceStats> tempStats = conferenceStatsMap.get(key);
		if (tempStats == null) {
			tempStats = new ArrayList<ConferenceStats>();
			conferenceStatsMap.put(key, tempStats);
		}
	}

	/**
	 * Stop collecting stats for a user and flush what was accumulated.
	 *
	 * <p>Builds a {@link ConferenceStatsEvent} from the first accumulated
	 * entry's identifiers, adds every buffered stat, sends the batch, and
	 * removes the accumulation entry. Nothing is sent when no stats were
	 * reported for the pair.
	 *
	 * @param userID the user id
	 * @param confID the conference id
	 */
	public synchronized void stopStatsReportingForUser(String userID, String confID) {
		if (userID == null || confID == null) {
			logger.severe("stopStatsReportingForUser: Arguments cannot be null ");
			throw new IllegalArgumentException("stopStatsReportingForUser: Arguments cannot be null");
		}
		String key = userID + ":" + confID;
		List<ConferenceStats> tempStats = conferenceStatsMap.get(key);
		long apiTS = System.currentTimeMillis();
		if (tempStats != null && tempStats.size() > 0) {
			ConferenceStats conferenceStats = tempStats.get(0);
			// NOTE(review): getLocalUserID() is passed for two consecutive
			// constructor arguments — looks suspicious; verify against the
			// ConferenceStatsEvent constructor signature.
			ConferenceStatsEvent conferenceStatsEvent = new ConferenceStatsEvent(bridgeId,
					conferenceStats.getRemoteUserID(), conferenceStats.getLocalUserID(),
					conferenceStats.getLocalUserID(), apiTS);
			UserInfo info = new UserInfo(conferenceStats.getConfID(), conferenceStats.getRemoteUserID(), conferenceStats.getUcID());
			tempStats.forEach(new Consumer<ConferenceStats>() {
				public void accept(ConferenceStats stats) {
					conferenceStatsEvent.addStats(stats);
				}
			});
			String statsString = gson.toJson(conferenceStatsEvent);
			logger.fine("Stats string -" + statsString);
			sendCallStatsConferenceStats(statsString, info);
			conferenceStatsMap.remove(key);
		}
	}

	/**
	 * Buffer one stats sample for a user; requires a prior call to
	 * {@link #startStatsReportingForUser(String, String)}.
	 *
	 * @param userID the user id
	 * @param stats the sample to buffer
	 * @throws IllegalStateException if reporting was never started for the pair
	 */
	public synchronized void reportConferenceStats(String userID, ConferenceStats stats) {
		if (stats == null || userID == null) {
			logger.severe("sendConferenceStats: Arguments cannot be null ");
			throw new IllegalArgumentException("sendConferenceStats: Arguments cannot be null");
		}
		String key = userID + ":" + stats.getConfID();
		List<ConferenceStats> tempStats = conferenceStatsMap.get(key);
		if (tempStats == null) {
			// tempStats = new ArrayList<ConferenceStats>();
			throw new IllegalStateException(
					"reportConferenceStats called without calling startStatsReportingForUser");
		} else {
			tempStats.add(stats);
			// Redundant put (the list is already in the map) but harmless.
			conferenceStatsMap.put(key, tempStats);
		}
	}

	// Lazily creates the bridge keep-alive manager and (re)starts the sender
	// with the current token. Called after each successful authentication.
	private synchronized void startKeepAliveThread() {
		if (bridgeKeepAliveManager == null) {
			bridgeKeepAliveManager = new CallStatsBridgeKeepAliveManager(appId, bridgeId,
					authenticator.getToken(), httpClient, new CallStatsBridgeKeepAliveStatusListener() {
						public void onKeepAliveError(CallStatsErrors error, String errMsg) {
							if (error == CallStatsErrors.AUTH_ERROR) {
								authenticator.doAuthentication();
							}
						}

						public void onSuccess() {
							// Each successful keep-alive is a good moment to
							// drain any queued status updates.
							sendCallStatsBridgeStatusUpdateFromQueue();
						}
					});
		}
		bridgeKeepAliveManager.startKeepAliveSender(authenticator.getToken());
	}

	// POSTs a serialized ConferenceStatsEvent for the given user/conference;
	// requires a ucID and an auth token.
	private synchronized void sendCallStatsConferenceStats(String stats, UserInfo userInfo) {
		if (stats == null || userInfo == null) {
			logger.severe("sendCallStatsConferenceStats: Arguments cannot be null ");
			throw new IllegalArgumentException("sendCallStatsConferenceStats: Arguments cannot be null");
		}
		if (userInfo.getUcID() == null) {
			logger.severe("sendCallStatsConferenceStats: UCID is null ");
			throw new IllegalArgumentException("sendCallStatsConferenceStats: UCID is null");
		}
		String token = getToken();
		if (token == null) {
			logger.severe("sendCallStatsConferenceStats: Not Initialized/Token Unavaialble");
			return;
		}
		String url = "";
		try {
			url = "/" + appId + "/conferences/" + URLEncoder.encode(userInfo.getConfID(), "utf-8") + "/" + userInfo.getUcID() + "/stats";
		} catch (UnsupportedEncodingException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		httpClient.sendBridgeStats(url, token, stats, new CallStatsHttp2ResponseListener() {
			public void onResponse(Response response) {
				int responseStatus = response.code();
				String responseString = "";
				try {
					responseString = response.body().string();
				} catch (IOException e1) {
					e1.printStackTrace();
				}
				logger.fine("sendBridgeStats : received response " + responseString);
				if (responseStatus == CallStatsResponseStatus.RESPONSE_STATUS_SUCCESS) {
					httpClient.setDisrupted(false);
				} else {
					httpClient.setDisrupted(true);
				}
			}

			public void onFailure(Exception e) {
				logger.log(Level.SEVERE, "Response exception" + e.getMessage(), e);
				httpClient.setDisrupted(true);
			}
		});
	}
}
/*
 * Lantern
 *
 * Copyright (c) LanternPowered <https://www.lanternpowered.org>
 * Copyright (c) SpongePowered <https://www.spongepowered.org>
 * Copyright (c) contributors
 *
 * This work is licensed under the terms of the MIT License (MIT). For
 * a copy, see 'LICENSE.txt' or <https://opensource.org/licenses/MIT>.
 */
package org.lanternpowered.server.block.entity.vanilla;

import org.lanternpowered.server.block.entity.BlockEntityCreationData;
import org.lanternpowered.server.block.entity.ICarrierBlockEntity;
import org.lanternpowered.server.block.entity.LanternBlockEntity;
import org.lanternpowered.server.block.state.BlockStateProperties;
import org.lanternpowered.server.data.LocalKeyRegistry;
import org.lanternpowered.server.game.Lantern;
import org.lanternpowered.server.inventory.LanternItemStack;
import org.lanternpowered.server.inventory.PeekedOfferTransactionResult;
import org.lanternpowered.server.inventory.vanilla.VanillaInventoryArchetypes;
import org.lanternpowered.server.inventory.vanilla.block.FurnaceInventory;
import org.lanternpowered.server.item.recipe.IIngredient;
import org.lanternpowered.server.item.recipe.fuel.IFuel;
import org.lanternpowered.server.item.recipe.smelting.ISmeltingRecipe;
import org.lanternpowered.server.util.function.TriConsumer;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.entity.carrier.CarrierBlockEntity;
import org.spongepowered.api.block.entity.carrier.furnace.Furnace;
import org.spongepowered.api.data.Keys;
import org.spongepowered.api.item.inventory.Inventory;
import org.spongepowered.api.item.inventory.ItemStack;
import org.spongepowered.api.item.inventory.ItemStackSnapshot;
import org.spongepowered.api.item.inventory.type.BlockEntityInventory;
import org.spongepowered.api.item.recipe.smelting.SmeltingRecipe;
import org.spongepowered.api.item.recipe.smelting.SmeltingResult;
import org.spongepowered.api.util.Direction;

import java.util.Optional;
import java.util.OptionalInt;

/**
 * Furnace block entity: owns a three-slot furnace inventory and advances
 * burn/cook progress each update tick, consuming fuel and moving smelted
 * results from the input slot to the output slot.
 */
public class LanternFurnace extends LanternBlockEntity implements Furnace, ICarrierBlockEntity {

    // The inventory of the furnace
    private final FurnaceInventory inventory;

    // The tick since the last pulse
    // (-1 until the first update() establishes a baseline)
    private long lastTick = -1;

    // NOTE(review): creationData is accepted but not read here — presumably
    // consumed by the superclass machinery; confirm before removing.
    public LanternFurnace(BlockEntityCreationData creationData) {
        this.inventory = VanillaInventoryArchetypes.FURNACE.builder()
                .withCarrier(this).build(Lantern.getMinecraftPlugin());
        this.inventory.enableCachedProgress();
    }

    @Override
    public void registerKeys() {
        super.registerKeys();
        // Any change to a burn/cook key invalidates the inventory's cached
        // progress bars.
        final TriConsumer<LanternFurnace, Integer, Integer> clearProperty =
                (furnace, oldElement, newElement) -> furnace.inventory.resetCachedProgress();
        final LocalKeyRegistry<LanternFurnace> c = getKeyRegistry().forHolder(LanternFurnace.class);
        c.register(Keys.DISPLAY_NAME);
        c.register(Keys.MAX_BURN_TIME, 0).minimum(0).maximum(Integer.MAX_VALUE).addChangeListener(clearProperty);
        c.register(Keys.PASSED_BURN_TIME, 0).minimum(0).maximum(Keys.MAX_BURN_TIME).addChangeListener(clearProperty);
        c.register(Keys.MAX_COOK_TIME, 0).minimum(0).maximum(Integer.MAX_VALUE).addChangeListener(clearProperty);
        c.register(Keys.PASSED_COOK_TIME, 0).minimum(0).maximum(Keys.MAX_COOK_TIME).addChangeListener(clearProperty);
    }

    /**
     * Advances the furnace simulation by however many game ticks elapsed
     * since the previous call, possibly completing multiple cook cycles,
     * consuming fuel items, and syncing the block's LIT state at the end.
     */
    @Override
    public void update(double deltaTime) {
        super.update(deltaTime);

        // First pulse: just record the baseline tick.
        if (this.lastTick == -1) {
            this.lastTick = LanternGame.currentTimeTicks();
            return;
        }

        final long ticks = LanternGame.currentTimeTicks();
        long elapsed = ticks - this.lastTick;

        // This shouldn't happen
        if (elapsed == 0) {
            return;
        }
        this.lastTick = ticks;

        // Consume the elapsed ticks one cooking cycle at a time.
        while (elapsed > 0) {
            // maxCookTime stays 0 unless the current input is smeltable AND
            // the result fits in the output slot.
            int maxCookTime = 0;

            Optional<SmeltingResult> smeltingResult = Optional.empty();
            Optional<SmeltingRecipe> smeltingRecipe = Optional.empty();

            LanternItemStack itemStack = this.inventory.getInputSlot().getRawItemStack();
            final ItemStackSnapshot inputSlotItemSnapshot = itemStack.createSnapshot();
            if (!inputSlotItemSnapshot.isEmpty()) {
                // Check if the item can be smelted, this means finding a compatible
                // recipe and the output has to be empty.
                smeltingRecipe = Lantern.getRegistry().getSmeltingRecipeRegistry()
                        .findMatchingRecipe(inputSlotItemSnapshot);
                if (smeltingRecipe.isPresent()) {
                    final int quantity = ((ISmeltingRecipe) smeltingRecipe.get()).getIngredient().getQuantity(inputSlotItemSnapshot);
                    if (inputSlotItemSnapshot.getQuantity() >= quantity) {
                        smeltingResult = smeltingRecipe.get().getResult(inputSlotItemSnapshot);
                        // Check if the item can be smelted
                        if (smeltingResult.isPresent()) {
                            // Check if the result could be added to the output
                            final PeekedOfferTransactionResult peekResult = this.inventory.getOutputSlot().peekOffer(
                                    smeltingResult.get().getResult().createStack());
                            if (!peekResult.isEmpty()) {
                                // 200 ticks is the fallback smelt time when the
                                // recipe does not specify one.
                                maxCookTime = ((ISmeltingRecipe) smeltingRecipe.get())
                                        .getSmeltTime(inputSlotItemSnapshot).orElse(200);
                            }
                        }
                    }
                }
            }

            // The ticks that are elapsed in this loop, limit
            // this to one cooking cycle, this can only happen
            // if actually a item is being cooked
            long elapsed1 = elapsed;

            int elapsedCookTime = require(Keys.PASSED_COOK_TIME);
            int remainingCookTime = maxCookTime - elapsedCookTime;

            if (maxCookTime > 0 && elapsed1 > remainingCookTime) {
                elapsed1 = remainingCookTime;
            }
            elapsed -= elapsed1;

            // Burn items until the furnace is burning properly
            int maxBurnTime = require(Keys.MAX_BURN_TIME);
            int elapsedBurnTime = require(Keys.PASSED_BURN_TIME);
            int remainingBurnTime = maxBurnTime - elapsedBurnTime;

            long elapsed2 = elapsed1;
            while (elapsed2 >= remainingBurnTime) {
                elapsed2 -= remainingBurnTime;
                // Reset the max burn time
                maxBurnTime = 0;
                // Only burn a new item if the target item can be smelted
                itemStack = this.inventory.getFuelSlot().getRawItemStack();
                if (itemStack.isNotEmpty() && maxCookTime > 0) {
                    // Check for the next fuel item
                    final ItemStackSnapshot itemStackSnapshot = itemStack.createSnapshot();
                    final Optional<IFuel> result = Lantern.getRegistry().getFuelRegistry().findMatching(itemStackSnapshot);
                    if (result.isPresent()) {
                        final OptionalInt optBurnTime = result.get().getBurnTime(itemStackSnapshot);
                        // We have a next matching burn item, check if we can poll one and then continue burning
                        if (optBurnTime.isPresent() && this.inventory.getFuelSlot().poll(1).isNotEmpty()) {
                            maxBurnTime = optBurnTime.getAsInt();
                            remainingBurnTime = maxBurnTime;
                            elapsedBurnTime = 0;
                            // Put the rest item in the slot, if the slot is empty
                            // (e.g. a lava bucket leaves an empty bucket behind)
                            if (this.inventory.getFuelSlot().size() == 0) {
                                final IIngredient ingredient = result.get().getIngredient();
                                final Optional<ItemStack> remainingItem = ingredient.getRemainingItem(itemStackSnapshot);
                                remainingItem.ifPresent(this.inventory.getFuelSlot()::setForced);
                            }
                        }
                    }
                }
                // No fuel could be lit: stop trying to extend the burn.
                if (maxBurnTime == 0) {
                    break;
                }
            }

            elapsedBurnTime = maxBurnTime == 0 ? 0 : (int) (elapsedBurnTime + elapsed2);
            remainingBurnTime = maxBurnTime - elapsedBurnTime;

            offer(Keys.MAX_BURN_TIME, maxBurnTime);
            offer(Keys.PASSED_BURN_TIME, elapsedBurnTime);

            if (maxCookTime > 0) {
                // The furnace is still burning
                if (remainingBurnTime > 0) {
                    // The item is smelted
                    if (elapsed1 >= remainingCookTime) {
                        offer(Keys.MAX_COOK_TIME, 0);
                        offer(Keys.PASSED_COOK_TIME, 0);

                        final int quantity = ((ISmeltingRecipe) smeltingRecipe.get()).getIngredient().getQuantity(inputSlotItemSnapshot);
                        this.inventory.getOutputSlot().offer(smeltingResult.get().getResult().createStack());
                        this.inventory.getInputSlot().poll(quantity);

                        // Put the rest item in the slot
                        if (this.inventory.getInputSlot().size() == 0) {
                            final IIngredient ingredient = ((ISmeltingRecipe) smeltingRecipe.get()).getIngredient();
                            final Optional<ItemStack> remainingItem = ingredient.getRemainingItem(inputSlotItemSnapshot);
                            remainingItem.ifPresent(this.inventory.getInputSlot()::set);
                        }
                    } else {
                        // Keep on smelting
                        offer(Keys.MAX_COOK_TIME, maxCookTime);
                        offer(Keys.PASSED_COOK_TIME, (int) (elapsedCookTime + elapsed1));
                        break;
                    }
                } else if (elapsedCookTime > 0) {
                    // Undo smelting progress
                    // (progress decays at twice the rate it accumulates)
                    final long time = elapsedCookTime - elapsed1 * 2;
                    offer(Keys.MAX_COOK_TIME, time <= 0 ? 0 : maxCookTime);
                    offer(Keys.PASSED_COOK_TIME, (int) (time <= 0 ? 0 : time));
                    break;
                }
            } else {
                offer(Keys.MAX_COOK_TIME, 0);
                offer(Keys.PASSED_COOK_TIME, 0);
            }
        }

        // Sync the block's LIT state with whether fuel is still burning.
        BlockState blockState = getLocation().getBlock();

        final boolean burning = require(Keys.PASSED_BURN_TIME) < require(Keys.MAX_BURN_TIME);
        final boolean blockBurning = blockState.getStateProperty(BlockStateProperties.LIT).get();

        if (burning != blockBurning) {
            getLocation().setBlock(blockState.withStateProperty(BlockStateProperties.LIT, burning).get());
        }
    }

    /**
     * Attempts a single instantaneous smelt of the current input: when a
     * matching recipe exists, enough input is present, and the result fits
     * in the output slot, consumes the input and offers the result.
     *
     * @return true if one item (batch) was smelted
     */
    @Override
    public boolean process() {
        final LanternItemStack itemStack = this.inventory.getInputSlot().getRawItemStack();
        if (itemStack.isNotEmpty()) {
            // Check if the item can be smelted, this means finding a compatible
            // recipe and the output has to be empty.
            final ItemStackSnapshot itemStackSnapshot = itemStack.createSnapshot();
            final Optional<SmeltingRecipe> smeltingRecipe = Lantern.getRegistry().getSmeltingRecipeRegistry()
                    .findMatchingRecipe(itemStackSnapshot);
            final Optional<SmeltingResult> smeltingResult = smeltingRecipe.flatMap(recipe -> recipe.getResult(itemStackSnapshot));
            // Check if the item can be smelted
            if (smeltingResult.isPresent()) {
                final int quantity = ((ISmeltingRecipe) smeltingRecipe.get()).getIngredient().getQuantity(itemStackSnapshot);
                if (itemStack.getQuantity() >= quantity) {
                    final ItemStack result = smeltingResult.get().getResult().createStack();
                    // Check if the result could be added to the output
                    final PeekedOfferTransactionResult peekResult = this.inventory.getOutputSlot().peekOffer(result);
                    if (!peekResult.isEmpty()) {
                        this.inventory.getInputSlot().poll(quantity);
                        this.inventory.getOutputSlot().offer(result);
                        return true;
                    }
                }
            }
        }
        return false;
    }

    @Override
    public BlockEntityInventory<CarrierBlockEntity> getInventory() {
        return this.inventory;
    }

    /**
     * Sided inventory access: fuel from the sides, input from above,
     * output from below (hopper-style semantics).
     */
    @Override
    public Inventory getInventory(Direction from) {
        switch (from) {
            case EAST:
            case WEST:
            case SOUTH:
            case NORTH:
                return this.inventory.getFuelSlot();
            case UP:
                return this.inventory.getInputSlot();
            case DOWN:
                // TODO: Limited access to the fuel slot to pull out empty buckets?
                return this.inventory.getOutputSlot();
            default:
                return ICarrierBlockEntity.super.getInventory(from);
        }
    }
}
package com.emajliramokade.api.model.PopisKada;

import com.dslplatform.patterns.*;
import com.dslplatform.client.*;
import com.fasterxml.jackson.annotation.*;

/**
 * Immutable data-source record for a bathtub ("kada") listing, identified by its URI.
 *
 * NOTE(review): this class follows the DSL-Platform client code-generator shape
 * (Identifiable + Specification nested classes + proxy-based finders) — presumably
 * generated; avoid hand-editing logic here so a re-generation does not clobber it.
 *
 * Identity (equals/hashCode) is based solely on {@code URI}. The properties
 * {@code dodana} and {@code slikeKade} are mandatory and validated in the
 * JSON-creator constructor; {@code odobrena}/{@code odbijena} may be null.
 */
public final class KadaIzvorPodataka implements Identifiable, java.io.Serializable {

    /**
     * Jackson deserialization constructor.
     *
     * @param URI          unique identifier of this record
     * @param odobrena     timestamp of approval (nullable)
     * @param odbijena     timestamp of rejection (nullable)
     * @param brojacSlanja send counter
     * @param dodana       timestamp the record was added; must not be null
     * @param slikeKade    associated image resource; must not be null
     * @throws IllegalArgumentException if {@code dodana} or {@code slikeKade} is null
     */
    @JsonCreator
    public KadaIzvorPodataka(
            @JsonProperty("URI") final String URI,
            @JsonProperty("odobrena") final org.joda.time.DateTime odobrena,
            @JsonProperty("odbijena") final org.joda.time.DateTime odbijena,
            @JsonProperty("brojacSlanja") final int brojacSlanja,
            @JsonProperty("dodana") final org.joda.time.DateTime dodana,
            @JsonProperty("slikeKade") final com.emajliramokade.api.model.Resursi.SlikeKade slikeKade) {
        this.URI = URI;
        this.odobrena = odobrena;
        this.odbijena = odbijena;
        this.brojacSlanja = brojacSlanja;
        this.dodana = dodana;
        // Mandatory-property checks happen AFTER assignment; the exception still
        // prevents the instance from escaping the constructor.
        if (dodana == null)
            throw new IllegalArgumentException(
                "Property \"dodana\" cannot be null!");
        this.slikeKade = slikeKade;
        if (slikeKade == null)
            throw new IllegalArgumentException(
                "Property \"slikeKade\" cannot be null!");
    }

    // Private no-arg constructor for serialization frameworks only. It bypasses
    // the null checks above, so a framework-built instance may temporarily hold
    // null URI/dodana/slikeKade (hashCode() would NPE on such an instance).
    private KadaIzvorPodataka() {
        this.URI = null;
        this.odobrena = null;
        this.odbijena = null;
        this.brojacSlanja = 0;
        this.dodana = null;
        this.slikeKade = null;
    }

    // URI is the sole identity property.
    private final String URI;

    public String getURI() {
        return this.URI;
    }

    @Override
    public int hashCode() {
        return URI.hashCode();
    }

    /** Equality is URI-based only; other properties are ignored. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        final KadaIzvorPodataka other = (KadaIzvorPodataka) obj;
        return URI.equals(other.URI);
    }

    @Override
    public String toString() {
        return "KadaIzvorPodataka(" + URI + ')';
    }

    private static final long serialVersionUID = 0x0097000a;

    private final org.joda.time.DateTime odobrena;

    public org.joda.time.DateTime getOdobrena() {
        return this.odobrena;
    }

    private final org.joda.time.DateTime odbijena;

    public org.joda.time.DateTime getOdbijena() {
        return this.odbijena;
    }

    private final int brojacSlanja;

    public int getBrojacSlanja() {
        return this.brojacSlanja;
    }

    private final org.joda.time.DateTime dodana;

    public org.joda.time.DateTime getDodana() {
        return this.dodana;
    }

    private final com.emajliramokade.api.model.Resursi.SlikeKade slikeKade;

    public com.emajliramokade.api.model.Resursi.SlikeKade getSlikeKade() {
        return this.slikeKade;
    }

    /**
     * Server-side specification selecting unmoderated records ("nemoderirane kade").
     * The filter itself is evaluated remotely via {@code DomainProxy}; this class
     * only names the query and carries no parameters.
     */
    public static class NemoderiraneKade implements java.io.Serializable, Specification<KadaIzvorPodataka> {
        public NemoderiraneKade() {}

        private static final long serialVersionUID = 0x0097000a;

        /** Runs the query with no paging, using the default service locator. */
        public java.util.List<KadaIzvorPodataka> search() throws java.io.IOException {
            return search(null, null, Bootstrap.getLocator());
        }

        public java.util.List<KadaIzvorPodataka> search(
                final ServiceLocator locator) throws java.io.IOException {
            return search(null, null, locator);
        }

        public java.util.List<KadaIzvorPodataka> search(
                final Integer limit,
                final Integer offset) throws java.io.IOException {
            return search(limit, offset, Bootstrap.getLocator());
        }

        /**
         * Runs the remote query, blocking on the async proxy result.
         * Both interruption and remote failure are surfaced as IOException.
         * NOTE(review): InterruptedException is wrapped without re-interrupting
         * the thread — generator convention, left as-is.
         */
        public java.util.List<KadaIzvorPodataka> search(
                final Integer limit,
                final Integer offset,
                final ServiceLocator locator) throws java.io.IOException {
            try {
                return (locator != null ? locator : Bootstrap.getLocator())
                        .resolve(DomainProxy.class)
                        .search(this, limit, offset, null).get();
            } catch (final InterruptedException e) {
                throw new java.io.IOException(e);
            } catch (final java.util.concurrent.ExecutionException e) {
                throw new java.io.IOException(e);
            }
        }

        public long count() throws java.io.IOException {
            return count(Bootstrap.getLocator());
        }

        /** Counts matching records remotely; see search() for error semantics. */
        public long count(final ServiceLocator locator) throws java.io.IOException {
            try {
                return (locator != null ? locator : Bootstrap.getLocator())
                        .resolve(DomainProxy.class).count(this).get()
                        .longValue();
            } catch (final InterruptedException e) {
                throw new java.io.IOException(e);
            } catch (final java.util.concurrent.ExecutionException e) {
                throw new java.io.IOException(e);
            }
        }
    }

    /**
     * Server-side specification selecting approved records ("odobrene kade").
     * Structurally identical to {@link NemoderiraneKade}; the server distinguishes
     * the two by specification type.
     */
    public static class OdobreneKade implements java.io.Serializable, Specification<KadaIzvorPodataka> {
        public OdobreneKade() {}

        private static final long serialVersionUID = 0x0097000a;

        public java.util.List<KadaIzvorPodataka> search() throws java.io.IOException {
            return search(null, null, Bootstrap.getLocator());
        }

        public java.util.List<KadaIzvorPodataka> search(
                final ServiceLocator locator) throws java.io.IOException {
            return search(null, null, locator);
        }

        public java.util.List<KadaIzvorPodataka> search(
                final Integer limit,
                final Integer offset) throws java.io.IOException {
            return search(limit, offset, Bootstrap.getLocator());
        }

        /** Runs the remote query; wraps async failures into IOException. */
        public java.util.List<KadaIzvorPodataka> search(
                final Integer limit,
                final Integer offset,
                final ServiceLocator locator) throws java.io.IOException {
            try {
                return (locator != null ? locator : Bootstrap.getLocator())
                        .resolve(DomainProxy.class)
                        .search(this, limit, offset, null).get();
            } catch (final InterruptedException e) {
                throw new java.io.IOException(e);
            } catch (final java.util.concurrent.ExecutionException e) {
                throw new java.io.IOException(e);
            }
        }

        public long count() throws java.io.IOException {
            return count(Bootstrap.getLocator());
        }

        public long count(final ServiceLocator locator) throws java.io.IOException {
            try {
                return (locator != null ? locator : Bootstrap.getLocator())
                        .resolve(DomainProxy.class).count(this).get()
                        .longValue();
            } catch (final InterruptedException e) {
                throw new java.io.IOException(e);
            } catch (final java.util.concurrent.ExecutionException e) {
                throw new java.io.IOException(e);
            }
        }
    }

    /** Reads a single record by URI via the CRUD proxy and default locator. */
    public static KadaIzvorPodataka find(final String uri) throws java.io.IOException {
        return find(uri, null);
    }

    /**
     * Reads a single record by URI.
     * @param locator service locator; falls back to Bootstrap.getLocator() when null
     */
    public static KadaIzvorPodataka find(
            final String uri,
            final ServiceLocator locator) throws java.io.IOException {
        try {
            return (locator != null ? locator : Bootstrap.getLocator())
                    .resolve(CrudProxy.class)
                    .read(KadaIzvorPodataka.class, uri).get();
        } catch (final InterruptedException e) {
            throw new java.io.IOException(e);
        } catch (final java.util.concurrent.ExecutionException e) {
            throw new java.io.IOException(e);
        }
    }

    /** Bulk lookup by URIs using the default locator. */
    public static java.util.List<KadaIzvorPodataka> find(
            final Iterable<String> uris) throws java.io.IOException {
        return find(uris, Bootstrap.getLocator());
    }

    public static java.util.List<KadaIzvorPodataka> find(
            final Iterable<String> uris,
            final ServiceLocator locator) throws java.io.IOException {
        try {
            return (locator != null ? locator : Bootstrap.getLocator())
                    .resolve(DomainProxy.class)
                    .find(KadaIzvorPodataka.class, uris).get();
        } catch (final InterruptedException e) {
            throw new java.io.IOException(e);
        } catch (final java.util.concurrent.ExecutionException e) {
            throw new java.io.IOException(e);
        }
    }

    /** Fetches every record, unpaged, via the default locator. */
    public static java.util.List<KadaIzvorPodataka> findAll() throws java.io.IOException {
        return findAll(null, null, Bootstrap.getLocator());
    }

    public static java.util.List<KadaIzvorPodataka> findAll(
            final ServiceLocator locator) throws java.io.IOException {
        return findAll(null, null, locator);
    }

    public static java.util.List<KadaIzvorPodataka> findAll(
            final Integer limit,
            final Integer offset) throws java.io.IOException {
        return findAll(limit, offset, Bootstrap.getLocator());
    }

    public static java.util.List<KadaIzvorPodataka> findAll(
            final Integer limit,
            final Integer offset,
            final ServiceLocator locator) throws java.io.IOException {
        try {
            return (locator != null ? locator : Bootstrap.getLocator())
                    .resolve(DomainProxy.class)
                    .findAll(KadaIzvorPodataka.class, limit, offset, null)
                    .get();
        } catch (final InterruptedException e) {
            throw new java.io.IOException(e);
        } catch (final java.util.concurrent.ExecutionException e) {
            throw new java.io.IOException(e);
        }
    }

    /** Runs an arbitrary specification against this aggregate, unpaged. */
    public static java.util.List<KadaIzvorPodataka> search(
            final Specification<KadaIzvorPodataka> specification) throws java.io.IOException {
        return search(specification, null, null, Bootstrap.getLocator());
    }

    public static java.util.List<KadaIzvorPodataka> search(
            final Specification<KadaIzvorPodataka> specification,
            final ServiceLocator locator) throws java.io.IOException {
        return search(specification, null, null, locator);
    }

    public static java.util.List<KadaIzvorPodataka> search(
            final Specification<KadaIzvorPodataka> specification,
            final Integer limit,
            final Integer offset) throws java.io.IOException {
        return search(specification, limit, offset, Bootstrap.getLocator());
    }

    public static java.util.List<KadaIzvorPodataka> search(
            final Specification<KadaIzvorPodataka> specification,
            final Integer limit,
            final Integer offset,
            final ServiceLocator locator) throws java.io.IOException {
        try {
            return (locator != null ? locator : Bootstrap.getLocator())
                    .resolve(DomainProxy.class)
                    .search(specification, limit, offset, null).get();
        } catch (final InterruptedException e) {
            throw new java.io.IOException(e);
        } catch (final java.util.concurrent.ExecutionException e) {
            throw new java.io.IOException(e);
        }
    }

    /** Counts all records of this type. */
    public static long count() throws java.io.IOException {
        return count(Bootstrap.getLocator());
    }

    public static long count(final ServiceLocator locator) throws java.io.IOException {
        try {
            return (locator != null ? locator : Bootstrap.getLocator())
                    .resolve(DomainProxy.class).count(KadaIzvorPodataka.class)
                    .get().longValue();
        } catch (final InterruptedException e) {
            throw new java.io.IOException(e);
        } catch (final java.util.concurrent.ExecutionException e) {
            throw new java.io.IOException(e);
        }
    }

    /** Counts records matching the given specification. */
    public static long count(
            final Specification<KadaIzvorPodataka> specification) throws java.io.IOException {
        return count(specification, Bootstrap.getLocator());
    }

    public static long count(
            final Specification<KadaIzvorPodataka> specification,
            final ServiceLocator locator) throws java.io.IOException {
        try {
            return (locator != null ? locator : Bootstrap.getLocator())
                    .resolve(DomainProxy.class).count(specification).get()
                    .longValue();
        } catch (final InterruptedException e) {
            throw new java.io.IOException(e);
        } catch (final java.util.concurrent.ExecutionException e) {
            throw new java.io.IOException(e);
        }
    }
}
/*
 * Copyright (c) 2016, Mazen Kotb, mazenkotb@gmail.com
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
package xyz.mkotb.configapi.internal.adapt;

import org.bukkit.ChatColor;
import org.bukkit.OfflinePlayer;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.MemorySection;
import org.bukkit.configuration.serialization.ConfigurationSerializable;
import xyz.mkotb.configapi.Coloured;
import xyz.mkotb.configapi.RequiredField;
import xyz.mkotb.configapi.comment.Self;
import xyz.mkotb.configapi.ex.ClassStructureException;
import xyz.mkotb.configapi.ex.InvalidConfigurationException;
import xyz.mkotb.configapi.internal.InternalsHelper;
import xyz.mkotb.configapi.internal.SerializableMemorySection;
import xyz.mkotb.configapi.internal.adapt.impl.*;
import xyz.mkotb.configapi.internal.adapt.impl.atomic.*;
import xyz.mkotb.configapi.internal.adapt.impl.bukkit.*;
import xyz.mkotb.configapi.internal.naming.NamingStrategy;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.*;

/**
 * Central conversion engine between Java objects and Bukkit configuration values.
 *
 * <p>{@code adaptOut} turns an object into something a {@link ConfigurationSection}
 * can store (scalar, string, or a {@link MemorySection} built reflectively from the
 * object's fields); {@code adaptIn} performs the reverse. Conversion for well-known
 * types is delegated to {@link ObjectAdapter} instances in {@link #ADAPTERS};
 * anything else is handled by reflective field walking.
 */
public final class AdapterHandler {
    /** primitive type -> boxed counterpart, used to normalise field classes. */
    private static final Map<Class<?>, Class<?>> PRIMITIVE_BOXES = new ConcurrentHashMap<>();
    /**
     * Classes considered directly serializable (adapter-backed, boxed primitives,
     * String, Map, Collection). Consulted by {@link #isSerializable(Class)}.
     * NOTE(review): plain HashSet — mutation after startup is not thread-safe;
     * adapters are expected to be registered during plugin initialisation.
     */
    private static final Set<Class<?>> FILTER_CLASSES = new HashSet<>();
    /** target class -> adapter responsible for (de)serialising it. */
    private static final Map<Class<?>, ObjectAdapter<?, ?>> ADAPTERS = new ConcurrentHashMap<>();

    static {
        PRIMITIVE_BOXES.put(boolean.class, Boolean.class);
        PRIMITIVE_BOXES.put(char.class, Character.class);
        PRIMITIVE_BOXES.put(byte.class, Byte.class);
        PRIMITIVE_BOXES.put(short.class, Short.class);
        PRIMITIVE_BOXES.put(int.class, Integer.class);
        PRIMITIVE_BOXES.put(long.class, Long.class);
        PRIMITIVE_BOXES.put(float.class, Float.class);
        PRIMITIVE_BOXES.put(double.class, Double.class);
        PRIMITIVE_BOXES.put(void.class, Void.class);

        ADAPTERS.put(Date.class, new DateAdapter());
        ADAPTERS.put(java.sql.Date.class, new SQLDateAdapter());
        ADAPTERS.put(UUID.class, new UUIDAdapter());
        ADAPTERS.put(ConfigurationSection.class, new ConfigurationSectionAdapter());
        // NOTE(review): this entry is keyed by the ADAPTER class itself, unlike every
        // other entry which is keyed by the type being adapted. Lookups by the
        // enchantment type will therefore never hit it — likely should be keyed by
        // the Bukkit enchantment class. Left unchanged pending confirmation.
        ADAPTERS.put(EnchantmentAdapter.class, new EnchantmentAdapter());
        ADAPTERS.put(OfflinePlayer.class, new OfflinePlayerAdapter());
        ADAPTERS.put(AtomicBoolean.class, new AtomicBooleanAdapter());
        ADAPTERS.put(AtomicInteger.class, new AtomicIntegerAdapter());
        ADAPTERS.put(AtomicIntegerArray.class, new AtomicIntegerArrayAdapter());
        ADAPTERS.put(AtomicLong.class, new AtomicLongAdapter());
        ADAPTERS.put(AtomicLongArray.class, new AtomicLongArrayAdapter());

        FILTER_CLASSES.addAll(ADAPTERS.keySet());
        FILTER_CLASSES.addAll(PRIMITIVE_BOXES.values());
        FILTER_CLASSES.add(String.class);
        FILTER_CLASSES.add(Map.class);
        FILTER_CLASSES.add(Collection.class);
    }

    /** Strategy for mapping Java field names to configuration keys. */
    private final NamingStrategy namingStrategy;

    private AdapterHandler(NamingStrategy strategy) {
        this.namingStrategy = strategy;
    }

    /** Static factory; the only way to obtain an instance. */
    public static AdapterHandler create(NamingStrategy strategy) {
        return new AdapterHandler(strategy);
    }

    /**
     * Registers (or overrides) the adapter used for {@code clazz}.
     *
     * <p>BUG FIX: this previously called {@code ADAPTERS.replace(clazz, adapter)},
     * which is a no-op when no adapter is registered for {@code clazz} yet —
     * i.e. registering an adapter for any NEW type silently did nothing.
     * It also never added the class to {@link #FILTER_CLASSES}, so
     * {@link #isSerializable(Class)}/{@link #outClass(Class)} would still route
     * the type through reflective MemorySection handling, making
     * {@code outClass.cast(...)} fail against the adapter's output. Both are
     * corrected here, mirroring how the built-in adapters are wired in the
     * static initialiser.
     */
    public static <I> void registerAdapter(Class<I> clazz, ObjectAdapter<I, ?> adapter) {
        ADAPTERS.put(clazz, adapter);
        FILTER_CLASSES.add(clazz);
    }

    /**
     * @return true if {@code clazz} can be stored directly: a registered
     * ConfigurationSerializable, or assignable to/equal to a filter class.
     */
    public static boolean isSerializable(Class<?> clazz) {
        return (ConfigurationSerializable.class.isAssignableFrom(clazz)
                && ConfigurationSerializableHelper.isRegistered(clazz))
                || ((FILTER_CLASSES.stream().anyMatch((e) -> e.isAssignableFrom(clazz) || clazz.equals(e))));
    }

    /**
     * @return the configuration-side class for {@code clazz}: itself when directly
     * serializable, otherwise {@link MemorySection} (reflective object section).
     */
    public static Class<?> outClass(Class<?> clazz) {
        if (!isSerializable(clazz)) {
            return MemorySection.class;
        }

        return clazz;
    }

    /** Convenience overload of {@link #adaptOut(Object, Class, Class)} without element type. */
    public <I, O> O adaptOut(I input, Class<O> outClass) {
        return adaptOut(input, outClass, null);
    }

    /**
     * Converts {@code input} into its configuration representation.
     *
     * @param input    object to serialise (must be non-null)
     * @param outClass expected configuration-side class (see {@link #outClass(Class)})
     * @param type     element/value type for collections and maps, else null
     * @throws ClassStructureException if a registered adapter's types do not match
     */
    @SuppressWarnings("unchecked") // raw adapter plumbing is inherent to the registry design
    public <I, O> O adaptOut(I input, Class<O> outClass, Class<?> type) {
        // Containers and arrays are handled by dedicated adapters first.
        if (Collection.class.isAssignableFrom(input.getClass())) {
            CollectionAdapter adapter = CollectionAdapter.create(type, (Class<? extends Collection>) outClass, this);
            return outClass.cast(adapter.write((Collection) input));
        }

        if (Map.class.isAssignableFrom(input.getClass())) {
            MapAdapter adapter = MapAdapter.create(type, this);
            return outClass.cast(adapter.write((Map) input));
        }

        if (input.getClass().isArray()) {
            Class<?> cls = input.getClass();
            ArrayAdapter adapter = ArrayAdapter.create(cls.getComponentType(), this);
            return outClass.cast(adapter.write(input));
        }

        // Boxed primitives and strings are stored verbatim.
        if (PRIMITIVE_BOXES.values().contains(outClass) || input.getClass().isPrimitive()) {
            return outClass.cast(input);
        }

        if (outClass == String.class) {
            return outClass.cast(input);
        }

        ObjectAdapter<?, ?> oldAdapter = ADAPTERS.get(input.getClass());

        if (oldAdapter == null) {
            // Bukkit-native serialization takes precedence over reflective walking,
            // but only when the type is registered (needed for later deserialization).
            if (ConfigurationSerializableHelper.isRegistered(input.getClass())
                    && input instanceof ConfigurationSerializable) {
                MemorySection memorySection = InternalsHelper.newInstanceWithoutInit(SerializableMemorySection.class);
                ((ConfigurationSerializable) input).serialize().forEach(memorySection::set);
                return outClass.cast(memorySection);
            }

            // Reflective path: copy every non-transient, non-null field into a section.
            MemorySection section = InternalsHelper.newInstanceWithoutInit(SerializableMemorySection.class);
            InternalsHelper.setField("map", section, new LinkedHashMap());
            Field[] fields = input.getClass().getDeclaredFields();
            Field selfField = null;

            for (Field field : fields) {
                if (Modifier.isTransient(field.getModifiers())) {
                    continue; // transient fields are excluded from the config
                }

                Object value = InternalsHelper.getField(field, input);

                if (value != null) {
                    Class<?> fieldType = null;
                    Class<?> fieldClass = field.getType();
                    Class<?> beforeFieldClass = fieldClass;

                    // @Self fields are merged into the root of the section at the end.
                    if (field.getDeclaredAnnotation(Self.class) != null) {
                        if (!ConfigurationSection.class.isAssignableFrom(beforeFieldClass)) {
                            throw new ClassStructureException("Field " + field.getName()
                                    + " with @Self annotation is not a "
                                    + "configuration section, is " + beforeFieldClass.getName());
                        }

                        selfField = field;
                        continue;
                    }

                    // Non-serializable, non-array field types recurse as nested sections.
                    if (!FILTER_CLASSES.stream().anyMatch((e) -> e.isAssignableFrom(beforeFieldClass))
                            && !fieldClass.isArray()) {
                        fieldClass = MemorySection.class;
                    }

                    if (beforeFieldClass.isPrimitive()) {
                        fieldClass = PRIMITIVE_BOXES.get(beforeFieldClass);
                    }

                    if (Map.class.isAssignableFrom(fieldClass)) {
                        fieldType = InternalsHelper.typeOf(field, 1); // map VALUE type
                        fieldClass = ConfigurationSection.class;
                    } else if (Collection.class.isAssignableFrom(fieldClass)) {
                        fieldType = InternalsHelper.typeOf(field, 0); // element type
                        fieldClass = Object.class;
                    } else if (fieldClass.isArray()) {
                        fieldClass = Object.class;
                    }

                    Object obj = adaptOut(value, fieldClass, fieldType);

                    // On write-out, colour codes are converted from the section
                    // symbol back to the field's configured alternate character.
                    if (obj instanceof String && field.isAnnotationPresent(Coloured.class)) {
                        Coloured annotation = field.getDeclaredAnnotation(Coloured.class);
                        obj = translateAlternateColorCodes(annotation.value(), ChatColor.COLOR_CHAR, (String) obj);
                    }

                    section.set(namingStrategy.rename(field.getName()), obj);
                }
            }

            // Merge @Self section contents last so explicit fields win on key clashes.
            if (selfField != null) {
                ConfigurationSection selfSec = InternalsHelper.getField(selfField, input);

                if (selfSec != null) {
                    selfSec.getValues(false).forEach((key, value) -> {
                        if (!section.contains(key)) {
                            section.set(key, value);
                        }
                    });
                }
            }

            return outClass.cast(section);
        }

        ObjectAdapter<I, O> adapter;

        try {
            adapter = (ObjectAdapter<I, O>) oldAdapter;
        } catch (ClassCastException ex) {
            throw new ClassStructureException(outClass.getName() + " does not match registered adapter"
                    + " for " + input.getClass().getName() + "!");
        }

        return adapter.write(input);
    }

    /** Convenience overload of {@link #adaptIn(ConfigurationSection, String, Class, Class)}. */
    public <I> I adaptIn(ConfigurationSection section, String key, Class<I> inClass) {
        return adaptIn(section, key, inClass, null);
    }

    /**
     * Reads the value at {@code key} from {@code section} and converts it to
     * {@code inClass}. When {@code key} is null the section itself is read
     * (reflective path only).
     *
     * @param type element/value type for collections and maps, else null
     * @throws InvalidConfigurationException if a {@code @RequiredField} is absent
     * @throws ClassStructureException on adapter/type mismatches
     */
    @SuppressWarnings("unchecked") // raw adapter plumbing is inherent to the registry design
    public <I, O> I adaptIn(ConfigurationSection section, String key, Class<I> inClass, Class<?> type) {
        if (inClass.isArray()) {
            ArrayAdapter adapter = ArrayAdapter.create(inClass.getComponentType(), this);
            return inClass.cast(adapter.read(key, section));
        }

        if (Collection.class.isAssignableFrom(inClass)) {
            CollectionAdapter adapter = CollectionAdapter.create(type, (Class<? extends Collection>) inClass, this);
            return inClass.cast(adapter.read(key, section));
        }

        if (Map.class.isAssignableFrom(inClass)) {
            MapAdapter adapter = MapAdapter.create(type, this);
            return inClass.cast(adapter.read(key, section));
        }

        // NOTE(review): Class.cast on a primitive class throws for any non-null
        // value; callers appear to box field classes beforehand — confirm.
        if (inClass.isPrimitive() || inClass == String.class
                || PRIMITIVE_BOXES.values().contains(inClass)) {
            return inClass.cast(section.get(key));
        }

        ObjectAdapter<?, ?> oldAdapter = ADAPTERS.get(inClass);

        if (oldAdapter == null) {
            if (ConfigurationSerializableHelper.isRegistered(inClass)
                    && ConfigurationSerializable.class.isAssignableFrom(inClass)) {
                return ConfigurationSerializableHelper.deserialize(
                        ConfigurationSerializableHelper.toMap(section.getConfigurationSection(key)), inClass);
            }

            ConfigurationSection readingSection = (key == null) ? section : section.getConfigurationSection(key);
            I instance = InternalsHelper.newInstance(inClass);
            Field[] fields = inClass.getDeclaredFields();
            Field selfField = null;

            for (Field field : fields) {
                if (Modifier.isTransient(field.getModifiers())) {
                    continue;
                }

                String name = namingStrategy.rename(field.getName());

                // @Self fields receive the raw reading section after the loop.
                if (field.getDeclaredAnnotation(Self.class) != null) {
                    if (!ConfigurationSection.class.isAssignableFrom(field.getType())) {
                        throw new ClassStructureException("Field " + field.getName()
                                + " with @Self annotation is not a "
                                + "configuration section, is " + field.getType().getName());
                    }

                    selfField = field;
                    continue;
                }

                if (!readingSection.contains(name)) {
                    if (field.getAnnotation(RequiredField.class) != null) {
                        String message = "Could not find the required field, " + name;

                        if (key != null) {
                            message += ", in section " + key;
                        }

                        throw new InvalidConfigurationException(message);
                    }

                    continue; // optional and absent: keep the instance's default
                }

                Class<?> fieldType = null;
                Class<?> fieldClass = field.getType();

                if (fieldClass.isPrimitive()) {
                    fieldClass = PRIMITIVE_BOXES.get(fieldClass);
                }

                if (Map.class.isAssignableFrom(fieldClass)) {
                    fieldType = InternalsHelper.typeOf(field, 1);
                } else if (Collection.class.isAssignableFrom(fieldClass)) {
                    fieldType = InternalsHelper.typeOf(field, 0);
                }

                InternalsHelper.setField(field, instance, adaptIn(readingSection, name, fieldClass, fieldType));
            }

            if (selfField != null) {
                InternalsHelper.setField(selfField, instance, readingSection);
            }

            return instance;
        }

        ObjectAdapter<I, O> adapter;

        try {
            adapter = (ObjectAdapter<I, O>) oldAdapter;
        } catch (ClassCastException ex) {
            throw new ClassStructureException(ex);
        }

        return adapter.read(key, section);
    }

    /**
     * Replaces every occurrence of {@code altColorChar} that precedes a valid
     * colour-code character with {@code colorChar}, lower-casing the code.
     * Mirrors Bukkit's ChatColor.translateAlternateColorCodes but with both
     * characters configurable so it can translate in either direction.
     */
    public static String translateAlternateColorCodes(char colorChar, char altColorChar, String textToTranslate) {
        char[] b = textToTranslate.toCharArray();

        for (int i = 0; i < b.length - 1; i++) {
            if (b[i] == altColorChar && "0123456789AaBbCcDdEeFfKkLlMmNnOoRr".indexOf(b[i + 1]) > -1) {
                b[i] = colorChar;
                b[i + 1] = Character.toLowerCase(b[i + 1]);
            }
        }

        return new String(b);
    }
}
package com.sinyuk.yuk.data.user;

import android.os.Parcel;
import android.os.Parcelable;

import com.google.gson.annotations.SerializedName;
import com.sinyuk.yuk.data.links.Links;

/**
 * Created by Sinyuk on 16.6.17.
 *
 * Gson/Parcelable model of a Dribbble API v1 user.
 *
 * IMPORTANT: the Parcel read order in {@link #User(Parcel)} and the write order
 * in {@link #writeToParcel(Parcel, int)} must stay in exact lockstep — adding,
 * removing, or reordering a field in one without the other corrupts every field
 * that follows it. Booleans are marshalled as single bytes (1/0).
 */
public class User implements Parcelable {
    // Known values of the "type" field returned by the API.
    public static final String TEAM = "Team";
    public static final String PLAYER = "Player";
    public static final String PRO = "Pro";

    public static final Parcelable.Creator<User> CREATOR = new Parcelable.Creator<User>() {
        @Override
        public User createFromParcel(Parcel source) {return new User(source);}

        @Override
        public User[] newArray(int size) {return new User[size];}
    };

    /**
     * id : 1
     * name : Dan Cederholm
     * username : simplebits
     * html_url : https://dribbble.com/simplebits
     * avatar_url : https://d13yacurqjgara.cloudfront.net/users/1/avatars/normal/dc.jpg?1371679243
     * bio : Co-founder &amp; designer of <a href="https://dribbble.com/dribbble">@Dribbble</a>. Principal of SimpleBits. Aspiring clawhammer banjoist.
     * location : Salem, MA
     * links : {"web":"http://simplebits.com","twitter":"https://twitter.com/simplebits"}
     * buckets_count : 10
     * comments_received_count : 3395
     * followers_count : 29262
     * followings_count : 1728
     * likes_count : 34954
     * likes_received_count : 27568
     * projects_count : 8
     * rebounds_received_count : 504
     * shots_count : 214
     * teams_count : 1
     * can_upload_shot : true
     * type : Player
     * pro : true
     * buckets_url : https://dribbble.com/v1/users/1/buckets
     * followers_url : https://dribbble.com/v1/users/1/followers
     * following_url : https://dribbble.com/v1/users/1/following
     * likes_url : https://dribbble.com/v1/users/1/likes
     * shots_url : https://dribbble.com/v1/users/1/shots
     * teams_url : https://dribbble.com/v1/users/1/teams
     * created_at : 2009-07-08T02:51:22Z
     * updated_at : 2014-02-22T17:10:33Z
     */
    @SerializedName("id")
    private long mId;
    @SerializedName("name")
    private String mName;
    @SerializedName("username")
    private String mUsername;
    @SerializedName("html_url")
    private String mHtmlUrl;
    @SerializedName("avatar_url")
    private String mAvatarUrl;
    @SerializedName("bio")
    private String mBio;
    @SerializedName("location")
    private String mLocation;
    /**
     * web : http://simplebits.com
     * twitter : https://twitter.com/simplebits
     */
    @SerializedName("links")
    private Links mLinks;
    @SerializedName("buckets_count")
    private int mBucketsCount;
    @SerializedName("comments_received_count")
    private int mCommentsReceivedCount;
    @SerializedName("followers_count")
    private int mFollowersCount;
    @SerializedName("followings_count")
    private int mFollowingsCount;
    @SerializedName("likes_count")
    private int mLikesCount;
    @SerializedName("likes_received_count")
    private int mLikesReceivedCount;
    @SerializedName("projects_count")
    private int mProjectsCount;
    @SerializedName("rebounds_received_count")
    private int mReboundsReceivedCount;
    @SerializedName("shots_count")
    private int mShotsCount;
    @SerializedName("teams_count")
    private int mTeamsCount;
    @SerializedName("can_upload_shot")
    private boolean mCanUploadShot;
    @SerializedName("type")
    private String mType;
    @SerializedName("pro")
    private boolean mPro;
    @SerializedName("buckets_url")
    private String mBucketsUrl;
    @SerializedName("followers_url")
    private String mFollowersUrl;
    @SerializedName("following_url")
    private String mFollowingUrl;
    @SerializedName("likes_url")
    private String mLikesUrl;
    @SerializedName("shots_url")
    private String mShotsUrl;
    @SerializedName("teams_url")
    private String mTeamsUrl;
    // Timestamps are kept as raw ISO-8601 strings, not parsed into date types.
    @SerializedName("created_at")
    private String mCreatedAt;
    @SerializedName("updated_at")
    private String mUpdatedAt;

    /** No-arg constructor for Gson. */
    public User() {}

    /** Parcel constructor — field order must mirror writeToParcel exactly. */
    protected User(Parcel in) {
        this.mId = in.readLong();
        this.mName = in.readString();
        this.mUsername = in.readString();
        this.mHtmlUrl = in.readString();
        this.mAvatarUrl = in.readString();
        this.mBio = in.readString();
        this.mLocation = in.readString();
        this.mLinks = in.readParcelable(Links.class.getClassLoader());
        this.mBucketsCount = in.readInt();
        this.mCommentsReceivedCount = in.readInt();
        this.mFollowersCount = in.readInt();
        this.mFollowingsCount = in.readInt();
        this.mLikesCount = in.readInt();
        this.mLikesReceivedCount = in.readInt();
        this.mProjectsCount = in.readInt();
        this.mReboundsReceivedCount = in.readInt();
        this.mShotsCount = in.readInt();
        this.mTeamsCount = in.readInt();
        this.mCanUploadShot = in.readByte() != 0; // boolean marshalled as byte
        this.mType = in.readString();
        this.mPro = in.readByte() != 0; // boolean marshalled as byte
        this.mBucketsUrl = in.readString();
        this.mFollowersUrl = in.readString();
        this.mFollowingUrl = in.readString();
        this.mLikesUrl = in.readString();
        this.mShotsUrl = in.readString();
        this.mTeamsUrl = in.readString();
        this.mCreatedAt = in.readString();
        this.mUpdatedAt = in.readString();
    }

    // --- plain getters/setters, no validation or side effects ---

    public long getId() { return mId;}

    public void setId(long id) { mId = id;}

    public String getName() { return mName;}

    public void setName(String name) { mName = name;}

    public String getUsername() { return mUsername;}

    public void setUsername(String username) { mUsername = username;}

    public String getHtmlUrl() { return mHtmlUrl;}

    public void setHtmlUrl(String htmlUrl) { mHtmlUrl = htmlUrl;}

    public String getAvatarUrl() { return mAvatarUrl;}

    public void setAvatarUrl(String avatarUrl) { mAvatarUrl = avatarUrl;}

    public String getBio() { return mBio;}

    public void setBio(String bio) { mBio = bio;}

    public String getLocation() { return mLocation;}

    public void setLocation(String location) { mLocation = location;}

    public Links getLinks() { return mLinks;}

    public void setLinks(Links links) { mLinks = links;}

    public int getBucketsCount() { return mBucketsCount;}

    public void setBucketsCount(int bucketsCount) { mBucketsCount = bucketsCount;}

    public int getCommentsReceivedCount() { return mCommentsReceivedCount;}

    public void setCommentsReceivedCount(int commentsReceivedCount) { mCommentsReceivedCount = commentsReceivedCount;}

    public int getFollowersCount() { return mFollowersCount;}

    public void setFollowersCount(int followersCount) { mFollowersCount = followersCount;}

    public int getFollowingsCount() { return mFollowingsCount;}

    public void setFollowingsCount(int followingsCount) { mFollowingsCount = followingsCount;}

    public int getLikesCount() { return mLikesCount;}

    public void setLikesCount(int likesCount) { mLikesCount = likesCount;}

    public int getLikesReceivedCount() { return mLikesReceivedCount;}

    public void setLikesReceivedCount(int likesReceivedCount) { mLikesReceivedCount = likesReceivedCount;}

    public int getProjectsCount() { return mProjectsCount;}

    public void setProjectsCount(int projectsCount) { mProjectsCount = projectsCount;}

    public int getReboundsReceivedCount() { return mReboundsReceivedCount;}

    public void setReboundsReceivedCount(int reboundsReceivedCount) { mReboundsReceivedCount = reboundsReceivedCount;}

    public int getShotsCount() { return mShotsCount;}

    public void setShotsCount(int shotsCount) { mShotsCount = shotsCount;}

    public int getTeamsCount() { return mTeamsCount;}

    public void setTeamsCount(int teamsCount) { mTeamsCount = teamsCount;}

    public boolean isCanUploadShot() { return mCanUploadShot;}

    public void setCanUploadShot(boolean canUploadShot) { mCanUploadShot = canUploadShot;}

    public String getType() { return mType;}

    public void setType(String type) { mType = type;}

    public boolean isPro() { return mPro;}

    public void setPro(boolean pro) { mPro = pro;}

    public String getBucketsUrl() { return mBucketsUrl;}

    public void setBucketsUrl(String bucketsUrl) { mBucketsUrl = bucketsUrl;}

    public String getFollowersUrl() { return mFollowersUrl;}

    public void setFollowersUrl(String followersUrl) { mFollowersUrl = followersUrl;}

    public String getFollowingUrl() { return mFollowingUrl;}

    public void setFollowingUrl(String followingUrl) { mFollowingUrl = followingUrl;}

    public String getLikesUrl() { return mLikesUrl;}

    public void setLikesUrl(String likesUrl) { mLikesUrl = likesUrl;}

    public String getShotsUrl() { return mShotsUrl;}

    public void setShotsUrl(String shotsUrl) { mShotsUrl = shotsUrl;}

    public String getTeamsUrl() { return mTeamsUrl;}

    public void setTeamsUrl(String teamsUrl) { mTeamsUrl = teamsUrl;}

    public String getCreatedAt() { return mCreatedAt;}

    public void setCreatedAt(String createdAt) { mCreatedAt = createdAt;}

    public String getUpdatedAt() { return mUpdatedAt;}

    public void setUpdatedAt(String updatedAt) { mUpdatedAt = updatedAt;}

    @Override
    public int describeContents() { return 0; }

    /** Parcel write — order must mirror the Parcel constructor exactly. */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeLong(this.mId);
        dest.writeString(this.mName);
        dest.writeString(this.mUsername);
        dest.writeString(this.mHtmlUrl);
        dest.writeString(this.mAvatarUrl);
        dest.writeString(this.mBio);
        dest.writeString(this.mLocation);
        dest.writeParcelable(this.mLinks, flags);
        dest.writeInt(this.mBucketsCount);
        dest.writeInt(this.mCommentsReceivedCount);
        dest.writeInt(this.mFollowersCount);
        dest.writeInt(this.mFollowingsCount);
        dest.writeInt(this.mLikesCount);
        dest.writeInt(this.mLikesReceivedCount);
        dest.writeInt(this.mProjectsCount);
        dest.writeInt(this.mReboundsReceivedCount);
        dest.writeInt(this.mShotsCount);
        dest.writeInt(this.mTeamsCount);
        dest.writeByte(this.mCanUploadShot ? (byte) 1 : (byte) 0);
        dest.writeString(this.mType);
        dest.writeByte(this.mPro ? (byte) 1 : (byte) 0);
        dest.writeString(this.mBucketsUrl);
        dest.writeString(this.mFollowersUrl);
        dest.writeString(this.mFollowingUrl);
        dest.writeString(this.mLikesUrl);
        dest.writeString(this.mShotsUrl);
        dest.writeString(this.mTeamsUrl);
        dest.writeString(this.mCreatedAt);
        dest.writeString(this.mUpdatedAt);
    }

    @Override
    public String toString() {
        return "User{" +
                "mId=" + mId +
                ", mName='" + mName + '\'' +
                ", mUsername='" + mUsername + '\'' +
                ", mHtmlUrl='" + mHtmlUrl + '\'' +
                ", mAvatarUrl='" + mAvatarUrl + '\'' +
                ", mBio='" + mBio + '\'' +
                ", mLocation='" + mLocation + '\'' +
                ", mLinks=" + mLinks +
                ", mBucketsCount=" + mBucketsCount +
                ", mCommentsReceivedCount=" + mCommentsReceivedCount +
                ", mFollowersCount=" + mFollowersCount +
                ", mFollowingsCount=" + mFollowingsCount +
                ", mLikesCount=" + mLikesCount +
                ", mLikesReceivedCount=" + mLikesReceivedCount +
                ", mProjectsCount=" + mProjectsCount +
                ", mReboundsReceivedCount=" + mReboundsReceivedCount +
                ", mShotsCount=" + mShotsCount +
                ", mTeamsCount=" + mTeamsCount +
                ", mCanUploadShot=" + mCanUploadShot +
                ", mType='" + mType + '\'' +
                ", mPro=" + mPro +
                ", mBucketsUrl='" + mBucketsUrl + '\'' +
                ", mFollowersUrl='" + mFollowersUrl + '\'' +
                ", mFollowingUrl='" + mFollowingUrl + '\'' +
                ", mLikesUrl='" + mLikesUrl + '\'' +
                ", mShotsUrl='" + mShotsUrl + '\'' +
                ", mTeamsUrl='" + mTeamsUrl + '\'' +
                ", mCreatedAt='" + mCreatedAt + '\'' +
                ", mUpdatedAt='" + mUpdatedAt + '\'' +
                '}';
    }
}
/* * CANARY Copyright 2007-2010 Sandia Corporation. * This source code is distributed under the LGPL License. * Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation, * the U.S. Government retains certain rights in this software. * This software was written as part of an Inter-Agency Agreement between * Sandia National Laboratories and the US EPA NHSRC. * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or (at * your option) any later version. This library is distributed in the hope * that it will be useful, but WITHOUT ANY WARRANTY; without even the * implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this library; if not, write to the Free Software Foundation, * Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ package org.teva.canary.algs; import java.io.IOException; import java.io.StringReader; import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.teva.canary.lib.Algorithm; import org.teva.canary.lib.RegisterEnum; import org.teva.canary.lib.RulesEnum; import org.teva.canary.lib.StatusEnum; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * This is a complete implementation of the MVNN algorithm in Java. Please see * the source code provided in the CANARY source distribution for documentation. 
* @author dbhart, Sandia National Laboratories */ public abstract class MyMVNN implements Algorithm { public MyMVNN() { this.tau_mvnn = 0.0; this.detection_status = StatusEnum.UNINITIALIZED; this.threshold = Double.NaN; this.tau_prob = Double.NaN; this.data_rule = RulesEnum.LINK_USE_RAW; this.on_event = RulesEnum.ON_EVENT_KEEP_NANS; this.on_missing = RulesEnum.ON_MISSING_KEEP_CURRENT; this.on_normal = RulesEnum.ON_NORMAL_KEEP_CURRENT; this.on_outlier = RulesEnum.ON_OUTLIER_KEEP_NANS; this.on_nodata = RulesEnum.ON_NODATA_KEEP_NANS; this.on_calib = RulesEnum.ON_CALIB_KEEP_CURRENT; this.on_match = RulesEnum.ON_MATCH_KEEP_CURRENT; this.data_register_usage = new boolean[RegisterEnum.values().length]; for (int i = 0; i < this.data_register_usage.length; i++) { this.data_register_usage[i] = true; } } protected int history_window_size; protected int num_signals; protected double[] data_lim_high; protected double[] data_lim_low; protected double[] setpt_lim_high; protected double[] setpt_lim_low; protected boolean[] clusterizable; protected boolean[] auto_ignore_signals; protected double[] sigma_lim; protected boolean[] usable_signals; protected int[] contributing_parameters; protected double[][] history_window; protected StatusEnum detection_status; protected double[] tau_out; protected double tau_prob; protected double threshold; protected double[] data; protected double[] predictions; protected double[] residuals; protected double probability; protected double probability2; protected RulesEnum on_normal; protected RulesEnum on_event; protected RulesEnum on_outlier; protected RulesEnum on_missing; protected RulesEnum on_nodata; protected RulesEnum on_calib; protected RulesEnum on_match; protected RulesEnum data_rule; protected double[] cur_sum; protected int[] cur_count; protected double[] cur_sigma; protected String cur_message; protected boolean[] data_register_usage; protected boolean inCalibration; protected double tau_mvnn; protected int n_steps; /** * This is the 
main piece to modify for your own algorithm. * */ public void evaluate() { int minIdx = -1; double min = Float.MAX_VALUE; double dist; int nGood = 0; int nOut = 0; this.probability2 = 0.0; this.probability = 0.0; if (this.detection_status == StatusEnum.MISSINGHIST) { this.n_steps = this.n_steps + 1; if (this.n_steps <= this.history_window_size) { return; } this.detection_status = StatusEnum.NORMAL; } this.detection_status = StatusEnum.NORMAL; double[][] norm_window = this.normalize_window(); /* for (int iCol = 0; iCol < this.num_signals; iCol++) { System.out.println("[" + iCol + "]:" + " IGN=" + this.auto_ignore_signals[iCol] + " USE=" + this.usable_signals[iCol] + " mean=" + this.cur_sum[iCol] / this.cur_count[iCol] + " s=" + this.cur_sigma[iCol]); } */ double mean = 0.0; double sigma = 0.0; double val = 0.0; double min_ndist = Double.POSITIVE_INFINITY; int min_idx = -1; double nDist = 0.0; double eDist = 0.0; for (int jRow = 0; jRow < this.history_window_size; jRow++) { int cNGood = 0; nDist = 0.0; for (int iCol = 0; iCol < this.num_signals; iCol++) { mean = this.cur_sum[iCol] / this.cur_count[iCol]; sigma = this.cur_sigma[iCol]; val = (this.data[iCol] - mean) / sigma; val = norm_window[jRow][iCol] - val; if (this.usable_signals[iCol] && !this.auto_ignore_signals[iCol] && !Double.isNaN(val)) { nDist += val * val; cNGood++; } } nDist = Math.sqrt(nDist); if (cNGood < 1) { nDist = Double.POSITIVE_INFINITY; } /* System.out.println(" ["+jRow+"]"+" DIST="+nDist+" N="+cNGood); */ if (!Double.isNaN(nDist) && nDist < min_ndist) { min_ndist = nDist; min_idx = jRow; nGood = cNGood; } } //System.out.println("Nearest Neighbor: "+min_idx+" @ "+min_ndist+" / "+nGood); if (min_idx < 0) { for (int iCol = 0; iCol < this.num_signals; iCol++) { this.residuals[iCol] = Double.NaN; this.predictions[iCol] = this.data[iCol]; this.contributing_parameters[iCol] = 0; } } else { eDist = Math.sqrt(this.tau_mvnn * this.tau_mvnn / (double)nGood); for (int iCol = 0; iCol < this.num_signals; 
iCol++) { mean = this.cur_sum[iCol] / this.cur_count[iCol]; sigma = this.cur_sigma[iCol]; val = (this.data[iCol] - mean) / sigma; val = norm_window[min_idx][iCol] - val; if (this.usable_signals[iCol] && !this.auto_ignore_signals[iCol] && !Double.isNaN(val)) { this.residuals[iCol] = Math.abs(val); if (val > eDist) { this.contributing_parameters[iCol] = 1; nOut++; } else if (val < -eDist) { this.contributing_parameters[iCol] = -1; nOut++; } else { this.contributing_parameters[iCol] = 0; } this.predictions[iCol] = this.history_window[min_idx][iCol]; } else { this.residuals[iCol] = Double.NaN; this.predictions[iCol] = this.history_window[min_idx][iCol]; } } } this.probability = min_ndist; this.probability2 = min_idx; if (min_ndist > this.tau_mvnn) { this.detection_status = StatusEnum.OUTLIER; if (min_ndist >= this.tau_prob) { this.detection_status = StatusEnum.EVENT; } } } public void set_config_parameter(String name, String value) { if (name.compareToIgnoreCase("threshold") == 0) { this.tau_mvnn = Double.valueOf(value); for (int i = 0; i < this.num_signals; i++) { this.tau_out[i] = Double.NaN; } } else { throw new UnsupportedOperationException("Invalid configuration parameter: "+name+"."); } } public String get_config_parameter(String name) { if (name.compareToIgnoreCase("threshold") == 0) { return Double.toString(this.tau_mvnn); } else { throw new UnsupportedOperationException("Invalid configuration parameter: "+name+"."); } } private void set_uses_register(RegisterEnum register, boolean value) { this.data_register_usage[register.ordinal()] = value; } private void set_uses_register(String register, boolean value) { this.data_register_usage[RegisterEnum.valueOf(register).ordinal()] = value; } public boolean uses_register(RegisterEnum register) { return this.data_register_usage[register.ordinal()]; } public boolean uses_register(String register) { return this.data_register_usage[RegisterEnum.valueOf(register).ordinal()]; } public void configure(String XMLString) { try 
{ DocumentBuilderFactory factory = javax.xml.parsers.DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); StringReader reader = new java.io.StringReader(XMLString); InputSource source = new org.xml.sax.InputSource(reader); Document doc = builder.parse(source); this.configure(doc.getDocumentElement()); } catch (SAXException ex) { Logger.getLogger(MyMVNN.class.getName()).log(Level.SEVERE, null, ex); } catch (IOException ex) { Logger.getLogger(MyMVNN.class.getName()).log(Level.SEVERE, null, ex); } catch (ParserConfigurationException ex) { Logger.getLogger(MyMVNN.class.getName()).log(Level.SEVERE, null, ex); } } public void configure(Object DOMObject) { Element Node = (Element) DOMObject; String temp = Node.getElementsByTagName("history-window").item(0).getTextContent(); this.history_window_size = Integer.parseInt(temp); temp = Node.getElementsByTagName("outlier-threshold").item(0).getTextContent(); this.threshold = Double.parseDouble(temp); temp = Node.getElementsByTagName("event-threshold").item(0).getTextContent(); this.tau_prob = Double.parseDouble(temp); NodeList list = Node.getElementsByTagName("rule-option"); int nEng = list.getLength(); for (int i = 0; i < nEng; i++) { temp = list.item(i).getTextContent(); RulesEnum rule = null; try { rule = RulesEnum.valueOf(temp); switch (rule) { case LINK_USE_RAW: case LINK_USE_RESIDUALS: case LINK_USE_PREDICTIONS: case LINK_USE_CONTRIBUTING: this.data_rule = rule; break; case REG_OUTPUTS_ON: this.set_uses_register(RegisterEnum.CURRENT_RESIDUALS, true); this.set_uses_register(RegisterEnum.CURRENT_PREDICTIONS, true); this.set_uses_register(RegisterEnum.CURRENT_CONTRIBUTORS, true); break; case REG_OUTPUTS_OFF: this.set_uses_register(RegisterEnum.CURRENT_RESIDUALS, false); this.set_uses_register(RegisterEnum.CURRENT_PREDICTIONS, false); this.set_uses_register(RegisterEnum.CURRENT_CONTRIBUTORS, false); break; case REG_CURRENT_RESIDUALS_ON: 
this.set_uses_register(RegisterEnum.CURRENT_RESIDUALS, true); break; case REG_CURRENT_RESIDUALS_OFF: this.set_uses_register(RegisterEnum.CURRENT_RESIDUALS, false); break; case REG_CURRENT_PREDICTIONS_ON: this.set_uses_register(RegisterEnum.CURRENT_PREDICTIONS, true); break; case REG_CURRENT_PREDICTIONS_OFF: this.set_uses_register(RegisterEnum.CURRENT_PREDICTIONS, false); break; case REG_CURRENT_CONTRIBUTORS_ON: this.set_uses_register(RegisterEnum.CURRENT_CONTRIBUTORS, true); break; case REG_CURRENT_CONTRIBUTORS_OFF: this.set_uses_register(RegisterEnum.CURRENT_CONTRIBUTORS, false); break; case REG_DELTA_MIN_ON: this.set_uses_register(RegisterEnum.DELTA_MIN, false); case REG_DELTA_MIN_OFF: this.set_uses_register(RegisterEnum.DELTA_MIN, true); case REG_CURRENT_USEABLE_ON: this.set_uses_register(RegisterEnum.CURRENT_USEABLE, true); case REG_CURRENT_USEABLE_OFF: this.set_uses_register(RegisterEnum.CURRENT_USEABLE, false); case ON_NORMAL_KEEP_CURRENT: case ON_NORMAL_KEEP_PREDICT: case ON_NORMAL_KEEP_NANS: case ON_NORMAL_KEEP_LAST: case ON_NORMAL_KEEP_ZEROS: this.on_normal = rule; break; case ON_OUTLIER_KEEP_CURRENT: case ON_OUTLIER_KEEP_PREDICT: case ON_OUTLIER_KEEP_NANS: case ON_OUTLIER_KEEP_LAST: case ON_OUTLIER_KEEP_ZEROS: this.on_outlier = rule; break; case ON_EVENT_KEEP_CURRENT: case ON_EVENT_KEEP_PREDICT: case ON_EVENT_KEEP_NANS: case ON_EVENT_KEEP_LAST: case ON_EVENT_KEEP_ZEROS: this.on_event = rule; break; case ON_MISSING_KEEP_CURRENT: case ON_MISSING_KEEP_PREDICT: case ON_MISSING_KEEP_NANS: case ON_MISSING_KEEP_LAST: case ON_MISSING_KEEP_ZEROS: this.on_missing = rule; break; default: System.out.println("Unknown or unhandled rule-option: " + rule); break; } } catch (IllegalArgumentException ex) { Logger.getLogger(MyMVNN.class.getName()).log(Level.WARNING, null, ex); } } list = Node.getElementsByTagName("parameter"); int nPar = list.getLength(); for (int i = 0; i < nPar; i++) { Element par = (Element) list.item(i); 
this.set_config_parameter(par.getAttribute("name"), par.getTextContent()); } } public void set_data_register(String register, double[] data) { this.set_data_register(RegisterEnum.valueOf(register), data); } public void set_data_register(String register, int[] data) { this.set_data_register(RegisterEnum.valueOf(register), data); } public void set_data_register(String register, boolean[] data) { this.set_data_register(RegisterEnum.valueOf(register), data); } public double[] get_data_register(String register) { return this.get_data_register(RegisterEnum.valueOf(register)); } public int[] get_data_register_int(String register) { return this.get_data_register_int(RegisterEnum.valueOf(register)); } public boolean[] get_data_register_bool(String register) { return this.get_data_register_bool(RegisterEnum.valueOf(register)); } public double[] get_data_register(RegisterEnum register) { if (this.uses_register(register)) { double[] values = new double[this.num_signals]; switch (register) { case DATA_LIM_HIGH: for (int i = 0; i < values.length; i++) { values[i] = this.data_lim_high[i]; } break; case DATA_LIM_LOW: for (int i = 0; i < values.length; i++) { values[i] = this.data_lim_low[i]; } break; case DELTA_MIN: for (int i = 0; i < values.length; i++) { values[i] = this.sigma_lim[i]; } break; case SETPOINT_LIM_HIGH: for (int i = 0; i < values.length; i++) { values[i] = this.setpt_lim_high[i]; } break; case SETPOINT_LIM_LOW: for (int i = 0; i < values.length; i++) { values[i] = this.setpt_lim_low[i]; } break; default: return null; } return values; } else { return null; } } public void set_data_register(RegisterEnum register, double[] data) { if (this.uses_register(register)) { switch (register) { case DATA_LIM_HIGH: for (int i = 0; i < data.length; i++) { this.data_lim_high[i] = data[i]; } break; case DATA_LIM_LOW: for (int i = 0; i < data.length; i++) { this.data_lim_low[i] = data[i]; } break; case DELTA_MIN: for (int i = 0; i < data.length; i++) { this.sigma_lim[i] = data[i]; 
} break; case SETPOINT_LIM_HIGH: for (int i = 0; i < data.length; i++) { this.setpt_lim_high[i] = data[i]; } break; case SETPOINT_LIM_LOW: for (int i = 0; i < data.length; i++) { this.setpt_lim_low[i] = data[i]; } break; default: } } } public boolean[] get_data_register_bool(RegisterEnum register) { if (this.uses_register(register)) { boolean[] values = new boolean[this.num_signals]; switch (register) { case AUTO_IGNORE: for (int i = 0; i < values.length; i++) { values[i] = this.auto_ignore_signals[i]; } break; case CLUSTERIZABLE: for (int i = 0; i < values.length; i++) { values[i] = this.clusterizable[i]; } break; default: return null; } return values; } else { return null; } } public int[] get_data_register_int(RegisterEnum register) { if (this.uses_register(register)) { //int[] values = new int[this.num_signals]; switch (register) { default: return null; } //return values; } else { return null; } } public void set_data_register(RegisterEnum register, boolean[] data) { if (this.uses_register(register)) { switch (register) { case AUTO_IGNORE: for (int i = 0; i < data.length; i++) { this.auto_ignore_signals[i] = data[i]; } break; case CLUSTERIZABLE: for (int i = 0; i < data.length; i++) { this.clusterizable[i] = data[i]; } break; default: } } } public void set_data_register(RegisterEnum register, int[] data) { if (this.uses_register(register)) { switch (register) { default: } } } public double[][] normalize_window() { double[][] norm_window = new double[this.history_window_size][this.num_signals]; double sigma = 0.0; double mean = 0.0; for (int iCol = 0; iCol < this.num_signals; iCol++) { double sum2 = 0; double val = 0.0; mean = this.cur_sum[iCol] / this.cur_count[iCol]; for (int jRow = 0; jRow < this.history_window_size; jRow++) { val = this.history_window[jRow][iCol]; if (!Double.isInfinite(val) && !Double.isNaN(val)) { sum2 = sum2 + (val - mean) * (val - mean); } } sigma = Math.sqrt(sum2 / (this.cur_count[iCol] - 1)); if (sigma < this.sigma_lim[iCol]) { sigma = 
this.sigma_lim[iCol] + Double.MIN_VALUE; } this.cur_sigma[iCol] = sigma; for (int jRow = 0; jRow < this.history_window_size; jRow++) { val = this.history_window[jRow][iCol]; val = (val - mean) / sigma; norm_window[jRow][iCol] = val; } } return norm_window; } public void initialize(int n_Sig) { this.num_signals = n_Sig; this.auto_ignore_signals = new boolean[n_Sig]; this.usable_signals = new boolean[n_Sig]; this.history_window = new double[this.history_window_size][n_Sig]; this.tau_out = new double[n_Sig]; this.data = new double[n_Sig]; this.inCalibration = false; this.predictions = new double[n_Sig]; this.residuals = new double[n_Sig]; this.sigma_lim = new double[n_Sig]; this.data_lim_high = new double[n_Sig]; this.data_lim_low = new double[n_Sig]; this.clusterizable = new boolean[n_Sig]; this.setpt_lim_high = new double[n_Sig]; this.setpt_lim_low = new double[n_Sig]; this.cur_count = new int[n_Sig]; this.cur_sum = new double[n_Sig]; this.cur_sigma = new double[n_Sig]; this.contributing_parameters = new int[n_Sig]; for (int iCol = 0; iCol < n_Sig; iCol++) { for (int jRow = 0; jRow < this.history_window_size; jRow++) { this.history_window[jRow][iCol] = Double.NaN; } this.auto_ignore_signals[iCol] = false; this.usable_signals[iCol] = true; this.contributing_parameters[iCol] = 0; this.tau_out[iCol] = this.threshold; this.data[iCol] = Double.NaN; this.predictions[iCol] = Double.NaN; this.residuals[iCol] = Double.NaN; this.sigma_lim[iCol] = 0.0; this.cur_sigma[iCol] = 0.0; this.cur_count[iCol] = 0; this.cur_sum[iCol] = 0; this.clusterizable[iCol] = true; this.data_lim_high[iCol] = Double.POSITIVE_INFINITY; this.setpt_lim_high[iCol] = Double.POSITIVE_INFINITY; this.data_lim_low[iCol] = Double.NEGATIVE_INFINITY; this.setpt_lim_low[iCol] = Double.NEGATIVE_INFINITY; } this.detection_status = StatusEnum.MISSINGHIST; this.n_steps = 0; this.tau_mvnn = this.threshold; if (!Double.isNaN(this.tau_mvnn)) { for (int i = 0; i < n_Sig; i++) { this.tau_out[i] = Double.NaN; } } } 
public void keep_by_rule() { switch (this.detection_status) { case UNINITIALIZED: break; case MISSINGHIST: switch (this.on_missing) { case ON_MISSING_KEEP_CURRENT: this.keep_current(); break; case ON_MISSING_KEEP_PREDICT: this.keep_predicted(); break; case ON_MISSING_KEEP_NANS: this.keep_nans(); break; case ON_MISSING_KEEP_LAST: this.keep_last(); break; case ON_MISSING_KEEP_ZEROS: this.keep_zeros(); break; } break; case NORMAL: switch (this.on_normal) { case ON_NORMAL_KEEP_CURRENT: this.keep_current(); break; case ON_NORMAL_KEEP_PREDICT: this.keep_predicted(); break; case ON_NORMAL_KEEP_NANS: this.keep_nans(); break; case ON_NORMAL_KEEP_LAST: this.keep_last(); break; case ON_NORMAL_KEEP_ZEROS: this.keep_zeros(); break; } break; case OUTLIER: switch (this.on_outlier) { case ON_OUTLIER_KEEP_CURRENT: this.keep_current(); break; case ON_OUTLIER_KEEP_PREDICT: this.keep_predicted(); break; case ON_OUTLIER_KEEP_NANS: this.keep_nans(); break; case ON_OUTLIER_KEEP_LAST: this.keep_last(); break; case ON_OUTLIER_KEEP_ZEROS: this.keep_zeros(); break; } break; case EVENT: switch (this.on_event) { case ON_EVENT_KEEP_CURRENT: this.keep_current(); break; case ON_EVENT_KEEP_PREDICT: this.keep_predicted(); break; case ON_EVENT_KEEP_NANS: this.keep_nans(); break; case ON_EVENT_KEEP_LAST: this.keep_last(); break; case ON_EVENT_KEEP_ZEROS: this.keep_zeros(); break; } break; case NODATA: switch (this.on_nodata) { case ON_NODATA_KEEP_CURRENT: this.keep_current(); break; case ON_NODATA_KEEP_PREDICT: this.keep_predicted(); break; case ON_NODATA_KEEP_NANS: this.keep_nans(); break; case ON_NODATA_KEEP_LAST: this.keep_last(); break; case ON_NODATA_KEEP_ZEROS: this.keep_zeros(); break; } break; case CALIBRATION: switch (this.on_nodata) { case ON_CALIB_KEEP_CURRENT: this.keep_current(); break; case ON_CALIB_KEEP_PREDICT: this.keep_predicted(); break; case ON_CALIB_KEEP_NANS: this.keep_nans(); break; case ON_CALIB_KEEP_LAST: this.keep_last(); break; case ON_CALIB_KEEP_ZEROS: this.keep_zeros(); 
break; } break; case MATCH: switch (this.on_match) { case ON_MATCH_KEEP_CURRENT: this.keep_current(); break; case ON_MATCH_KEEP_PREDICT: this.keep_predicted(); break; case ON_MATCH_KEEP_NANS: this.keep_nans(); break; case ON_MATCH_KEEP_LAST: this.keep_last(); break; case ON_MATCH_KEEP_ZEROS: this.keep_zeros(); break; } break; default: throw new UnsupportedOperationException("Unknown Status: " + this.detection_status + "."); } } public void set_num_signals(int n_Sig) { this.num_signals = n_Sig; } public int get_num_signals() { return this.num_signals; } public void set_history_window_size(int n_Hist) { this.history_window_size = n_Hist; } public int get_history_window_size() { return this.history_window_size; } public void set_outlier_threshold(double tau_out) { for (int iCol = 0; iCol < this.num_signals; iCol++) { this.tau_out[iCol] = tau_out; } } public double get_outlier_threshold() { return this.tau_out[0]; } public void set_probability_threshold(double tau_prob) { this.tau_prob = tau_prob; } public double get_probability_threshold() { return this.tau_prob; } public void set_data_rule(RulesEnum ruleID) { this.data_rule = ruleID; } public RulesEnum get_data_rule() { return this.data_rule; } protected void shift_window() { for (int iCol = 0; iCol < this.num_signals; iCol++) { double val = this.history_window[0][iCol]; if (!Double.isNaN(val) && !Double.isInfinite(val)) { this.cur_sum[iCol] = this.cur_sum[iCol] - val; this.cur_count[iCol] = this.cur_count[iCol] - 1; } } for (int jRow = 1; jRow < this.history_window_size; jRow++) { for (int iCol = 0; iCol < this.num_signals; iCol++) { this.history_window[jRow - 1][iCol] = this.history_window[jRow][iCol]; } } } public void keep_current() { double val; this.shift_window(); for (int iCol = 0; iCol < this.num_signals; iCol++) { val = this.data[iCol]; if (!this.usable_signals[iCol]) { val = Double.NaN; } this.history_window[this.history_window_size - 1][iCol] = val; if (!Double.isNaN(val) && !Double.isInfinite(val)) { 
this.cur_sum[iCol] = this.cur_sum[iCol] + val; this.cur_count[iCol] = this.cur_count[iCol] + 1; } } } public void keep_predicted() { double val; this.shift_window(); for (int iCol = 0; iCol < this.num_signals; iCol++) { val = this.predictions[iCol]; this.history_window[this.history_window_size - 1][iCol] = val; if (!Double.isNaN(val) && !Double.isInfinite(val)) { this.cur_sum[iCol] = this.cur_sum[iCol] + val; this.cur_count[iCol] = this.cur_count[iCol] + 1; } } } public void keep_last() { /*double val; this.shift_window(); for (int iCol = 0; iCol < this.num_signals; iCol++) { val = this.history_window[this.history_window_size-1][iCol]; if (!Double.isNaN(val) && !Double.isInfinite(val)) { this.cur_sum[iCol] = this.cur_sum[iCol] + val; this.cur_count[iCol] = this.cur_count[iCol] + 1; } }*/ } public void keep_nans() { double val; this.shift_window(); for (int iCol = 0; iCol < this.num_signals; iCol++) { val = Double.NaN; this.history_window[this.history_window_size - 1][iCol] = val; } } public void keep_zeros() { double val; this.shift_window(); for (int iCol = 0; iCol < this.num_signals; iCol++) { val = 0; this.history_window[this.history_window_size - 1][iCol] = val; if (!Double.isNaN(val) && !Double.isInfinite(val)) { this.cur_sum[iCol] = this.cur_sum[iCol] + val; this.cur_count[iCol] = this.cur_count[iCol] + 1; } } } public void set_history_window_data(double[][] window) { double val; for (int iCol = 0; iCol < this.num_signals; iCol++) { this.cur_sum[iCol] = 0.0; this.cur_count[iCol] = 0; } for (int jRow = 0; jRow < this.history_window_size; jRow++) { for (int iCol = 0; iCol < this.num_signals; iCol++) { val = window[jRow][iCol]; this.history_window[jRow][iCol] = val; if (!Double.isNaN(val) && !Double.isInfinite(val)) { this.cur_sum[iCol] = this.cur_sum[iCol] + val; this.cur_count[iCol] = this.cur_count[iCol] + 1; } } } } public double[][] get_history_window_data() { return this.history_window; } public void set_current_data(double[] data) { for (int iCol = 0; 
iCol < this.num_signals; iCol++) { this.data[iCol] = data[iCol]; } } public void set_current_data(int[] data) { for (int iCol = 0; iCol < this.num_signals; iCol++) { this.data[iCol] = (double) data[iCol]; } } public double[] get_current_data() { double temp[] = new double[this.num_signals]; for (int i = 0; i < this.num_signals; i++) { temp[i] = (double) this.data[i]; } return temp; } public void set_current_usable(boolean[] usable) { for (int iCol = 0; iCol < this.num_signals; iCol++) { this.usable_signals[iCol] = usable[iCol]; } } public boolean[] get_current_usable() { boolean temp[] = new boolean[this.num_signals]; for (int i = 0; i < this.num_signals; i++) { temp[i] = this.usable_signals[i]; } return temp; } public double[] get_current_predictions() { double temp[] = new double[this.num_signals]; for (int i = 0; i < this.num_signals; i++) { temp[i] = this.predictions[i]; } return temp; } public double[] get_current_residuals() { double temp[] = new double[this.num_signals]; for (int i = 0; i < this.num_signals; i++) { temp[i] = (double) this.residuals[i]; } return temp; } public double get_current_probability() { return this.probability; } public int[] get_contributing_signals() { int temp[] = new int[this.num_signals]; for (int i = 0; i < this.num_signals; i++) { temp[i] = this.contributing_parameters[i]; } return temp; } public double[] get_d_contributing_signals() { double temp[] = new double[this.num_signals]; for (int i = 0; i < this.num_signals; i++) { temp[i] = (double) this.contributing_parameters[i]; } return temp; } public StatusEnum get_detection_status() { return this.detection_status; } public String get_message() { return this.cur_message; } public void set_calibration_status(boolean inCalib) { this.inCalibration = inCalib; } }
/**
 * Copyright (c) 2015 Netflix, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.msl.entityauth;

import org.json.JSONException;
import org.json.JSONObject;

import com.netflix.msl.MslConstants;
import com.netflix.msl.MslCryptoException;
import com.netflix.msl.MslEncodingException;
import com.netflix.msl.MslEntityAuthException;
import com.netflix.msl.MslError;
import com.netflix.msl.MslException;
import com.netflix.msl.MslMasterTokenException;
import com.netflix.msl.crypto.ICryptoContext;
import com.netflix.msl.crypto.SessionCryptoContext;
import com.netflix.msl.tokens.MasterToken;
import com.netflix.msl.util.Base64;
import com.netflix.msl.util.MslContext;

/**
 * <p>Master token protected entity authentication data.</p>
 *
 * <p>
 * {@code {
 *   "#mandatory" : [ "mastertoken", "authdata", "signature" ],
 *   "mastertoken" : mastertoken,
 *   "authdata" : "base64",
 *   "signature" : "base64",
 * }} where:
 * <ul>
 * <li>{@code mastertoken} is the master token used to protect the encapsulated authentication data</li>
 * <li>{@code authdata} is the Base64-encoded ciphertext envelope containing the encapsulated authentication data</li>
 * <li>{@code signature} is the Base64-encoded signature envelope verifying the encapsulated authentication data</li>
 * </ul></p>
 *
 * @author Wesley Miaw <wmiaw@netflix.com>
 */
public class MasterTokenProtectedAuthenticationData extends EntityAuthenticationData {
    /** JSON key master token. */
    protected static final String KEY_MASTER_TOKEN = "mastertoken";
    /** JSON key authentication data. */
    protected static final String KEY_AUTHENTICATION_DATA = "authdata";
    /** JSON key signature. */
    protected static final String KEY_SIGNATURE = "signature";

    /**
     * <p>Construct a new master token protected entity authentication data
     * instance using the provided master token and actual entity
     * authentication data.</p>
     *
     * @param ctx MSL context.
     * @param masterToken the master token.
     * @param authdata encapsulated authentication data.
     * @throws MslCryptoException if there is an error encrypting or signing
     *         the encapsulated authentication data.
     * @throws MslEntityAuthException if the master token crypto context cannot
     *         be found in the MSL store and cannot be created.
     */
    public MasterTokenProtectedAuthenticationData(final MslContext ctx, final MasterToken masterToken, final EntityAuthenticationData authdata) throws MslCryptoException, MslEntityAuthException {
        super(EntityAuthenticationScheme.MT_PROTECTED);
        this.masterToken = masterToken;
        this.authdata = authdata;

        // Grab master token crypto context.
        // Prefer the crypto context cached in the MSL store; fall back to
        // deriving one from the master token's session keys.
        final ICryptoContext cryptoContext;
        try {
            final ICryptoContext cachedCryptoContext = ctx.getMslStore().getCryptoContext(masterToken);
            if (cachedCryptoContext != null)
                cryptoContext = cachedCryptoContext;
            else
                cryptoContext = new SessionCryptoContext(ctx, masterToken);
        } catch (final MslMasterTokenException e) {
            throw new MslEntityAuthException(MslError.ENTITYAUTH_MASTERTOKEN_NOT_DECRYPTED, e);
        }

        // Encrypt and sign the authentication data.
        // The signature is computed over the ciphertext (encrypt-then-sign).
        final byte[] plaintext = authdata.toJSONString().getBytes(MslConstants.DEFAULT_CHARSET);
        this.ciphertext = cryptoContext.encrypt(plaintext);
        this.signature = cryptoContext.sign(this.ciphertext);
    }

    /**
     * <p>Construct a new master token protected entity authentication data
     * instance from the provided JSON object.</p>
     *
     * @param ctx MSL context.
     * @param authdataJO the authentication data JSON object.
     * @throws MslEncodingException if there is an error parsing the JSON
     *         representation.
     * @throws MslCryptoException if there is an error decrypting or verifying
     *         the encapsulated authentication data.
     * @throws MslEntityAuthException if the encapsulated authentication data
     *         or signature are invalid, if the master token is invalid, or if
     *         the master token crypto context cannot be found in the MSL store
     *         and cannot be created.
     */
    public MasterTokenProtectedAuthenticationData(final MslContext ctx, final JSONObject authdataJO) throws MslEncodingException, MslCryptoException, MslEntityAuthException {
        super(EntityAuthenticationScheme.MT_PROTECTED);

        // Extract authentication data fields.
        try {
            try {
                this.masterToken = new MasterToken(ctx, authdataJO.getJSONObject(KEY_MASTER_TOKEN));
            } catch (final MslException e) {
                throw new MslEntityAuthException(MslError.ENTITYAUTH_MASTERTOKEN_INVALID, "master token protected authdata " + authdataJO.toString(), e);
            }
            try {
                this.ciphertext = Base64.decode(authdataJO.getString(KEY_AUTHENTICATION_DATA));
            } catch (final IllegalArgumentException e) {
                throw new MslEntityAuthException(MslError.ENTITYAUTH_CIPHERTEXT_INVALID, "master token protected authdata " + authdataJO.toString(), e);
            }
            try {
                this.signature = Base64.decode(authdataJO.getString(KEY_SIGNATURE));
            } catch (final IllegalArgumentException e) {
                throw new MslEntityAuthException(MslError.ENTITYAUTH_SIGNATURE_INVALID, "master token protected authdata " + authdataJO.toString(), e);
            }
        } catch (final JSONException e) {
            throw new MslEncodingException(MslError.JSON_PARSE_ERROR, "master token protected authdata " + authdataJO.toString(), e);
        }

        // Grab master token crypto context.
        // Same cache-then-derive strategy as the encrypting constructor.
        final ICryptoContext cryptoContext;
        try {
            final ICryptoContext cachedCryptoContext = ctx.getMslStore().getCryptoContext(masterToken);
            if (cachedCryptoContext != null)
                cryptoContext = cachedCryptoContext;
            else
                cryptoContext = new SessionCryptoContext(ctx, masterToken);
        } catch (final MslMasterTokenException e) {
            throw new MslEntityAuthException(MslError.ENTITYAUTH_MASTERTOKEN_NOT_DECRYPTED, e);
        }

        // Verify and decrypt the authentication data.
        // Verify-then-decrypt: reject the envelope before touching plaintext.
        try {
            if (!cryptoContext.verify(this.ciphertext, this.signature))
                throw new MslEntityAuthException(MslError.ENTITYAUTH_VERIFICATION_FAILED, "master token protected authdata " + authdataJO.toString());
            final byte[] plaintext = cryptoContext.decrypt(this.ciphertext);
            final JSONObject internalAuthdataJO = new JSONObject(new String(plaintext, MslConstants.DEFAULT_CHARSET));
            this.authdata = EntityAuthenticationData.create(ctx, internalAuthdataJO);
        } catch (final JSONException e) {
            throw new MslEncodingException(MslError.JSON_PARSE_ERROR, "master token protected authdata " + authdataJO.toString(), e);
        }
    }

    /* (non-Javadoc)
     * @see com.netflix.msl.entityauth.EntityAuthenticationData#getIdentity()
     */
    @Override
    public String getIdentity() throws MslCryptoException {
        // Identity is delegated to the encapsulated (inner) authentication data.
        return authdata.getIdentity();
    }

    /**
     * Return the encapsulated entity authentication data.
     *
     * @return the encapsulated entity authentication data.
     */
    public EntityAuthenticationData getEncapsulatedAuthdata() {
        return authdata;
    }

    /* (non-Javadoc)
     * @see com.netflix.msl.entityauth.EntityAuthenticationData#getAuthData()
     */
    @Override
    public JSONObject getAuthData() throws MslEncodingException {
        try {
            final JSONObject jsonObj = new JSONObject();
            jsonObj.put(KEY_MASTER_TOKEN, masterToken);
            jsonObj.put(KEY_AUTHENTICATION_DATA, Base64.encode(ciphertext));
            jsonObj.put(KEY_SIGNATURE, Base64.encode(signature));
            // NOTE(review): round-trips through a string; presumably forces the
            // master token to be serialized as its JSON form rather than kept
            // as a live object reference -- confirm against JSONObject.put
            // semantics before changing.
            return new JSONObject(jsonObj.toString());
        } catch (final JSONException e) {
            throw new MslEncodingException(MslError.JSON_ENCODE_ERROR, "master token protected authdata", e);
        }
    }

    /* (non-Javadoc)
     * @see com.netflix.msl.entityauth.EntityAuthenticationData#equals(java.lang.Object)
     */
    @Override
    public boolean equals(final Object obj) {
        if (obj == this) return true;
        if (!(obj instanceof MasterTokenProtectedAuthenticationData)) return false;
        final MasterTokenProtectedAuthenticationData that = (MasterTokenProtectedAuthenticationData)obj;
        // Equality ignores ciphertext/signature: two instances protecting the
        // same authdata with the same master token compare equal even if the
        // encrypted bytes differ.
        return super.equals(obj) &&
            this.masterToken.equals(that.masterToken) &&
            this.authdata.equals(that.authdata);
    }

    /* (non-Javadoc)
     * @see com.netflix.msl.entityauth.EntityAuthenticationData#hashCode()
     */
    @Override
    public int hashCode() {
        // Consistent with equals(): based on master token and inner authdata.
        return super.hashCode() ^ masterToken.hashCode() ^ authdata.hashCode();
    }

    /** Master token. */
    private final MasterToken masterToken;
    /** Entity authentication data. */
    private final EntityAuthenticationData authdata;
    /** Encrypted entity authentication data. */
    private final byte[] ciphertext;
    /** Ciphertext signature. */
    private final byte[] signature;
}
/*
 * This file is generated by jOOQ.
 */
package generated.rx.jdbc.regular.vertx.tables.records;


import generated.rx.jdbc.regular.vertx.tables.Somethingcomposite;
import generated.rx.jdbc.regular.vertx.tables.interfaces.ISomethingcomposite;
import io.github.jklingsporn.vertx.jooq.shared.internal.VertxPojo;
import io.vertx.core.json.JsonObject;
import org.jooq.Field;
import org.jooq.Record2;
import org.jooq.Record3;
import org.jooq.Row3;
import org.jooq.impl.UpdatableRecordImpl;

import static io.github.jklingsporn.vertx.jooq.shared.internal.VertxPojo.*;


/**
 * This class is generated by jOOQ.
 *
 * <p>Updatable record for the {@code VERTX.SOMETHINGCOMPOSITE} table with a
 * composite primary key of (SOMEID, SOMESECONDID). Do not edit by hand —
 * changes will be lost the next time the jOOQ code generator runs.
 */
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class SomethingcompositeRecord extends UpdatableRecordImpl<SomethingcompositeRecord> implements VertxPojo, Record3<Integer, Integer, JsonObject>, ISomethingcomposite {

    private static final long serialVersionUID = 1L;

    /**
     * Setter for <code>VERTX.SOMETHINGCOMPOSITE.SOMEID</code>.
     */
    @Override
    public SomethingcompositeRecord setSomeid(Integer value) {
        set(0, value);
        return this;
    }

    /**
     * Getter for <code>VERTX.SOMETHINGCOMPOSITE.SOMEID</code>.
     */
    @Override
    public Integer getSomeid() {
        return (Integer) get(0);
    }

    /**
     * Setter for <code>VERTX.SOMETHINGCOMPOSITE.SOMESECONDID</code>.
     */
    @Override
    public SomethingcompositeRecord setSomesecondid(Integer value) {
        set(1, value);
        return this;
    }

    /**
     * Getter for <code>VERTX.SOMETHINGCOMPOSITE.SOMESECONDID</code>.
     */
    @Override
    public Integer getSomesecondid() {
        return (Integer) get(1);
    }

    /**
     * Setter for <code>VERTX.SOMETHINGCOMPOSITE.SOMEJSONOBJECT</code>.
     */
    @Override
    public SomethingcompositeRecord setSomejsonobject(JsonObject value) {
        set(2, value);
        return this;
    }

    /**
     * Getter for <code>VERTX.SOMETHINGCOMPOSITE.SOMEJSONOBJECT</code>.
     */
    @Override
    public JsonObject getSomejsonobject() {
        return (JsonObject) get(2);
    }

    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------

    // Raw cast is safe: the table's key is the (SOMEID, SOMESECONDID) pair.
    @Override
    public Record2<Integer, Integer> key() {
        return (Record2) super.key();
    }

    // -------------------------------------------------------------------------
    // Record3 type implementation
    // -------------------------------------------------------------------------

    @Override
    public Row3<Integer, Integer, JsonObject> fieldsRow() {
        return (Row3) super.fieldsRow();
    }

    @Override
    public Row3<Integer, Integer, JsonObject> valuesRow() {
        return (Row3) super.valuesRow();
    }

    @Override
    public Field<Integer> field1() {
        return Somethingcomposite.SOMETHINGCOMPOSITE.SOMEID;
    }

    @Override
    public Field<Integer> field2() {
        return Somethingcomposite.SOMETHINGCOMPOSITE.SOMESECONDID;
    }

    @Override
    public Field<JsonObject> field3() {
        return Somethingcomposite.SOMETHINGCOMPOSITE.SOMEJSONOBJECT;
    }

    @Override
    public Integer component1() {
        return getSomeid();
    }

    @Override
    public Integer component2() {
        return getSomesecondid();
    }

    @Override
    public JsonObject component3() {
        return getSomejsonobject();
    }

    @Override
    public Integer value1() {
        return getSomeid();
    }

    @Override
    public Integer value2() {
        return getSomesecondid();
    }

    @Override
    public JsonObject value3() {
        return getSomejsonobject();
    }

    @Override
    public SomethingcompositeRecord value1(Integer value) {
        setSomeid(value);
        return this;
    }

    @Override
    public SomethingcompositeRecord value2(Integer value) {
        setSomesecondid(value);
        return this;
    }

    @Override
    public SomethingcompositeRecord value3(JsonObject value) {
        setSomejsonobject(value);
        return this;
    }

    @Override
    public SomethingcompositeRecord values(Integer value1, Integer value2, JsonObject value3) {
        value1(value1);
        value2(value2);
        value3(value3);
        return this;
    }

    // -------------------------------------------------------------------------
    // FROM and INTO
    // -------------------------------------------------------------------------

    @Override
    public void from(ISomethingcomposite from) {
        setSomeid(from.getSomeid());
        setSomesecondid(from.getSomesecondid());
        setSomejsonobject(from.getSomejsonobject());
    }

    @Override
    public <E extends ISomethingcomposite> E into(E into) {
        into.from(this);
        return into;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached SomethingcompositeRecord
     */
    public SomethingcompositeRecord() {
        super(Somethingcomposite.SOMETHINGCOMPOSITE);
    }

    /**
     * Create a detached, initialised SomethingcompositeRecord
     */
    public SomethingcompositeRecord(Integer someid, Integer somesecondid, JsonObject somejsonobject) {
        super(Somethingcomposite.SOMETHINGCOMPOSITE);

        setSomeid(someid);
        setSomesecondid(somesecondid);
        setSomejsonobject(somejsonobject);
    }

    /**
     * Create a detached, initialised SomethingcompositeRecord
     */
    public SomethingcompositeRecord(generated.rx.jdbc.regular.vertx.tables.pojos.Somethingcomposite value) {
        super(Somethingcomposite.SOMETHINGCOMPOSITE);

        if (value != null) {
            setSomeid(value.getSomeid());
            setSomesecondid(value.getSomesecondid());
            setSomejsonobject(value.getSomejsonobject());
        }
    }

    // Populates the record from a Vert.x JsonObject via the generated fromJson
    // mapping (inherited through VertxPojo).
    public SomethingcompositeRecord(io.vertx.core.json.JsonObject json) {
        this();
        fromJson(json);
    }
}
/*
 * Copyright (C) 2004-2008 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jivesoftware.database;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Allows PreparedStatement information to be cached. A prepared statement consists of
 * a SQL statement containing bind variables as well as variable values. For example,
 * the SQL statement {@code "SELECT * FROM person WHERE age &gt; ?"} would have the integer
 * variable {@code 18} (which replaces the "?" character) to find all adults. This class
 * encapsulates both the SQL string and bind variable values so that actual
 * PreparedStatement can be created from that information later.
 *
 * @author Matt Tucker
 */
public class CachedPreparedStatement {

    private static final Logger Log = LoggerFactory.getLogger(CachedPreparedStatement.class);

    /** The SQL text containing '?' bind-variable placeholders. */
    private String sql;
    /** Bind-variable values, parallel to {@link #types}. */
    private List<Object> params;
    /** java.sql.Types codes for each parameter, parallel to {@link #params}. */
    private List<Integer> types;

    /**
     * Constructs a new CachedPreparedStatement.
     */
    public CachedPreparedStatement() {
        params = new ArrayList<>();
        types = new ArrayList<>();
    }

    /**
     * Constructs a new CachedPreparedStatement.
     *
     * @param sql the SQL.
     */
    public CachedPreparedStatement(String sql) {
        this();
        setSQL(sql);
    }

    /**
     * Returns the SQL.
     *
     * @return the SQL.
     */
    public String getSQL() {
        return sql;
    }

    /**
     * Sets the SQL.
     *
     * @param sql the SQL.
     */
    public void setSQL(String sql) {
        this.sql = sql;
    }

    /**
     * Adds a boolean parameter to the prepared statement.
     *
     * @param value the boolean value.
     */
    public void addBoolean(boolean value) {
        params.add(value);
        types.add(Types.BOOLEAN);
    }

    /**
     * Adds an integer parameter to the prepared statement.
     *
     * @param value the int value.
     */
    public void addInt(int value) {
        params.add(value);
        types.add(Types.INTEGER);
    }

    /**
     * Adds a long parameter to the prepared statement.
     *
     * @param value the long value.
     */
    public void addLong(long value) {
        params.add(value);
        types.add(Types.BIGINT);
    }

    /**
     * Adds a String parameter to the prepared statement.
     *
     * @param value the String value.
     */
    public void addString(String value) {
        params.add(value);
        types.add(Types.VARCHAR);
    }

    /**
     * Sets all parameters on the given PreparedStatement. The standard code block
     * for turning a CachedPreparedStatement into a PreparedStatement is as follows:
     *
     * <pre>
     *   PreparedStatement pstmt = con.prepareStatement(cachedPstmt.getSQL());
     *   cachedPstmt.setParams(pstmt);
     * </pre>
     *
     * @param pstmt the prepared statement.
     * @throws java.sql.SQLException if an SQL Exception occurs.
     */
    public void setParams(PreparedStatement pstmt) throws SQLException {
        for (int i = 0; i < params.size(); i++) {
            Object param = params.get(i);
            int type = types.get(i);
            // Set param, noting fact that params start at 1 and not 0.
            switch (type) {
                case Types.INTEGER:
                    pstmt.setInt(i + 1, (Integer) param);
                    break;
                case Types.BIGINT:
                    pstmt.setLong(i + 1, (Long) param);
                    break;
                case Types.VARCHAR:
                    pstmt.setString(i + 1, (String) param);
                    break;
                case Types.BOOLEAN:
                    pstmt.setBoolean(i + 1, (Boolean) param);
                    break;
                default:
                    // Only the four types above can be added via the addX methods,
                    // so any other value indicates a programming error.
                    throw new SQLException("Unsupported parameter type: " + type);
            }
        }
    }

    /**
     * Two cached statements are equal when their SQL text, parameter types and
     * parameter values all match.
     *
     * <p>Fix: the previous implementation short-circuited to {@code true} when
     * both SQL strings were {@code null}, without ever comparing the parameter
     * lists, so two statements with different parameters could compare equal.
     */
    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(object instanceof CachedPreparedStatement)) {
            return false;
        }
        CachedPreparedStatement otherStmt = (CachedPreparedStatement) object;
        boolean sqlEqual = (sql == null) ? otherStmt.sql == null : sql.equals(otherStmt.sql);
        return sqlEqual && types.equals(otherStmt.types) && params.equals(otherStmt.params);
    }

    @Override
    public int hashCode() {
        int hashCode = 1;
        if (sql != null) {
            hashCode += sql.hashCode();
        }
        hashCode = hashCode * 31 + types.hashCode();
        hashCode = hashCode * 31 + params.hashCode();
        return hashCode;
    }

    /**
     * Renders the SQL with each '?' placeholder replaced by its bound value
     * (strings quoted), for logging/debugging only. Falls back to the raw SQL
     * on any error rather than propagating it.
     */
    @Override
    public String toString() {
        String toStringSql = sql;
        try {
            int index = toStringSql.indexOf('?');
            int count = 0;
            while (index > -1) {
                Object param = params.get(count);
                int type = types.get(count);
                String val = null;
                // Get param
                switch (type) {
                    case Types.INTEGER:
                        val = "" + param;
                        break;
                    case Types.BIGINT:
                        val = "" + param;
                        break;
                    case Types.VARCHAR:
                        val = '\'' + (String) param + '\'';
                        break;
                    case Types.BOOLEAN:
                        val = "" + param;
                        break;
                }
                toStringSql = toStringSql.substring(0, index) + val
                        + ((index == toStringSql.length() - 1) ? "" : toStringSql.substring(index + 1));
                // Resume the search after the substituted value so that '?'
                // characters inside a substituted string are not treated as
                // placeholders.
                index = toStringSql.indexOf('?', index + val.length());
                count++;
            }
        }
        catch (Exception e) {
            Log.error(e.getMessage(), e);
        }
        return "CachedPreparedStatement{ sql=" + toStringSql + '}';
    }
}
package org.basex.build;

import static org.basex.core.Text.*;
import static org.basex.data.DataText.*;

import java.io.*;

import org.basex.core.*;
import org.basex.core.cmd.*;
import org.basex.data.*;
import org.basex.index.name.*;
import org.basex.io.*;
import org.basex.io.in.DataInput;
import org.basex.io.out.*;
import org.basex.io.out.DataOutput;
import org.basex.io.random.*;
import org.basex.util.*;

/**
 * This class creates a database instance on disk.
 * The storage layout is described in the {@link Data} class.
 *
 * @author BaseX Team 2005-16, BSD License
 * @author Christian Gruen
 */
public final class DiskBuilder extends Builder implements Closeable {
  /** Text compressor. */
  private final Compress comp = new Compress();
  /** Database table. */
  private DataOutput tout;
  /** Database texts. */
  private DataOutput xout;
  /** Database values. */
  private DataOutput vout;
  /** Output stream for temporary values. */
  private DataOutput sout;

  /** Static options. */
  private final StaticOptions sopts;
  /** Closed flag. */
  private boolean closed;

  /** Debug counter. */
  private int c;

  /**
   * Constructor.
   * @param name name of database
   * @param parser parser
   * @param sopts static options
   * @param opts main options
   */
  public DiskBuilder(final String name, final Parser parser, final StaticOptions sopts,
      final MainOptions opts) {
    super(name, parser);
    this.sopts = sopts;
    meta = new MetaData(dbname, opts, sopts);
  }

  /**
   * Parses the input and writes the database to disk.
   * @return the resulting disk-backed database instance
   * @throws IOException I/O exception
   */
  @Override
  public DiskData build() throws IOException {
    meta.assign(parser);
    meta.dirty = true;

    // calculate optimized output buffer sizes to reduce disk fragmentation:
    // capped at 4 MiB (1 << 22) or a quarter of the currently unused heap,
    // then rounded down to a multiple of the block size
    final Runtime rt = Runtime.getRuntime();
    final long max = Math.min(1 << 22, rt.maxMemory() - rt.freeMemory() >> 2);
    int bs = (int) Math.min(meta.filesize, max);
    bs = Math.max(IO.BLOCKSIZE, bs - bs % IO.BLOCKSIZE);

    // drop old database (if available) and create new one
    DropDB.drop(dbname, sopts);
    sopts.dbPath(dbname).md();

    elemNames = new Names(meta);
    attrNames = new Names(meta);
    try {
      tout = new DataOutput(new TableOutput(meta, DATATBL));
      xout = new DataOutput(meta.dbfile(DATATXT), bs);
      vout = new DataOutput(meta.dbfile(DATAATV), bs);
      sout = new DataOutput(meta.dbfile(DATATMP), bs);

      final Performance perf = Prop.debug ? new Performance() : null;
      Util.debug(tit() + DOTS);
      parse();
      if(Prop.debug) Util.errln(" " + perf + " (" + Performance.getMemory() + ')');
    } catch(final IOException ex) {
      // best-effort cleanup; the original exception is rethrown
      try { close(); } catch(final IOException ignore) { }
      throw ex;
    }
    close();

    // copy temporary values into database table: the temp file holds
    // (pre, size) pairs written by setSize, applied via write4 at offset 8
    try(final DataInput in = new DataInput(meta.dbfile(DATATMP))) {
      final TableAccess ta = new TableDiskAccess(meta, true);
      try {
        for(; spos < ssize; ++spos) ta.write4(in.readNum(), 8, in.readNum());
      } finally {
        ta.close();
      }
    }
    meta.dbfile(DATATMP).delete();

    // return database instance
    return new DiskData(meta, elemNames, attrNames, path, nspaces);
  }

  @Override
  public void abort() {
    try {
      close();
    } catch(final IOException ex) {
      Util.debug(ex);
    }
    // remove the partially built database
    if(meta != null) DropDB.drop(meta.name, sopts);
  }

  @Override
  public DataClip dataClip() throws IOException {
    return new DataClip(build());
  }

  /** Closes all output streams and the parser; safe to call repeatedly. */
  @Override
  public void close() throws IOException {
    if(closed) return;
    closed = true;
    if(tout != null) tout.close();
    if(xout != null) xout.close();
    if(vout != null) vout.close();
    if(sout != null) sout.close();
    parser.close();
    tout = null;
    xout = null;
    vout = null;
    sout = null;
  }

  @Override
  protected void addDoc(final byte[] value) throws IOException {
    // fixed-width table record: kind byte, 2/5/4/4-byte fields
    tout.write1(Data.DOC);
    tout.write2(0);
    tout.write5(textRef(value, true));
    tout.write4(0);
    tout.write4(meta.size++);
  }

  @Override
  protected void addElem(final int dist, final int nameId, final int asize, final int uriId,
      final boolean ne) throws IOException {
    // low 3 bits of the first byte carry the node kind
    tout.write1(asize << 3 | Data.ELEM);
    // highest bit of the 2-byte name field flags a namespace ("ne")
    tout.write2((ne ? 1 << 15 : 0) | nameId);
    tout.write1(uriId);
    tout.write4(dist);
    tout.write4(asize);
    tout.write4(meta.size++);
    // progress dot every 0x80000 elements in debug mode
    if(Prop.debug && (c++ & 0x7FFFF) == 0) Util.err(".");
  }

  @Override
  protected void addAttr(final int nameId, final byte[] value, final int dist, final int uriId)
      throws IOException {
    tout.write1(dist << 3 | Data.ATTR);
    tout.write2(nameId);
    tout.write5(textRef(value, false));
    tout.write4(uriId);
    tout.write4(meta.size++);
  }

  @Override
  protected void addText(final byte[] value, final int dist, final byte kind)
      throws IOException {
    tout.write1(kind);
    tout.write2(0);
    tout.write5(textRef(value, true));
    tout.write4(dist);
    tout.write4(meta.size++);
  }

  @Override
  protected void setSize(final int pre, final int size) throws IOException {
    // deferred: sizes are only known after a subtree is parsed, so they are
    // buffered here and patched into the table at the end of build()
    sout.writeNum(pre);
    sout.writeNum(size);
    ++ssize;
  }

  /**
   * Calculates the text offset and writes the text value.
   * @param value value to be inlined
   * @param text text/attribute flag
   * @return inline value or text position
   * @throws IOException I/O exception
   */
  private long textRef(final byte[] value, final boolean text) throws IOException {
    // inline integer value (small integers are stored directly, flagged OFFNUM)
    final long v = Token.toSimpleInt(value);
    if(v != Integer.MIN_VALUE) return v | IO.OFFNUM;

    // store text to heap file; OFFCOMP flags a compressed entry
    final DataOutput store = text ? xout : vout;
    final long off = store.size();
    final byte[] val = comp.pack(value);
    store.writeToken(val);
    return val == value ? off : off | IO.OFFCOMP;
  }
}
/**
 * Copyright (C) 2013
 * by 52 North Initiative for Geospatial Open Source Software GmbH
 *
 * Contact: Andreas Wytzisk
 * 52 North Initiative for Geospatial Open Source Software GmbH
 * Martin-Luther-King-Weg 24
 * 48155 Muenster, Germany
 * info@52north.org
 *
 * This program is free software; you can redistribute and/or modify it under
 * the terms of the GNU General Public License version 2 as published by the
 * Free Software Foundation.
 *
 * This program is distributed WITHOUT ANY WARRANTY; even without the implied
 * WARRANTY OF MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program (see gnu-gpl v2.txt). If not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA or
 * visit the Free Software Foundation web page, http://www.fsf.org.
 */
package org.n52.sos.ds.datasource;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table;
import org.hibernate.spatial.dialect.postgis.PostgisDialect52N;
import org.hibernate.tool.hbm2ddl.DatabaseMetadata;
import org.n52.sos.ds.hibernate.util.HibernateConstants;
import org.n52.sos.exception.ConfigurationException;

import com.google.common.base.Joiner;
import com.google.common.collect.Lists;

/**
 * Base datasource implementation for PostgreSQL/PostGIS backends.
 *
 * @since 4.0.0
 *
 */
public abstract class AbstractPostgresDatasource extends AbstractHibernateFullDBDatasource {

    protected static final String POSTGRES_DRIVER_CLASS = "org.postgresql.Driver";

    protected static final Pattern JDBC_URL_PATTERN = Pattern.compile("^jdbc:postgresql://([^:]+):([0-9]+)/(.*)$");

    protected static final String USERNAME_DESCRIPTION =
            "Your database server user name. The default value for PostgreSQL is \"postgres\".";

    protected static final String USERNAME_DEFAULT_VALUE = "postgres";

    protected static final String PASSWORD_DESCRIPTION =
            "Your database server password. The default value is \"postgres\".";

    protected static final String PASSWORD_DEFAULT_VALUE = "postgres";

    protected static final String HOST_DESCRIPTION =
            "Set this to the IP/net location of PostgreSQL database server. The default value for PostgreSQL is \"localhost\".";

    protected static final String PORT_DESCRIPTION =
            "Set this to the port number of your PostgreSQL server. The default value for PostgreSQL is 5432.";

    protected static final int PORT_DEFAULT_VALUE = 5432;

    // public static final String CATALOG_DEFAULT_VALUE = "public";

    protected static final String SCHEMA_DEFAULT_VALUE = "public";

    protected static final String FUNC_POSTGIS_VERSION = "postgis_version()";

    protected static final String TAB_SPATIAL_REF_SYS = "spatial_ref_sys";

    public AbstractPostgresDatasource() {
        super();
    }

    @Override
    protected Dialect createDialect() {
        return new PostgisDialect52N();
    }

    @Override
    protected String getDriverClass() {
        return POSTGRES_DRIVER_CLASS;
    }

    /**
     * Verifies that the configured user may create (and drop) tables by
     * executing a create/drop round trip inside a transaction.
     *
     * @return {@code true} if the test table could be created and dropped
     */
    @Override
    public boolean checkSchemaCreation(Map<String, Object> settings) {
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = openConnection(settings);
            stmt = conn.createStatement();
            String schema = (String) settings.get(createSchemaDefinition().getKey());
            schema = schema == null ? "" : "." + schema;
            final String command =
                    String.format("BEGIN; " + "DROP TABLE IF EXISTS \"%1$ssos_installer_test_table\"; "
                            + "CREATE TABLE \"%1$ssos_installer_test_table\" (id integer NOT NULL); "
                            + "DROP TABLE \"%1$ssos_installer_test_table\"; " + "END;", schema);
            stmt.execute(command);
            return true;
        } catch (SQLException e) {
            // any failure (permissions, connectivity) means schema creation is unsupported
            return false;
        } finally {
            close(stmt);
            close(conn);
        }
    }

    @Override
    protected void validatePrerequisites(Connection con, DatabaseMetadata metadata, Map<String, Object> settings) {
        checkPostgis(con, settings);
        checkSpatialRefSys(con, metadata, settings);
    }

    /**
     * Checks that the PostGIS extension is installed by invoking
     * {@code postgis_version()}.
     *
     * @throws ConfigurationException if the function call fails
     */
    protected void checkPostgis(Connection con, Map<String, Object> settings) {
        Statement stmt = null;
        try {
            StringBuilder builder = new StringBuilder();
            builder.append(SELECT);
            builder.append(BLANK_CHAR);
            builder.append(FUNC_POSTGIS_VERSION);
            builder.append(SEMICOLON_CHAR);
            stmt = con.createStatement();
            stmt.execute(builder.toString());
            // TODO check PostGIS version
        } catch (SQLException ex) {
            throw new ConfigurationException("PostGIS does not seem to be installed.", ex);
        } finally {
            close(stmt);
        }
    }

    /**
     * Checks that the PostGIS {@code spatial_ref_sys} table exists and is
     * readable.
     *
     * @throws ConfigurationException if the table is missing or unreadable
     */
    protected void checkSpatialRefSys(Connection con, DatabaseMetadata metadata, Map<String, Object> settings) {
        Statement stmt = null;
        try {
            // use the shared constant instead of a duplicated literal
            if (!metadata.isTable(TAB_SPATIAL_REF_SYS)) {
                throw new ConfigurationException("Missing 'spatial_ref_sys' table.");
            }
            StringBuilder builder = new StringBuilder();
            builder.append(SELECT);
            builder.append(BLANK_CHAR);
            builder.append(DEFAULT_COUNT);
            builder.append(BLANK_CHAR);
            builder.append(FROM);
            builder.append(BLANK_CHAR);
            builder.append(TAB_SPATIAL_REF_SYS);
            builder.append(SEMICOLON_CHAR);
            stmt = con.createStatement();
            stmt.execute(builder.toString());
        } catch (SQLException ex) {
            throw new ConfigurationException("Can not read from table 'spatial_ref_sys'", ex);
        } finally {
            close(stmt);
        }
    }

    @Override
    protected String toURL(Map<String, Object> settings) {
        String url =
                String.format("jdbc:postgresql://%s:%d/%s", settings.get(HOST_KEY), settings.get(PORT_KEY),
                        settings.get(DATABASE_KEY));
        return url;
    }

    /**
     * Splits a JDBC URL into host, port and database name.
     *
     * <p>Fix: the previous implementation ignored the result of
     * {@code matcher.find()}, so a malformed URL surfaced as an opaque
     * {@code IllegalStateException} from {@code group()}. A malformed URL now
     * raises a descriptive {@link ConfigurationException}.
     *
     * @param url JDBC URL of the form {@code jdbc:postgresql://host:port/db}
     * @return array of {host, port, database}
     */
    @Override
    protected String[] parseURL(String url) {
        Matcher matcher = JDBC_URL_PATTERN.matcher(url);
        if (!matcher.find()) {
            throw new ConfigurationException("Invalid JDBC URL: " + url);
        }
        return new String[] { matcher.group(1), matcher.group(2), matcher.group(3) };
    }

    @Override
    public boolean supportsClear() {
        return true;
    }

    /**
     * Truncates all physically mapped tables and restarts their identity
     * sequences, cascading to dependent tables.
     */
    @Override
    public void clear(Properties properties) {
        Map<String, Object> settings = parseDatasourceProperties(properties);
        CustomConfiguration config = getConfig(settings);
        Iterator<Table> tables = config.getTableMappings();
        List<String> names = new LinkedList<String>();
        while (tables.hasNext()) {
            Table table = tables.next();
            if (table.isPhysicalTable()) {
                names.add(table.getName());
            }
        }
        if (!names.isEmpty()) {
            Connection conn = null;
            Statement stmt = null;
            try {
                conn = openConnection(settings);
                stmt = conn.createStatement();
                stmt.execute(String.format("truncate %s restart identity cascade", Joiner.on(", ").join(names)));
            } catch (SQLException ex) {
                throw new ConfigurationException(ex);
            } finally {
                close(stmt);
                close(conn);
            }
        }
    }

    @Override
    protected Connection openConnection(Map<String, Object> settings) throws SQLException {
        try {
            String jdbc = toURL(settings);
            Class.forName(getDriverClass());
            String pass = (String) settings.get(HibernateConstants.CONNECTION_PASSWORD);
            String user = (String) settings.get(HibernateConstants.CONNECTION_USERNAME);
            return DriverManager.getConnection(jdbc, user, pass);
        } catch (ClassNotFoundException ex) {
            // wrap so callers only need to handle SQLException
            throw new SQLException(ex);
        }
    }

    /**
     * Filters "alter ..." statements out of the generated drop script; they
     * are not needed for a PostgreSQL drop and can fail on partial schemas.
     */
    @Override
    protected String[] checkDropSchema(String[] dropSchema) {
        List<String> checkedSchema = Lists.newLinkedList();
        for (String string : dropSchema) {
            if (!string.startsWith("alter")) {
                checkedSchema.add(string);
            }
        }
        return checkedSchema.toArray(new String[checkedSchema.size()]);
    }
}
/*
 * $Id$
 */

/*

Copyright (c) 2000-2016 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.

*/

package org.lockss.plugin.igiglobal;

import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.lockss.config.Configuration;
import org.lockss.daemon.ConfigParamDescr;
import org.lockss.daemon.RangeCachedUrlSetSpec;
import org.lockss.plugin.*;
import org.lockss.plugin.ArchivalUnit.ConfigurationException;
import org.lockss.plugin.base.BaseCachedUrlSet;
import org.lockss.state.AuState;
import org.lockss.test.*;
import org.lockss.util.Constants;
import org.lockss.util.ListUtil;
import org.lockss.util.PatternFloatMap;
import org.lockss.util.RegexpUtil;
import org.lockss.util.TimeBase;

/**
 * Tests for the IGI Global journals plugin: AU creation, crawl rules,
 * start-URL construction, crawl scheduling, naming, and poll/repair
 * pattern configuration.
 */
public class TestIgiGlobalPlugin extends LockssPluginTestCase {

  protected MockLockssDaemon daemon;
  private final String PLUGIN_NAME = "org.lockss.plugin.igiglobal.IgiGlobalPlugin";
  static final String BASE_URL_KEY = ConfigParamDescr.BASE_URL.getKey();
  static final String JOURNAL_ISSN_KEY = ConfigParamDescr.JOURNAL_ISSN.getKey();
  static final String VOLUME_NUMBER_KEY = ConfigParamDescr.VOLUME_NUMBER.getKey();
  private final String BASE_URL = "http://www.example.com/";
  private final String BASE_URL2 = "https://www.example.com/";
  private final String VOLUME = "21";
  private final String JOURNAL_ISSN = "1546-2234";
  private final Configuration AU_CONFIG =
      ConfigurationUtil.fromArgs(
          BASE_URL_KEY, BASE_URL,
          VOLUME_NUMBER_KEY, VOLUME,
          JOURNAL_ISSN_KEY, JOURNAL_ISSN);

  // from au_url_poll_result_weight in plugins/src/org/lockss/plugin/igiglobal/IgiGlobalPlugin.xml
  // if it changes in the plugin, you will likely need to change the test, so verify
  static final String IGI_REPAIR_FROM_PEER_REGEXP1 =
      "(?i)://[^/]+/(images|jquery|sourcecontent)/.*[.](bmp|gif|ico|jpe?g|png|tif?f)$";
  static final String IGI_REPAIR_FROM_PEER_REGEXP2 =
      "[.](css|js)$";

  public void setUp() throws Exception {
    super.setUp();
  }

  public void tearDown() throws Exception {
    super.tearDown();
  }

  /** Creates an AU from the default (valid) configuration. */
  protected ArchivalUnit createAu()
      throws ArchivalUnit.ConfigurationException {
    return createAu(AU_CONFIG);
  }

  /** Creates and starts an AU from the given configuration. */
  protected ArchivalUnit createAu(Configuration config)
      throws ArchivalUnit.ConfigurationException {
    return PluginTestUtil.createAndStartAu(PLUGIN_NAME, config);
  }

  /** A config missing the ISSN must fail; the full config must succeed. */
  public void testCreateAu() {
    try {
      createAu(ConfigurationUtil.fromArgs(
          BASE_URL_KEY, BASE_URL,
          VOLUME_NUMBER_KEY, VOLUME));
      fail("Bad AU configuration should throw configuration exception");
    } catch (ConfigurationException ex) {
    }
    try {
      createAu();
    } catch (ConfigurationException ex) {
      // fix: corrected typo "creat" -> "create" in the failure message
      fail("Unable to create AU from valid configuration");
    }
  }

  /** Exercises the plugin crawl rules against representative URLs. */
  public void testShouldCacheProperPages() throws Exception {
    ArchivalUnit au = createAu();
    BaseCachedUrlSet cus = new BaseCachedUrlSet(au, new RangeCachedUrlSetSpec(BASE_URL));

    // start page
    assertShouldCache(BASE_URL + "lockss/journal-issues.aspx?issn=" + JOURNAL_ISSN + "&volume=" + VOLUME,
        true, au, cus);
    assertShouldCache(BASE_URL + "lockss/journal-issues.aspx?issn=" + JOURNAL_ISSN + "&volume=",
        false, au, cus);

    // issue and article pages
    assertShouldCache(BASE_URL + "gateway/contentowned/articles.aspx?titleid=55656",
        true, au, cus);
    assertShouldCache(BASE_URL + "gateway/article/55656",
        true, au, cus);
    assertShouldCache(BASE_URL + "gateway/issue/55656",
        true, au, cus);
    assertShouldCache(BASE_URL + "gateway/contentowned/article.aspx?titleid=55656&accesstype=infosci",
        false, au, cus);
    assertShouldCache(BASE_URL + "gateway/contentowned/issues.aspx?titleid=55656",
        false, au, cus);

    // pdf page with iframe
    assertShouldCache(BASE_URL + "gateway/article/full-text-pdf/55656",
        true, au, cus);
    // pdf file displayed in iframe
    assertShouldCache(BASE_URL + "viewtitle.aspx?titleid=55663",
        true, au, cus);
    assertShouldCache(BASE_URL + "pdf.aspx?titleid=55663",
        true, au, cus);

    // images, css, js
    assertShouldCache(BASE_URL + "jQuery/css/smoothness/images/ui-bg_flat_75_ffffff_40x100.png",
        true, au, cus);
    assertShouldCache(BASE_URL + "Images/publish-with-igi-global.jpg",
        true, au, cus);
    assertShouldCache(BASE_URL + "App_Themes/HeatherStyles/images/App_Master/favicon.ico",
        true, au, cus);
    assertShouldCache(BASE_URL + "jQuery/js/jquery-ui-1.7.2.custom.min.js",
        true, au, cus);

    // images, css, js with bad formatting
    assertShouldCache(BASE_URL + "App_Themes/HeatherStyles/IGIMain.css?v=02242012b",
        true, au, cus);
    assertShouldCache(BASE_URL + "Scripts/gateway.js?v=02162012",
        true, au, cus);

    // specified bad pages
    assertShouldCache(BASE_URL + "membership/login.aspx?returnurl=%2fgateway%2fcontentowned%2farticle.aspx%3ftitleid%3d55656%26accesstype%3dinfosci",
        false, au, cus);
    assertShouldCache(BASE_URL + "membership/login.aspx?jQuery%2css%2smoothness%2images%2ui-bg_flat_75_ffffff_40x100.png",
        false, au, cus);
    assertShouldCache(BASE_URL + "App_Themes/HeatherStyles/images/App_Master/App_Master/App_Master/favicon.ico",
        false, au, cus);
    assertShouldCache(BASE_URL + "gateway/edatabasetools/librariancorner.aspx",
        false, au, cus);

    // facebook
    assertShouldCache("http://www.facebook.com/pages/IGI-Global/138206739534176?ref=sgm",
        false, au, cus);

    // LOCKSS
    assertShouldCache("http://lockss.stanford.edu",
        false, au, cus);

    // other site
    assertShouldCache("http://exo.com/~noid/ConspiracyNet/satan.html",
        false, au, cus);
  }

  private void assertShouldCache(String url, boolean shouldCache, ArchivalUnit au, CachedUrlSet cus) {
    assertEquals("AU crawl rules applied incorrectly to " + url + " ",
        shouldCache, au.shouldBeCached(url));
  }

  public void testStartUrlConstruction() throws Exception {
    String expectedStartUrl =
        BASE_URL + "lockss/journal-issues.aspx?issn=" + JOURNAL_ISSN + "&volume=" + VOLUME;
    String expectedStartUrl2 =
        BASE_URL2 + "lockss/journal-issues.aspx?issn=" + JOURNAL_ISSN + "&volume=" + VOLUME;
    ArchivalUnit au = createAu();
    assertSameElements(ListUtil.list(expectedStartUrl, expectedStartUrl2), au.getStartUrls());
  }

  public void testGetUrlStems() throws Exception {
    ArchivalUnit au = createAu();
    assertSameElements(ListUtil.list(BASE_URL, BASE_URL2), au.getUrlStems());
  }

  public void testShouldDoNewContentCrawlTooEarly() throws Exception {
    ArchivalUnit au = createAu();
    AuState aus = new MockAuState(null, TimeBase.nowMs(), -1, -1, null);
    assertFalse(au.shouldCrawlForNewContent(aus));
  }

  public void testShouldDoNewContentCrawlForZero() throws Exception {
    ArchivalUnit au = createAu();
    AuState aus = new MockAuState(null, 0, -1, -1, null);
    assertTrue(au.shouldCrawlForNewContent(aus));
  }

  public void testShouldDoNewContentCrawlEachMonth() throws Exception {
    ArchivalUnit au = createAu();
    AuState aus = new MockAuState(null, 4 * Constants.WEEK, -1, -1, null);
    assertTrue(au.shouldCrawlForNewContent(aus));
  }

  public void testGetName() throws Exception {
    ArchivalUnit au = createAu();
    assertEquals("IGI Global Journals Plugin, Base URL " + BASE_URL + ", Journal ISSN "
        + JOURNAL_ISSN + ", Volume " + VOLUME, au.getName());
  }

  /** Verifies repair-from-peer patterns and the URL poll-result weight map. */
  public void testPollSpecial() throws Exception {
    ArchivalUnit au = createAu();

    // if it changes in the plugin, you will likely need to change the test, so verify
    assertEquals(ListUtil.list(
        IGI_REPAIR_FROM_PEER_REGEXP1, IGI_REPAIR_FROM_PEER_REGEXP2),
        RegexpUtil.regexpCollection(au.makeRepairFromPeerIfMissingUrlPatterns()));
    PatternFloatMap pfm = au.makeUrlPollResultWeightMap();

    // make sure that's the regexp that will match to the expected url string
    // this also tests the regexp (which is the same) for the weighted poll map
    // Add to pattern these urls? Has not been seen as problem, yet
    //   http://www.igi-global.com/favicon.ico
    List<String> repairList1 = ListUtil.list(
        BASE_URL + "sourcecontent/9781466601161_58264/978-1-4666-0116-1.ch002.f01.png",
        BASE_URL + "jQuery/css/blitzer/images/ui-icons_004276_256x240.png",
        BASE_URL + "images/workflow-wizard-hand-circle-medium-gray.png",
        BASE_URL + "images/erl-2015.png",
        BASE_URL + "Images/erl-2015.png");
    Pattern p = Pattern.compile(IGI_REPAIR_FROM_PEER_REGEXP1, Pattern.CASE_INSENSITIVE);
    for (String urlString : repairList1) {
      assertEquals(urlString, true, p.matcher(urlString).find());
      assertEquals(urlString, 0.0f, pfm.getMatch(urlString, 1.0f));
    }

    List<String> repairList2 = ListUtil.list(
        BASE_URL + "includes/gateway.61113.js",
        BASE_URL + "includes/main.02052016.css",
        BASE_URL + "Scripts/tipped/tipped.css",
        BASE_URL + "Scripts/tipped/tipped.js");
    p = Pattern.compile(IGI_REPAIR_FROM_PEER_REGEXP2, Pattern.CASE_INSENSITIVE);
    for (String urlString : repairList2) {
      assertEquals(urlString, true, p.matcher(urlString).find());
      assertEquals(urlString, 0.0f, pfm.getMatch(urlString, 1.0f));
    }

    // and this one should fail - it needs to be weighted correctly and repaired from publisher if possible
    String notString = BASE_URL + "favicon.ico";
    Matcher m = p.matcher(notString);
    assertEquals(false, m.find());

    PatternFloatMap urlPollResults = au.makeUrlPollResultWeightMap();
    assertNotNull(urlPollResults);
    for (String urlString : repairList2) {
      assertEquals(0.0, urlPollResults.getMatch(urlString, (float) 1), .0001);
    }
    assertEquals(1.0, urlPollResults.getMatch(notString, (float) 1), .0001);
  }
}
/*
       Licensed to the Apache Software Foundation (ASF) under one
       or more contributor license agreements.  See the NOTICE file
       distributed with this work for additional information
       regarding copyright ownership.  The ASF licenses this file
       to you under the Apache License, Version 2.0 (the
       "License"); you may not use this file except in compliance
       with the License.  You may obtain a copy of the License at

         http://www.apache.org/licenses/LICENSE-2.0

       Unless required by applicable law or agreed to in writing,
       software distributed under the License is distributed on an
       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
       KIND, either express or implied.  See the License for the
       specific language governing permissions and limitations
       under the License.
*/
package org.apache.cordova;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.cordova.api.CordovaInterface;
import org.apache.cordova.api.Plugin;
import org.apache.cordova.api.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.location.Location;
import android.util.Log;
import android.content.Context;

/**
 * This class listens to the accelerometer sensor and stores the latest
 * acceleration values x,y,z.
 */
public class AccelListener extends Plugin implements SensorEventListener {

    // Listener lifecycle states (public because JS-side code compares against them).
    public static int STOPPED = 0;
    public static int STARTING = 1;
    public static int RUNNING = 2;
    public static int ERROR_FAILED_TO_START = 3;

    private float x, y, z;                      // most recent acceleration values
    private long timestamp;                     // time of most recent value
    private int status;                         // status of listener
    private int accuracy = SensorManager.SENSOR_STATUS_UNRELIABLE;

    private SensorManager sensorManager;        // Sensor manager
    private Sensor mSensor;                     // Acceleration sensor returned by sensor manager

    private String callbackId;                  // Keeps track of the single "start" callback ID passed in from JS

    /**
     * Create an accelerometer listener.
     */
    public AccelListener() {
        this.x = 0;
        this.y = 0;
        this.z = 0;
        this.timestamp = 0;
        this.setStatus(AccelListener.STOPPED);
    }

    /**
     * Sets the context of the Command. This can then be used to do things like
     * get file paths associated with the Activity.
     *
     * @param ctx The context of the main Activity.
     */
    public void setContext(CordovaInterface ctx) {
        super.setContext(ctx);
        this.sensorManager = (SensorManager) ctx.getSystemService(Context.SENSOR_SERVICE);
    }

    /**
     * Executes the request and returns PluginResult.
     *
     * @param action     The action to execute ("start" or "stop").
     * @param args       JSONArry of arguments for the plugin.
     * @param callbackId The callback id used when calling back into JavaScript.
     * @return A PluginResult object with a status and message.
     */
    public PluginResult execute(String action, JSONArray args, String callbackId) {
        PluginResult.Status status = PluginResult.Status.NO_RESULT;
        String message = "";
        PluginResult result = new PluginResult(status, message);
        // Keep the callback alive: sensor readings are delivered asynchronously later.
        result.setKeepCallback(true);
        if (action.equals("start")) {
            this.callbackId = callbackId;
            if (this.status != AccelListener.RUNNING) {
                // If not running, then this is an async call, so don't worry about waiting.
                // We drop the callback onto our stack, call start, and let start and the
                // sensor callback fire off the callback down the road.
                this.start();
            }
        }
        else if (action.equals("stop")) {
            if (this.status == AccelListener.RUNNING) {
                this.stop();
            }
        }
        else {
            // Unsupported action
            return new PluginResult(PluginResult.Status.INVALID_ACTION);
        }
        return result;
    }

    /**
     * Called by AccelBroker when listener is to be shut down.
     * Stop listener.
     */
    public void onDestroy() {
        this.stop();
    }

    //--------------------------------------------------------------------------
    // LOCAL METHODS
    //--------------------------------------------------------------------------
    //
    /**
     * Start listening for acceleration sensor.
     *
     * @return status of listener
     */
    private int start() {
        // If already starting or running, then just return
        if ((this.status == AccelListener.RUNNING) || (this.status == AccelListener.STARTING)) {
            return this.status;
        }

        this.setStatus(AccelListener.STARTING);

        // Get accelerometer from sensor manager
        List<Sensor> list = this.sensorManager.getSensorList(Sensor.TYPE_ACCELEROMETER);

        // If found, then register as listener
        if ((list != null) && (list.size() > 0)) {
            this.mSensor = list.get(0);
            this.sensorManager.registerListener(this, this.mSensor, SensorManager.SENSOR_DELAY_UI);
            this.setStatus(AccelListener.STARTING);
        } else {
            this.setStatus(AccelListener.ERROR_FAILED_TO_START);
            this.fail(AccelListener.ERROR_FAILED_TO_START, "No sensors found to register accelerometer listening to.");
            return this.status;
        }

        // Wait (up to ~2s) for the first sensor event to move the status past STARTING.
        long timeout = 2000;
        while ((this.status == STARTING) && (timeout > 0)) {
            timeout = timeout - 100;
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // Re-assert the interrupt flag instead of swallowing it, so callers
                // higher up the stack can still observe the interruption.
                Thread.currentThread().interrupt();
            }
        }
        // BUG FIX: decide success/failure from the status, not the timeout counter.
        // The old check (timeout == 0) reported a failure even when the sensor came up
        // during the final 100ms sleep and the status had already moved to RUNNING.
        if (this.status == AccelListener.STARTING) {
            this.setStatus(AccelListener.ERROR_FAILED_TO_START);
            this.fail(AccelListener.ERROR_FAILED_TO_START, "Accelerometer could not be started.");
        }
        return this.status;
    }

    /**
     * Stop listening to acceleration sensor.
     */
    private void stop() {
        if (this.status != AccelListener.STOPPED) {
            this.sensorManager.unregisterListener(this);
        }
        this.setStatus(AccelListener.STOPPED);
        this.accuracy = SensorManager.SENSOR_STATUS_UNRELIABLE;
    }

    /**
     * Called when the accuracy of the sensor has changed.
     *
     * @param sensor   the sensor whose accuracy changed
     * @param accuracy the new accuracy value
     */
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
        // Only look at accelerometer events
        if (sensor.getType() != Sensor.TYPE_ACCELEROMETER) {
            return;
        }

        // If not running, then just return
        if (this.status == AccelListener.STOPPED) {
            return;
        }
        this.accuracy = accuracy;
    }

    /**
     * Sensor listener event.
     *
     * @param event the sensor reading; values[0..2] are x, y, z acceleration
     */
    public void onSensorChanged(SensorEvent event) {
        // Only look at accelerometer events
        if (event.sensor.getType() != Sensor.TYPE_ACCELEROMETER) {
            return;
        }

        // If not running, then just return
        if (this.status == AccelListener.STOPPED) {
            return;
        }
        this.setStatus(AccelListener.RUNNING);

        if (this.accuracy >= SensorManager.SENSOR_STATUS_ACCURACY_MEDIUM) {
            // Save time that event was received
            this.timestamp = System.currentTimeMillis();
            this.x = event.values[0];
            this.y = event.values[1];
            this.z = event.values[2];
            this.win();
        }
    }

    /**
     * Sends an error back to JS, keeping the callback channel open.
     *
     * @param code    one of the status/error constants above
     * @param message human-readable description of the failure
     */
    private void fail(int code, String message) {
        // Error object
        JSONObject errorObj = new JSONObject();
        try {
            errorObj.put("code", code);
            errorObj.put("message", message);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        PluginResult err = new PluginResult(PluginResult.Status.ERROR, errorObj);
        err.setKeepCallback(true);
        this.error(err, this.callbackId);
    }

    /** Sends the latest acceleration reading back to JS, keeping the callback open. */
    private void win() {
        // Success return object
        PluginResult result = new PluginResult(PluginResult.Status.OK, this.getAccelerationJSON());
        result.setKeepCallback(true);
        this.success(result, this.callbackId);
    }

    /** Records the listener state; see the status constants above. */
    private void setStatus(int status) {
        this.status = status;
    }

    /** Serializes the latest x/y/z reading and its timestamp as a JSON object. */
    private JSONObject getAccelerationJSON() {
        JSONObject r = new JSONObject();
        try {
            r.put("x", this.x);
            r.put("y", this.y);
            r.put("z", this.z);
            r.put("timestamp", this.timestamp);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return r;
    }
}
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.exec;

import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.actions.FilesetManifest.RelativeSymlinkBehavior.ERROR;
import static com.google.devtools.build.lib.actions.FilesetManifest.RelativeSymlinkBehavior.IGNORE;
import static com.google.devtools.build.lib.actions.FilesetManifest.RelativeSymlinkBehavior.RESOLVE;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.fail;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionInputHelper;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifactType;
import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact;
import com.google.devtools.build.lib.actions.ArtifactRoot;
import com.google.devtools.build.lib.actions.EmptyRunfilesSupplier;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.FilesetOutputSymlink;
import com.google.devtools.build.lib.actions.RunfilesSupplier;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.RunfilesSupplierImpl;
import com.google.devtools.build.lib.exec.util.FakeActionInputFileCache;
import com.google.devtools.build.lib.exec.util.SpawnBuilder;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Tests for {@link SpawnInputExpander}. */
@RunWith(JUnit4.class)
public class SpawnInputExpanderTest {
  private static final byte[] FAKE_DIGEST = new byte[] {1, 2, 3, 4};

  // Expander stub for tests that must not trigger tree/fileset expansion at all.
  private static final ArtifactExpander NO_ARTIFACT_EXPANDER =
      (a, b) -> fail("expected no interactions");

  // All tests run against an in-memory filesystem rooted at /root, with derived
  // artifacts under /root/out.
  private final FileSystem fs = new InMemoryFileSystem();
  private final Path execRoot = fs.getPath("/root");
  private final ArtifactRoot rootDir = ArtifactRoot.asDerivedRoot(execRoot, "out");

  private SpawnInputExpander expander = new SpawnInputExpander(execRoot, /*strict=*/ true);
  // Accumulates the exec-path -> input mapping produced by the expander under test.
  private Map<PathFragment, ActionInput> inputMappings = new HashMap<>();

  @Test
  public void testEmptyRunfiles() throws Exception {
    RunfilesSupplier supplier = EmptyRunfilesSupplier.INSTANCE;
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    expander.addRunfilesToInputs(inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER);
    assertThat(inputMappings).isEmpty();
  }

  @Test
  public void testRunfilesSingleFile() throws Exception {
    // A plain source file lands under <runfiles dir>/<workspace>/<exec path>.
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(artifact).build();
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 0L, /*isShareable=*/ true));

    expander.addRunfilesToInputs(inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER);
    assertThat(inputMappings).hasSize(1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/dir/file"), artifact);
  }

  @Test
  public void testRunfilesWithFileset() throws Exception {
    // A fileset inside runfiles is expanded via the ArtifactExpander's getFileset();
    // expand() must never be called for it (hence the throwing override below).
    Artifact artifact = createFilesetArtifact("foo/biz/fs_out");
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(artifact).build();
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 0L, /*isShareable=*/ true));

    ArtifactExpander filesetExpander =
        new ArtifactExpander() {
          @Override
          public void expand(Artifact artifact, Collection<? super Artifact> output) {
            throw new IllegalStateException("Unexpected tree expansion");
          }

          @Override
          public ImmutableList<FilesetOutputSymlink> getFileset(Artifact artifact) {
            return ImmutableList.of(
                FilesetOutputSymlink.createForTesting(
                    PathFragment.create("zizz"),
                    PathFragment.create("/foo/fake_exec/xyz/zizz"),
                    PathFragment.create("/foo/fake_exec/")));
          }
        };

    expander.addRunfilesToInputs(inputMappings, supplier, mockCache, filesetExpander);
    assertThat(inputMappings).hasSize(1);
    assertThat(inputMappings)
        .containsEntry(
            PathFragment.create("runfiles/workspace/foo/biz/fs_out/zizz"),
            ActionInputHelper.fromPath("/root/xyz/zizz"));
  }

  @Test
  public void testRunfilesDirectoryStrict() {
    // In strict mode a directory in runfiles is rejected with an IOException.
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(artifact).build();
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(artifact, FileArtifactValue.createForDirectoryWithMtime(-1));

    IOException expected =
        assertThrows(
            IOException.class,
            () ->
                expander.addRunfilesToInputs(
                    inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER));
    assertThat(expected).hasMessageThat().isEqualTo("Not a file: dir/file");
  }

  @Test
  public void testRunfilesDirectoryNonStrict() throws Exception {
    // With strict=false the same directory is accepted and mapped like a file.
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(artifact).build();
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(artifact, FileArtifactValue.createForDirectoryWithMtime(-1));

    expander = new SpawnInputExpander(execRoot, /*strict=*/ false);
    expander.addRunfilesToInputs(inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER);
    assertThat(inputMappings).hasSize(1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/dir/file"), artifact);
  }

  @Test
  public void testRunfilesTwoFiles() throws Exception {
    Artifact artifact1 =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Artifact artifact2 =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/baz"));
    Runfiles runfiles =
        new Runfiles.Builder("workspace").addArtifact(artifact1).addArtifact(artifact2).build();
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact1,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 1L, /*isShareable=*/ true));
    mockCache.put(
        artifact2,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 12L, /*isShareable=*/ true));

    expander.addRunfilesToInputs(inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/dir/file"), artifact1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/dir/baz"), artifact2);
  }

  @Test
  public void testRunfilesSymlink() throws Exception {
    // A workspace-relative symlink maps the link name, not the target's exec path.
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles =
        new Runfiles.Builder("workspace")
            .addSymlink(PathFragment.create("symlink"), artifact)
            .build();
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 1L, /*isShareable=*/ true));

    expander.addRunfilesToInputs(inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER);
    assertThat(inputMappings).hasSize(1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/symlink"), artifact);
  }

  @Test
  public void testRunfilesRootSymlink() throws Exception {
    // A root symlink lives directly under the runfiles dir (not under the workspace).
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles =
        new Runfiles.Builder("workspace")
            .addRootSymlink(PathFragment.create("symlink"), artifact)
            .build();
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 1L, /*isShareable=*/ true));

    expander.addRunfilesToInputs(inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings).containsEntry(PathFragment.create("runfiles/symlink"), artifact);
    // If there's no other entry, Runfiles adds an empty file in the workspace to make sure the
    // directory gets created.
    assertThat(inputMappings)
        .containsEntry(
            PathFragment.create("runfiles/workspace/.runfile"), SpawnInputExpander.EMPTY_FILE);
  }

  @Test
  public void testRunfilesWithTreeArtifacts() throws Exception {
    SpecialArtifact treeArtifact = createTreeArtifact("treeArtifact");
    assertThat(treeArtifact.isTreeArtifact()).isTrue();
    TreeFileArtifact file1 = TreeFileArtifact.createTreeOutput(treeArtifact, "file1");
    TreeFileArtifact file2 = TreeFileArtifact.createTreeOutput(treeArtifact, "file2");
    FileSystemUtils.writeContentAsLatin1(file1.getPath(), "foo");
    FileSystemUtils.writeContentAsLatin1(file2.getPath(), "bar");

    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(treeArtifact).build();
    // Expander that expands only our tree artifact into its two children.
    ArtifactExpander artifactExpander =
        (Artifact artifact, Collection<? super Artifact> output) -> {
          if (artifact.equals(treeArtifact)) {
            output.addAll(Arrays.asList(file1, file2));
          }
        };
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache fakeCache = new FakeActionInputFileCache();
    fakeCache.put(file1, FileArtifactValue.createForTesting(file1));
    fakeCache.put(file2, FileArtifactValue.createForTesting(file2));

    expander.addRunfilesToInputs(inputMappings, supplier, fakeCache, artifactExpander);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/treeArtifact/file1"), file1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/treeArtifact/file2"), file2);
  }

  @Test
  public void testRunfilesWithTreeArtifactsInSymlinks() throws Exception {
    SpecialArtifact treeArtifact = createTreeArtifact("treeArtifact");
    assertThat(treeArtifact.isTreeArtifact()).isTrue();
    TreeFileArtifact file1 = TreeFileArtifact.createTreeOutput(treeArtifact, "file1");
    TreeFileArtifact file2 = TreeFileArtifact.createTreeOutput(treeArtifact, "file2");
    FileSystemUtils.writeContentAsLatin1(file1.getPath(), "foo");
    FileSystemUtils.writeContentAsLatin1(file2.getPath(), "bar");
    // Same expansion as above, but reached through a runfiles symlink name.
    Runfiles runfiles =
        new Runfiles.Builder("workspace")
            .addSymlink(PathFragment.create("symlink"), treeArtifact)
            .build();
    ArtifactExpander artifactExpander =
        (Artifact artifact, Collection<? super Artifact> output) -> {
          if (artifact.equals(treeArtifact)) {
            output.addAll(Arrays.asList(file1, file2));
          }
        };
    RunfilesSupplier supplier = new RunfilesSupplierImpl(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache fakeCache = new FakeActionInputFileCache();
    fakeCache.put(file1, FileArtifactValue.createForTesting(file1));
    fakeCache.put(file2, FileArtifactValue.createForTesting(file2));

    expander.addRunfilesToInputs(inputMappings, supplier, fakeCache, artifactExpander);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/symlink/file1"), file1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/symlink/file2"), file2);
  }

  @Test
  public void testTreeArtifactsInInputs() throws Exception {
    // Tree artifacts passed as direct spawn inputs expand under their own exec path.
    SpecialArtifact treeArtifact = createTreeArtifact("treeArtifact");
    assertThat(treeArtifact.isTreeArtifact()).isTrue();
    TreeFileArtifact file1 = TreeFileArtifact.createTreeOutput(treeArtifact, "file1");
    TreeFileArtifact file2 = TreeFileArtifact.createTreeOutput(treeArtifact, "file2");
    FileSystemUtils.writeContentAsLatin1(file1.getPath(), "foo");
    FileSystemUtils.writeContentAsLatin1(file2.getPath(), "bar");

    ArtifactExpander artifactExpander =
        (Artifact artifact, Collection<? super Artifact> output) -> {
          if (artifact.equals(treeArtifact)) {
            output.addAll(Arrays.asList(file1, file2));
          }
        };
    FakeActionInputFileCache fakeCache = new FakeActionInputFileCache();
    fakeCache.put(file1, FileArtifactValue.createForTesting(file1));
    fakeCache.put(file2, FileArtifactValue.createForTesting(file2));

    Spawn spawn = new SpawnBuilder("/bin/echo", "Hello World").withInput(treeArtifact).build();
    inputMappings = expander.getInputMapping(spawn, artifactExpander, fakeCache);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings).containsEntry(PathFragment.create("out/treeArtifact/file1"), file1);
    assertThat(inputMappings).containsEntry(PathFragment.create("out/treeArtifact/file2"), file2);
  }

  // Helper: a tree artifact under out/<relPath> with a generating action key attached.
  private SpecialArtifact createTreeArtifact(String relPath) throws IOException {
    SpecialArtifact treeArtifact = createSpecialArtifact(relPath, SpecialArtifactType.TREE);
    treeArtifact.setGeneratingActionKey(ActionsTestUtil.NULL_ACTION_LOOKUP_DATA);
    return treeArtifact;
  }

  // Helper: a fileset artifact under out/<relPath>.
  private SpecialArtifact createFilesetArtifact(String relPath) throws IOException {
    return createSpecialArtifact(relPath, SpecialArtifactType.FILESET);
  }

  // Helper: creates the on-disk directory and the SpecialArtifact pointing at it.
  private SpecialArtifact createSpecialArtifact(String relPath, SpecialArtifactType type)
      throws IOException {
    String outputSegment = "out";
    Path outputDir = execRoot.getRelative(outputSegment);
    Path outputPath = outputDir.getRelative(relPath);
    outputPath.createDirectoryAndParents();
    ArtifactRoot derivedRoot = ArtifactRoot.asDerivedRoot(execRoot, outputSegment);
    return new SpecialArtifact(
        derivedRoot,
        derivedRoot.getExecPath().getRelative(derivedRoot.getRoot().relativize(outputPath)),
        ActionsTestUtil.NULL_ARTIFACT_OWNER,
        type);
  }

  @Test
  public void testEmptyManifest() throws Exception {
    Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetMappings =
        ImmutableMap.of(createFileset("out"), ImmutableList.of());

    expander.addFilesetManifests(filesetMappings, inputMappings);

    assertThat(inputMappings).isEmpty();
  }

  @Test
  public void testManifestWithSingleFile() throws Exception {
    Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetMappings =
        ImmutableMap.of(
            createFileset("out"), ImmutableList.of(filesetSymlink("foo/bar", "/dir/file")));

    expander.addFilesetManifests(filesetMappings, inputMappings);

    assertThat(inputMappings)
        .containsExactly(
            PathFragment.create("out/foo/bar"), ActionInputHelper.fromPath("/dir/file"));
  }

  @Test
  public void testManifestWithTwoFiles() throws Exception {
    Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetMappings =
        ImmutableMap.of(
            createFileset("out"),
            ImmutableList.of(
                filesetSymlink("foo/bar", "/dir/file"), filesetSymlink("foo/baz", "/dir/file")));

    expander.addFilesetManifests(filesetMappings, inputMappings);

    assertThat(inputMappings)
        .containsExactly(
            PathFragment.create("out/foo/bar"), ActionInputHelper.fromPath("/dir/file"),
            PathFragment.create("out/foo/baz"), ActionInputHelper.fromPath("/dir/file"));
  }

  @Test
  public void testManifestWithDirectory() throws Exception {
    Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetMappings =
        ImmutableMap.of(createFileset("out"), ImmutableList.of(filesetSymlink("foo/bar", "/some")));

    expander.addFilesetManifests(filesetMappings, inputMappings);

    assertThat(inputMappings)
        .containsExactly(PathFragment.create("out/foo/bar"), ActionInputHelper.fromPath("/some"));
  }

  private static FilesetOutputSymlink filesetSymlink(String from, String to) {
    return FilesetOutputSymlink.createForTesting(
        PathFragment.create(from), PathFragment.create(to), PathFragment.create("/root"));
  }

  // Manifest with one relative ("foo") and one absolute ("/root/bar") symlink target,
  // used by the relative-symlink-behavior tests below.
  private ImmutableMap<Artifact, ImmutableList<FilesetOutputSymlink>> simpleFilesetManifest() {
    return ImmutableMap.of(
        createFileset("out"),
        ImmutableList.of(
            filesetSymlink("workspace/bar", "foo"), filesetSymlink("workspace/foo", "/root/bar")));
  }

  private SpecialArtifact createFileset(String execPath) {
    return new SpecialArtifact(
        rootDir,
        PathFragment.create(execPath),
        ActionsTestUtil.NULL_ARTIFACT_OWNER,
        SpecialArtifactType.FILESET);
  }

  @Test
  public void testManifestWithErrorOnRelativeSymlink() throws Exception {
    // ERROR behavior: a relative symlink target aborts the expansion.
    expander = new SpawnInputExpander(execRoot, /*strict=*/ true, ERROR);
    IOException e =
        assertThrows(
            IOException.class,
            () -> expander.addFilesetManifests(simpleFilesetManifest(), inputMappings));
    assertThat(e).hasMessageThat().contains("runfiles target is not absolute: foo");
  }

  @Test
  public void testManifestWithIgnoredRelativeSymlink() throws Exception {
    // IGNORE behavior: the relative entry is silently dropped.
    expander = new SpawnInputExpander(execRoot, /*strict=*/ true, IGNORE);
    expander.addFilesetManifests(simpleFilesetManifest(), inputMappings);
    assertThat(inputMappings)
        .containsExactly(
            PathFragment.create("out/workspace/foo"), ActionInputHelper.fromPath("/root/bar"));
  }

  @Test
  public void testManifestWithResolvedRelativeSymlink() throws Exception {
    // RESOLVE behavior: the relative entry is resolved through its sibling entry.
    expander = new SpawnInputExpander(execRoot, /*strict=*/ true, RESOLVE);
    expander.addFilesetManifests(simpleFilesetManifest(), inputMappings);
    assertThat(inputMappings)
        .containsExactly(
            PathFragment.create("out/workspace/bar"), ActionInputHelper.fromPath("/root/bar"),
            PathFragment.create("out/workspace/foo"), ActionInputHelper.fromPath("/root/bar"));
  }
}
/* * Copyright 2016-2017 Daniel Siviter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cito.stomp.jms; import static cito.server.SecurityContext.NOOP; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Map; import javax.enterprise.concurrent.ManagedScheduledExecutorService; import javax.enterprise.event.Event; import javax.enterprise.inject.spi.BeanManager; import javax.inject.Provider; import javax.jms.ConnectionFactory; import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.MessageConsumer; import javax.jms.MessageListener; import javax.security.auth.login.LoginException; import javax.websocket.CloseReason; import javax.websocket.CloseReason.CloseCodes; import org.hamcrest.core.StringEndsWith; import org.hamcrest.core.StringStartsWith; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnit; import org.mockito.junit.MockitoRule; import 
org.slf4j.Logger; import cito.ReflectionUtil; import cito.event.Message; import cito.server.SecurityContext; import cito.stomp.Command; import cito.stomp.Frame; import cito.stomp.Header.Standard; import cito.stomp.HeartBeatMonitor; /** * Unit tests for {@link Connection}. * * @author Daniel Siviter * @since v1.0 [25 Jul 2016] */ public class ConnectionTest { @Rule public MockitoRule mockito = MockitoJUnit.rule(); @Rule public ExpectedException thrown = ExpectedException.none(); @Mock private Logger log; @Mock private BeanManager beanManager; @Mock private Relay relay; @Mock private ConnectionFactory connectionFactory; @Mock private Factory factory; @Mock private ManagedScheduledExecutorService scheduler; @Mock private Event<Message> brokerMessageEvent; @Mock private Provider<javax.websocket.Session> wsSessionProvider; @Mock private javax.websocket.Session wsSession; @Mock private Provider<SecurityContext> securityCtx; @InjectMocks private Connection connection; @Before public void before() { ReflectionUtil.set(this.connection, "sessionId", "ABC123"); this.connection.init(); } @Test public void send_frame() { final HeartBeatMonitor heartBeatMonitor = mock(HeartBeatMonitor.class); ReflectionUtil.set(this.connection, "heartBeatMonitor", heartBeatMonitor); final Frame frame = mock(Frame.class); when(frame.command()).thenReturn(Command.MESSAGE); this.connection.sendToClient(frame); verify(heartBeatMonitor).resetSend(); verify(frame).isHeartBeat(); verify(frame).command(); verify(this.log).info("Sending message to client. 
[sessionId={},command={}]", "ABC123", Command.MESSAGE); verify(this.brokerMessageEvent).fire(any(Message.class)); verifyNoMoreInteractions(heartBeatMonitor, frame); } @Test public void send_frame_HEARTBEAT() { final HeartBeatMonitor heartBeatMonitor = mock(HeartBeatMonitor.class); ReflectionUtil.set(this.connection, "heartBeatMonitor", heartBeatMonitor); final Frame frame = Frame.HEART_BEAT; this.connection.sendToClient(frame); verify(heartBeatMonitor).resetSend(); verify(this.log).debug("Sending message to client. [sessionId={},command=HEARTBEAT]", "ABC123"); verify(this.brokerMessageEvent).fire(any(Message.class)); verifyNoMoreInteractions(heartBeatMonitor); } @Test public void connect() throws JMSException, LoginException { ReflectionUtil.set(this.connection, "sessionId", null); // every other test needs it set! final HeartBeatMonitor heartBeatMonitor = mock(HeartBeatMonitor.class); ReflectionUtil.set(this.connection, "heartBeatMonitor", heartBeatMonitor); final Frame frame = Frame.connect("myhost.com", "1.0").build(); final Message messageEvent = new Message("ABC123", frame); final javax.jms.Connection jmsConnection = mock(javax.jms.Connection.class); when(this.connectionFactory.createConnection(null, null)).thenReturn(jmsConnection); when(this.securityCtx.get()).thenReturn(NOOP); this.connection.connect(messageEvent); verify(this.log).info("Connecting... [sessionId={}]", "ABC123"); verify(this.securityCtx).get(); verify(this.connectionFactory).createConnection(null, null); verify(heartBeatMonitor).resetSend(); verify(this.log).info("Starting JMS connection... [sessionId={}]", "ABC123"); verify(jmsConnection).setClientID("ABC123"); verify(jmsConnection).start(); verify(this.log).info("Sending message to client. 
[sessionId={},command={}]", "ABC123", Command.CONNECTED); verify(this.brokerMessageEvent).fire(any(Message.class)); verifyNoMoreInteractions(heartBeatMonitor, jmsConnection); } @Test public void on_wrongSession() { this.thrown.expect(IllegalArgumentException.class); this.thrown.expectMessage("Session identifier mismatch! [expected=ABC123,actual=Another]"); this.connection.on(new Message("Another", Frame.HEART_BEAT)); } @Test public void on_CONNECT() { final Message messageEvent = new Message("ABC123", Frame.connect("myhost.com", "1.0").build()); IllegalArgumentException expected = null; try { this.connection.on(messageEvent); fail("IllegalArgumentException expected!"); } catch (IllegalArgumentException e) { expected = e; } assertEquals("CONNECT not supported! [ABC123]", expected.getMessage()); } @Test public void on_DISCONNECT() { final Message messageEvent = new Message("ABC123", Frame.disconnect().build()); IllegalArgumentException expected = null; try { this.connection.on(messageEvent); fail("IllegalArgumentException expected!"); } catch (IllegalArgumentException e) { expected = e; } assertEquals("DISCONNECT not supported! [ABC123]", expected.getMessage()); } @Test public void on_SEND() throws JMSException { final Session session = mock(Session.class); ReflectionUtil.set(this.connection, "session", session); final Frame frame = Frame.send("/there", null, "{}").build(); this.connection.on(new Message("ABC123", frame)); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.SEND); verifyNoMoreInteractions(session); } @Test public void on_SEND_jmsDestination() throws JMSException { final Session session = mock(Session.class); ReflectionUtil.set(this.connection, "session", session); final Frame frame = Frame.send("topic/there", null, "{}").build(); this.connection.on(new Message("ABC123", frame)); verify(session).sendToBroker(frame); verify(this.log).info("Message received from client. 
[sessionId={},command={}]", "ABC123", Command.SEND); verifyNoMoreInteractions(session); } @Test @SuppressWarnings("unchecked") public void on_ACK() throws JMSException { final javax.jms.Message msg = mock(javax.jms.Message.class); ReflectionUtil.get(this.connection, "ackMessages", Map.class).put("1", msg); this.connection.on(new Message("ABC123", Frame.builder(Command.ACK).header(Standard.ID, "1").build())); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.ACK); verify(msg).acknowledge(); verifyNoMoreInteractions(msg); } @Test public void on_ACK_noExist() { IllegalStateException expected = null; try { this.connection.on(new Message("ABC123", Frame.builder(Command.ACK).header(Standard.ID, "1").build())); } catch (IllegalStateException e) { expected = e; } assertEquals("No such message to ACK! [1]", expected.getMessage()); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.ACK); } @Test @SuppressWarnings("unchecked") public void on_NACK() { final javax.jms.Message msg = mock(javax.jms.Message.class); ReflectionUtil.get(this.connection, "ackMessages", Map.class).put("1", msg); this.connection.on(new Message("ABC123", Frame.builder(Command.NACK).header(Standard.ID, "1").build())); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.NACK); verify(this.log).warn("NACK recieved, but no JMS equivalent! [{}]", "1"); verifyNoMoreInteractions(msg); } @Test public void on_NACK_noExist() { IllegalStateException expected = null; try { this.connection.on(new Message("ABC123", Frame.builder(Command.NACK).header(Standard.ID, "1").build())); } catch (IllegalStateException e) { expected = e; } assertNotNull(expected); assertEquals("No such message to NACK! [1]", expected.getMessage()); verify(this.log).info("Message received from client. 
[sessionId={},command={}]", "ABC123", Command.NACK); } @Test public void on_BEGIN() throws JMSException { final Session txSession = mock(Session.class); when(this.factory.toSession(this.connection, true, javax.jms.Session.SESSION_TRANSACTED)).thenReturn(txSession); this.connection.on(new Message("ABC123", Frame.builder(Command.BEGIN).header(Standard.TRANSACTION, "1").build())); assertEquals(txSession, ReflectionUtil.get(this.connection, "txSessions", Map.class).get("1")); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.BEGIN); verify(this.factory).toSession(this.connection, true, javax.jms.Session.SESSION_TRANSACTED); verifyNoMoreInteractions(txSession); } @Test @SuppressWarnings("unchecked") public void on_BEGIN_alreadyExists() { final Session txSession = mock(Session.class); ReflectionUtil.get(this.connection, "txSessions", Map.class).put("1", txSession); IllegalStateException expected = null; try { this.connection.on(new Message("ABC123", Frame.builder(Command.BEGIN).header(Standard.TRANSACTION, "1").build())); } catch (IllegalStateException e) { expected = e; } assertEquals("Transaction already started! [1]", expected.getMessage()); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.BEGIN); } @Test @SuppressWarnings("unchecked") public void on_COMMIT() throws JMSException { final Session txSession = mock(Session.class); ReflectionUtil.get(this.connection, "txSessions", Map.class).put("1", txSession); this.connection.on(new Message("ABC123", Frame.builder(Command.COMMIT).header(Standard.TRANSACTION, "1").build())); verify(this.log).info("Message received from client. 
[sessionId={},command={}]", "ABC123", Command.COMMIT); verify(txSession).commit(); verifyNoMoreInteractions(txSession); } @Test public void on_COMMIT_notExists() { IllegalStateException expected = null; try { this.connection.on(new Message("ABC123", Frame.builder(Command.COMMIT).header(Standard.TRANSACTION, "1").build())); } catch (IllegalStateException e) { expected = e; } assertEquals("Transaction session does not exists! [1]", expected.getMessage()); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.COMMIT); } @Test @SuppressWarnings("unchecked") public void on_ABORT() throws JMSException { final Session txSession = mock(Session.class); ReflectionUtil.get(this.connection, "txSessions", Map.class).put("1", txSession); this.connection.on(new Message("ABC123", Frame.builder(Command.ABORT).header(Standard.TRANSACTION, "1").build())); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.ABORT); verify(txSession).rollback(); verifyNoMoreInteractions(txSession); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.ABORT); } @Test public void on_ABORT_notExists() { IllegalStateException expected = null; try { this.connection.on(new Message("ABC123", Frame.builder(Command.ABORT).header(Standard.TRANSACTION, "1").build())); } catch (IllegalStateException e) { expected = e; } assertEquals("Transaction session does not exists! [1]", expected.getMessage()); verify(this.log).info("Message received from client. 
[sessionId={},command={}]", "ABC123", Command.ABORT); } @Test public void on_SUBSCRIBE() throws JMSException { final Session session = mock(Session.class); ReflectionUtil.set(this.connection, "session", session); final Destination destination = mock(Destination.class); when(session.toDestination("/dest")).thenReturn(destination); when(session.getConnection()).thenReturn(this.connection); final MessageConsumer consumer = mock(MessageConsumer.class); when(session.createConsumer(eq(destination), any(String.class))).thenReturn(consumer); this.connection.on(new Message("ABC123", Frame.subscribe("1", "/dest").build())); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.SUBSCRIBE); verify(session).toDestination("/dest"); verify(session).getConnection(); verify(session).createConsumer(destination, "session IS NULL OR session = 'ABC123'"); verify(consumer).setMessageListener(any(MessageListener.class)); verifyNoMoreInteractions(session, destination, consumer); } @Test @SuppressWarnings("unchecked") public void on_SUBSCRIBE_alreadyExists() { final Subscription subscription = mock(Subscription.class); ReflectionUtil.get(this.connection, "subscriptions", Map.class).put("1", subscription); IllegalStateException expected = null; try { this.connection.on(new Message("ABC123", Frame.subscribe("1", "/dest").build())); } catch (IllegalStateException e) { expected = e; } assertEquals("Subscription already exists! [1]", expected.getMessage()); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.SUBSCRIBE); } @Test @SuppressWarnings("unchecked") public void on_UNSUBSCRIBE() { final Subscription subscription = mock(Subscription.class); ReflectionUtil.get(this.connection, "subscriptions", Map.class).put("1", subscription); this.connection.on(new Message("ABC123", Frame.builder(Command.UNSUBSCRIBE).subscription("1").build())); verify(this.log).info("Message received from client. 
[sessionId={},command={}]", "ABC123", Command.UNSUBSCRIBE); } @Test public void on_UNSUBSCRIBE_noExist() { IllegalStateException expected = null; try { this.connection.on(new Message("ABC123", Frame.builder(Command.UNSUBSCRIBE).subscription("1").build())); } catch (IllegalStateException e) { expected = e; } assertEquals("Subscription does not exist! [1]", expected.getMessage()); verify(this.log).info("Message received from client. [sessionId={},command={}]", "ABC123", Command.UNSUBSCRIBE); } @Test public void addAckMessage() throws JMSException { final javax.jms.Message msg = mock(javax.jms.Message.class); when(msg.getJMSMessageID()).thenReturn("foo"); this.connection.addAckMessage(msg); assertEquals(msg, ReflectionUtil.<Map<String, Message>>get(this.connection, "ackMessages").get("foo")); verify(msg).getJMSMessageID(); verifyNoMoreInteractions(msg); } @Test public void close_closeReason() throws IOException, JMSException { final HeartBeatMonitor heartBeatMonitor = mock(HeartBeatMonitor.class); ReflectionUtil.set(this.connection, "heartBeatMonitor", heartBeatMonitor); final javax.jms.Connection jmsConnection = mock(javax.jms.Connection.class); ReflectionUtil.set(this.connection, "delegate", jmsConnection); final CloseReason reason = new CloseReason(CloseCodes.CANNOT_ACCEPT, "Aggghhh!"); this.connection.close(reason); verify(this.log).info("Closing connection. 
[sessionId={},code={},reason={}]", "ABC123", CloseCodes.CANNOT_ACCEPT.getCode(), "Aggghhh!"); verify(jmsConnection).close(); verify(heartBeatMonitor).close(); verifyNoMoreInteractions(heartBeatMonitor, jmsConnection); } @Test public void toString_() { assertThat(this.connection.toString(), new StringStartsWith(Connection.class.getName() + "@")); assertThat(this.connection.toString(), new StringEndsWith("[sessionId=ABC123]")); } @After public void after() { verifyNoMoreInteractions(this.log, this.beanManager, this.relay, this.connectionFactory, this.factory, this.scheduler, this.brokerMessageEvent, this.wsSessionProvider, this.wsSession, this.securityCtx); } }
/** * Copyright 2015 Palantir Technologies * * Licensed under the BSD-3 License (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.cli.command; import java.util.Scanner; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.io.BaseEncoding; import com.palantir.atlasdb.cli.runner.InMemoryTestRunner; import com.palantir.atlasdb.cli.runner.SingleBackendCliTestRunner; import com.palantir.atlasdb.cli.services.AtlasDbServicesFactory; import com.palantir.atlasdb.cli.services.DaggerTestAtlasDbServices; import com.palantir.atlasdb.cli.services.ServicesConfigModule; import com.palantir.atlasdb.cli.services.TestAtlasDbServices; import com.palantir.atlasdb.cli.services.TestSweeperModule; import com.palantir.atlasdb.keyvalue.api.Cell; import com.palantir.atlasdb.keyvalue.api.KeyValueService; import com.palantir.atlasdb.keyvalue.api.Namespace; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.api.Value; import com.palantir.atlasdb.protos.generated.TableMetadataPersistence; import com.palantir.atlasdb.table.description.TableDefinition; import com.palantir.atlasdb.table.description.ValueType; import com.palantir.atlasdb.transaction.api.ConflictHandler; import com.palantir.atlasdb.transaction.impl.SerializableTransactionManager; import 
com.palantir.timestamp.TimestampService; public class TestSweepCommand { private static final Namespace NS1 = Namespace.create("test"); private static final Namespace NS2 = Namespace.create("diff"); private static final TableReference TABLE_ONE = TableReference.create(NS1, "one"); private static final TableReference TABLE_TWO = TableReference.create(NS1, "two"); private static final TableReference TABLE_THREE = TableReference.create(NS2, "one"); private static final String COL = "c"; private static AtomicLong sweepTimestamp; private static AtlasDbServicesFactory moduleFactory; @BeforeClass public static void setup() throws Exception { sweepTimestamp = new AtomicLong(); moduleFactory = new AtlasDbServicesFactory() { @Override public TestAtlasDbServices connect(ServicesConfigModule servicesConfigModule) { return DaggerTestAtlasDbServices.builder() .servicesConfigModule(servicesConfigModule) .testSweeperModule(TestSweeperModule.create(sweepTimestamp::get)) .build(); } }; } private InMemoryTestRunner makeRunner(String... 
args) { return new InMemoryTestRunner(SweepCommand.class, args); } @Test public void testSweepTable() throws Exception { try (SingleBackendCliTestRunner runner = makeRunner("-t", TABLE_ONE.getQualifiedName())) { TestAtlasDbServices services = runner.connect(moduleFactory); SerializableTransactionManager txm = services.getTransactionManager(); TimestampService tss = services.getTimestampService(); KeyValueService kvs = services.getKeyValueService(); createTable(kvs, TABLE_ONE, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); createTable(kvs, TABLE_TWO, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); long ts1 = put(txm, TABLE_ONE, "foo", "bar"); long ts2 = put(txm, TABLE_TWO, "foo", "tar"); long ts3 = put(txm, TABLE_ONE, "foo", "baz"); long ts4 = put(txm, TABLE_TWO, "foo", "taz"); long ts5 = tss.getFreshTimestamp(); String stdout = sweep(runner, ts5); Scanner scanner = new Scanner(stdout); final long uniqueCells = Long.parseLong(scanner.findInLine("\\d+ unique cells").split(" ")[0]); final long deletedCells = Long.parseLong(scanner.findInLine("deleted \\d+ cells").split(" ")[1]); Assert.assertEquals(1, uniqueCells); Assert.assertEquals(1, deletedCells); Assert.assertEquals("baz", get(kvs, TABLE_ONE, "foo", ts5)); Assert.assertEquals("", get(kvs, TABLE_ONE, "foo", mid(ts1, ts3))); Assert.assertEquals(ImmutableSet.of(-1L, ts3), getAllTs(kvs, TABLE_ONE, "foo")); Assert.assertEquals("taz", get(kvs, TABLE_TWO, "foo", ts5)); Assert.assertEquals("tar", get(kvs, TABLE_TWO, "foo", mid(ts3, ts4))); Assert.assertEquals(ImmutableSet.of(ts2, ts4), getAllTs(kvs, TABLE_TWO, "foo")); } } @Test public void testSweepNamespace() throws Exception { try (SingleBackendCliTestRunner runner = makeRunner("-n", NS1.getName())) { TestAtlasDbServices services = runner.connect(moduleFactory); SerializableTransactionManager txm = services.getTransactionManager(); TimestampService tss = services.getTimestampService(); KeyValueService kvs = services.getKeyValueService(); createTable(kvs, 
TABLE_ONE, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); createTable(kvs, TABLE_TWO, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); createTable(kvs, TABLE_THREE, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); long ts1 = put(txm, TABLE_ONE, "foo", "bar"); long ts2 = put(txm, TABLE_TWO, "foo", "tar"); long ts3 = put(txm, TABLE_THREE, "foo", "jar"); long ts4 = put(txm, TABLE_ONE, "foo", "baz"); long ts5 = put(txm, TABLE_THREE, "foo", "jaz"); long ts6 = put(txm, TABLE_TWO, "foo", "taz"); long ts7 = tss.getFreshTimestamp(); sweep(runner, ts7); Assert.assertEquals("baz", get(kvs, TABLE_ONE, "foo", ts7)); Assert.assertEquals("", get(kvs, TABLE_ONE, "foo", mid(ts1, ts2))); Assert.assertEquals(ImmutableSet.of(-1L, ts4), getAllTs(kvs, TABLE_ONE, "foo")); Assert.assertEquals("taz", get(kvs, TABLE_TWO, "foo", ts7)); Assert.assertEquals("", get(kvs, TABLE_TWO, "foo", mid(ts4, ts6))); Assert.assertEquals(ImmutableSet.of(-1L, ts6), getAllTs(kvs, TABLE_TWO, "foo")); Assert.assertEquals("jaz", get(kvs, TABLE_THREE, "foo", ts7)); Assert.assertEquals("jar", get(kvs, TABLE_THREE, "foo", mid(ts3, ts5))); Assert.assertEquals(ImmutableSet.of(ts3, ts5), getAllTs(kvs, TABLE_THREE, "foo")); } } @Test public void testSweepAll() throws Exception { try (SingleBackendCliTestRunner runner = makeRunner("-a")) { TestAtlasDbServices services = runner.connect(moduleFactory); SerializableTransactionManager txm = services.getTransactionManager(); TimestampService tss = services.getTimestampService(); KeyValueService kvs = services.getKeyValueService(); createTable(kvs, TABLE_ONE, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); createTable(kvs, TABLE_TWO, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); createTable(kvs, TABLE_THREE, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); long ts1 = put(txm, TABLE_ONE, "foo", "bar"); long ts2 = put(txm, TABLE_TWO, "foo", "tar"); long ts3 = put(txm, TABLE_THREE, "foo", "jar"); long ts4 = put(txm, TABLE_ONE, "foo", "baz"); 
long ts5 = put(txm, TABLE_THREE, "foo", "jaz"); long ts6 = put(txm, TABLE_TWO, "foo", "taz"); long ts7 = tss.getFreshTimestamp(); sweep(runner, ts7); Assert.assertEquals("baz", get(kvs, TABLE_ONE, "foo", ts7)); Assert.assertEquals("", get(kvs, TABLE_ONE, "foo", mid(ts1, ts2))); Assert.assertEquals(ImmutableSet.of(-1L, ts4), getAllTs(kvs, TABLE_ONE, "foo")); Assert.assertEquals("taz", get(kvs, TABLE_TWO, "foo", ts7)); Assert.assertEquals("", get(kvs, TABLE_TWO, "foo", mid(ts4, ts6))); Assert.assertEquals(ImmutableSet.of(-1L, ts6), getAllTs(kvs, TABLE_TWO, "foo")); Assert.assertEquals("jaz", get(kvs, TABLE_THREE, "foo", ts7)); Assert.assertEquals("", get(kvs, TABLE_THREE, "foo", mid(ts3, ts5))); Assert.assertEquals(ImmutableSet.of(-1L, ts5), getAllTs(kvs, TABLE_THREE, "foo")); } } @Test public void testSweepStartRow() throws Exception { try (SingleBackendCliTestRunner runner = makeRunner("-t", TABLE_ONE.getQualifiedName(), "-r", BaseEncoding.base16().encode("foo".getBytes()))) { TestAtlasDbServices services = runner.connect(moduleFactory); SerializableTransactionManager txm = services.getTransactionManager(); TimestampService tss = services.getTimestampService(); KeyValueService kvs = services.getKeyValueService(); createTable(kvs, TABLE_ONE, TableMetadataPersistence.SweepStrategy.CONSERVATIVE); long ts1 = put(txm, TABLE_ONE, "foo", "bar"); long ts2 = put(txm, TABLE_ONE, "foo", "biz"); long ts3 = put(txm, TABLE_ONE, "boo", "biz"); long ts4 = put(txm, TABLE_ONE, "foo", "baz"); long ts5 = tss.getFreshTimestamp(); sweep(runner, ts5); Assert.assertEquals("baz", get(kvs, TABLE_ONE, "foo", ts5)); Assert.assertEquals("", get(kvs, TABLE_ONE, "foo", mid(ts1, ts3))); Assert.assertEquals("", get(kvs, TABLE_ONE, "foo", mid(ts2, ts4))); Assert.assertEquals("biz", get(kvs, TABLE_ONE, "boo", mid(ts3, ts5))); Assert.assertEquals(ImmutableSet.of(-1L, ts4), getAllTs(kvs, TABLE_ONE, "foo")); Assert.assertEquals(ImmutableSet.of(ts3), getAllTs(kvs, TABLE_ONE, "boo")); } } private long 
mid(long low, long high) { return low + ((high -low) / 2); } private String sweep(SingleBackendCliTestRunner runner, long ts) { sweepTimestamp.set(ts); return runner.run(); } private String get(KeyValueService kvs, TableReference table, String row, long ts) { Cell cell = Cell.create(row.getBytes(), COL.getBytes()); Value val = kvs.get(table, ImmutableMap.of(cell, ts)).get(cell); return val == null ? null : new String(val.getContents()); } private Set<Long> getAllTs(KeyValueService kvs, TableReference table, String row) { Cell cell = Cell.create(row.getBytes(), COL.getBytes()); return ImmutableSet.copyOf(kvs.getAllTimestamps(table, ImmutableSet.of(cell), Long.MAX_VALUE).get(cell)); } private long put(SerializableTransactionManager txm, TableReference table, String row, String val) { Cell cell = Cell.create(row.getBytes(), COL.getBytes()); return txm.runTaskWithRetry(t -> { t.put(table, ImmutableMap.of(cell, val.getBytes())); return t.getTimestamp(); }); } private void createTable(KeyValueService kvs, TableReference table, final TableMetadataPersistence.SweepStrategy sweepStrategy) { kvs.createTable(table, new TableDefinition() {{ rowName(); rowComponent("row", ValueType.BLOB); columns(); column("col", COL, ValueType.BLOB); conflictHandler(ConflictHandler.IGNORE_ALL); sweepStrategy(sweepStrategy); }}.toTableMetadata().persistToBytes() ); } }
package sune.ssdf; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.regex.Pattern; /** * Helps with reading SSDF Syntax and contains methods * that helps with manipulating with objects in SSD files. * @version 1.1 * @author Sune*/ public final class SSDFCore { /** * The main SSD Array object*/ protected final SSDArray array; /** * Opening object brackets*/ private final char oOB = '{'; /** * Closing object brackets*/ private final char cOB = '}'; /** * Opening array brackets*/ private final char oAB = '['; /** * Closing array brackets*/ private final char cAB = ']'; /** * Name/value delimiter*/ private final char nvd = ':'; /** * Items delimiter*/ private final char itd = ','; /** * Stores all special words that should * not be read as a string*/ private final String[] words = { "true", "false", "null" }; /** * Creates new instance of SSDF Core.*/ public SSDFCore() { this(""); } /** * Creates new instance of SSDF Core. * @param content The content of SSDF file in SSDF Syntax*/ public SSDFCore(String content) { this.array = getObjects(format(content)); } /** * Creates new instance of SSDF Core. * @param file The file object to read*/ public SSDFCore(File file) { this.array = getObjects(format(getContent(file))); } /** * Creates new instance of SSDF Core. * @param stream Input stream from which all the object * should be read. * @since 1.1*/ public SSDFCore(InputStream stream) { this.array = getObjects(format(fromStream(stream, "UTF-8"))); } /** * Creates new instance of SSDF Core. * @param array Object of SSDArray containing all the object. 
* @since 1.1*/ public SSDFCore(SSDArray array) { this.array = array; } /** * Gets the content of the given file. * @param file The file object from where to get the * content * @return The content of the given file*/ private String getContent(File file) { StringBuilder builder = new StringBuilder(); try(BufferedReader reader = new BufferedReader( new FileReader(file))) { String line = ""; while((line = reader.readLine()) != null) { builder.append(line).append("\n"); } return builder.toString(); } catch(Exception ex) { } return null; } /** * Gets a string from an input stream. The output string is decoded * to the given charset. * @param stream Input stream from which the string should be read. * @param charset Name of the charset * @return String from the given input stream. * @since 1.1*/ private String fromStream(InputStream stream, String charset) { if(stream == null) return null; try { StringBuilder builder = new StringBuilder(); CharsetDecoder decoder = Charset.forName(charset).newDecoder(); byte[] buffer = new byte[8192]; int read; while((read = stream.read(buffer)) != -1) { CharBuffer cbuffer = decoder.decode( ByteBuffer.wrap(Arrays.copyOf(buffer, read))); builder.append(cbuffer.array()); } return builder.toString(); } catch(Exception ex) { } finally { try { stream.close(); } catch(Exception ex) { } } return null; } /** * Formats the string. It removes useless and not * important spaces, breaks (line delimiters) and * other characters. 
* @param string The string to format * @return The formatted string*/ private String format(String string) { // Is double-quoted boolean idq = false; // Is single-quoted boolean isq = false; // Is escaped boolean esc = false; // Is line-commented boolean islc = false; // Is block-commented boolean isbc = false; /* How many characters should be * skipped*/ int skip = 0; char[] chars = string.toCharArray(); StringBuilder sb = new StringBuilder(); for(int p = 0, l = string.length(); p < l; p++) { // Characters skipping if(skip > 0) { skip--; continue; } // Gets character on the position char c = chars[p]; // Quoting if(c == '\"' && !isq && !esc) idq = !idq; if(c == '\'' && !idq && !esc) isq = !isq; // Line comments support if(c == '#' && !(idq || isq)) islc = true; if((c == '\n' || c == '\r') && islc) islc = false; // Block comments support if((c == '/' && chars[p+1] == '*') && !(idq || isq)) { isbc = true; continue; } if((c == '*' && chars[p+1] == '/') && !(idq || isq)) { isbc = false; skip = 1; continue; } // Ignores special characters or comments if(((c == ' ' || c == '\t' || c == '\n' || c == '\r') && !(idq || isq)) || islc || isbc) continue; // Adds the character sb.append(c); /* Removes the escaping. This allows to escape * only one character at the time.*/ if(esc) esc = false; // Escapes the next character if(c == '\\') esc = true; } return sb.toString(); } /** * Formats object's name. * @param name The object's name to format * @return The formatted object's name*/ private String formatName(String name) { List<String> list = SSDFUtils.regex("([A-Za-z0-9\\_]+)", name); return list.isEmpty() ? null : list.get(0); } /** * Formats object's value. 
* @param value The object's value to format * @return The formatted object's value*/ private String formatValue(String value) { // Is double-quoted boolean idq = false; // Is single-quoted boolean isq = false; // Is escaped boolean esc = false; // Is a digit boolean dig = false; // Special words boolean add = false; int addInt = 0; char[] chars = value.toCharArray(); StringBuilder sb = new StringBuilder(); for(int p = 0, l = value.length(); p < l; p++) { char c = chars[p]; if(p == 0 && Character.isDigit(c)) dig = true; // Quoting if(c == '\"' && !isq && !esc) idq = !idq; if(c == '\'' && !idq && !esc) isq = !isq; // Escapes the next character if(c == '\\') { esc = true; continue; } if(!dig && !(isq || idq) && !add) { int k = 0; for(String word : words) { int f = word.length(); if(((k = p)+f) <= l) { for(int i = 0; i < f; i++) { if(word.charAt(i) != chars[k++]) break; if(i == f-1) add = true; } if(add) { addInt = f; break; } } } } if((dig && (Character.isDigit(c) || c == '.')) || (isq || idq) || ((!idq && c == '\"') || (!isq && c == '\'')) || (add && addInt-- > 0)) sb.append(c); /* Removes the escaping. This allows to escape * only one character at the time.*/ if(esc) esc = false; } return sb.toString(); } /** * Gets all objects in the given SSD File's content string. * @param string The SSD file's content * @return The Map (list) of all read objects*/ private SSDArray getObjects(String string) { return getObjects(getBracketsContent(string, oOB, cOB), "", false); } /** * Gets content between the two given brackets' characters. 
* @param string The string where to get the content * @param openBrackets The character of opening brackets * @param closeBrackets The character of closing brackets * @return The content between the two given brackets' characters*/ private String getBracketsContent(String string, char openBrackets, char closeBrackets) { int b = 0; int l = 0; char[] chars = string.toCharArray(); StringBuilder sb = new StringBuilder(); for(int p = 0, m = string.length(); p < m; p++) { char c = chars[p]; if(!(b == 0 && c == openBrackets) && !(b == 1 && c == closeBrackets)) sb.append(c); if(c == openBrackets) b++; if(c == closeBrackets) b--; if(b == 0) break; } return SSDFUtils.substringEnd(sb.toString(), 0, l); } /** * Gets all objects in the given SSD File's content string with * pre-defined parent name of the all read objects and information * if the parent object is an array, or not. * @param string The SSD file's content * @param parentName The object's parent name * @param array If true, the parent object is an array, otherwise is not * @return The Map (list) of all read objects*/ private SSDArray getObjects(String string, String parentName, boolean array) { // Is double-quoted boolean idq = false; // Is single-quoted boolean isq = false; // Is escaped boolean esc = false; // Can write name boolean wn = !array; // Can write value boolean wv = array; SSDArray ssdArray = new SSDArray(); String lastName = ""; int lastCount = 0; char[] chars = string.toCharArray(); StringBuilder sb = new StringBuilder(); for(int p = 0, l = string.length(); p < l; p++) { char c = chars[p]; if(wn || wv) sb.append(c); if(c == nvd && !(idq || isq)) { lastName = formatName(sb.toString()); sb.setLength(0); wn = false; wv = true; } if((c == itd && !(idq || isq)) || p == l-1) { String name = parentName + (parentName.isEmpty() ? "" : ".") + (array ? 
Integer.toString(lastCount++) : lastName); ssdArray.put(name, new SSDObject(name, formatValue(sb.toString()))); sb.setLength(0); wn = !array; wv = array; } if((c == oOB || c == oAB) && !(idq || isq)) { String content = getBracketsContent(string.substring(p), c == oAB ? oAB : oOB, c == oAB ? cAB : cOB); SSDArray ssdar = getObjects(content, parentName + (parentName.isEmpty() ? "" : ".") + (array ? Integer.toString(lastCount++) : lastName), c == oAB); ssdArray.putAll(ssdar.getObjects()); p += content.length()+2; continue; } // Quoting if(c == '\"' && !isq && !esc) idq = !idq; if(c == '\'' && !idq && !esc) isq = !isq; /* Removes the escaping. This allows to escape * only one character at the time.*/ if(esc) esc = false; // Escapes the next character if(c == '\\') esc = true; } return ssdArray; } /** * Gets the content (all the objects) as a string. * @return The content as a string.*/ public String getContentString() { return getContentString(false); } /** * Gets the content (all the objects) as a string. * @param compress Whether to use compress mode or not. 
 * @return The content as a string.*/
public String getContentString(boolean compress) {
	StringBuilder sb = new StringBuilder();
	// Every ancestor path of a dotted key ("a.b" for "a.b.c") becomes a
	// container entry; discovered here, serialized by getArrayContentString.
	Map<String, SSDArray> arrays = new LinkedHashMap<>();
	Map<String, SSDObject> objects = array.getAllObjects();
	for(Entry<String, SSDObject> entry : objects.entrySet()) {
		String keyPath = entry.getKey();
		String[] splitKey = keyPath.split("\\.");
		// Repeatedly strip the last path segment, registering each prefix once.
		// Loop terminates when keyPath has a single segment left.
		while(splitKey.length > 1) {
			splitKey = keyPath.split("\\.");
			keyPath = String.join(".", Arrays.copyOfRange(
				splitKey, 0, splitKey.length-1)).trim();
			if(!keyPath.isEmpty() && !arrays.containsKey(keyPath))
				arrays.put(keyPath, new SSDArray(keyPath));
		}
	}
	sb.append("{");
	if(!compress) sb.append("\n");
	boolean isFirstItem = true;
	// Tracks whether any top-level scalar was emitted, so a separating comma
	// is written before the nested-container section only when needed.
	boolean dataWritten = false;
	// Emit top-level scalars: single-segment keys that are not containers.
	for(Entry<String, SSDObject> entry : objects.entrySet()) {
		String objectName = entry.getKey();
		String[] splitKey = objectName.split("\\.");
		if(splitKey.length == 1 && !arrays.containsKey(objectName)) {
			if(!isFirstItem) {
				sb.append(",");
				if(!compress) sb.append("\n");
			} else {
				isFirstItem = false;
			}
			SSDObject object = entry.getValue();
			SSDType objectType = object.type();
			String objectValue = object.stringValue();
			if(!compress) sb.append("\t");
			sb.append(objectName);
			sb.append(":");
			if(!compress) sb.append(" ");
			if(objectType == SSDType.STRING) {
				// String values are quoted; other types are written verbatim.
				sb.append("\"");
				sb.append(objectValue);
				sb.append("\"");
			} else {
				sb.append(objectType == SSDType.UNDEFINED ? "null" : objectValue);
			}
			if(!dataWritten) dataWritten = true;
		}
	}
	// Serialize all nested containers, starting at depth 1 with an empty prefix.
	String content = getArrayContentString(
		"", arrays, 1, false, false, compress);
	if(!content.isEmpty() && dataWritten) {
		sb.append(",");
		if(!compress) sb.append("\n\n");
	}
	sb.append(content);
	if(!compress) sb.append("\n");
	sb.append("}");
	return sb.toString();
}

/**
 * Gets the next depth level of stored objects and converts them
 * to a string. Called recursively, one level of key-path depth per call.
 * @param startsWith The specified starting name (key-path prefix) of objects that
 * should be contained in the final result.
 * @param arrays The map of all the arrays (container key-paths)
 * @param depth The depth level (number of key-path segments at this level)
 * @param wasItems Whether there were some written items or not (controls the
 * leading comma separator).
 * @param inArray Whether the current content is in an array (suppresses the
 * container-name prefix, since array elements are unnamed).
 * @param compress Whether to use compress mode (no whitespace) or not.
 * @return The formatted string of the objects.*/
private String getArrayContentString(String startsWith, Map<String, SSDArray> arrays, int depth, boolean wasItems, boolean inArray, boolean compress) {
	StringBuilder sb = new StringBuilder();
	Map<String, SSDObject> objects = array.getAllObjects();
	boolean isFirstArray = true;
	// NOTE(review): the loop variable shadows the class's `array` field;
	// `objects` was captured above, so behavior is unaffected.
	for(Entry<String, SSDArray> array : arrays.entrySet()) {
		String arrayKey = array.getKey();
		String[] splitArrayKey = arrayKey.split("\\.");
		// Only handle containers that live exactly at this depth under the prefix.
		if(arrayKey.startsWith(startsWith) && splitArrayKey.length == depth) {
			String arrayName = splitArrayKey[splitArrayKey.length-1];
			String arrayTab = SSDFUtils.repeatString("\t", depth);
			// A container is rendered as an array "[...]" iff every direct
			// child segment at this depth is purely numeric; otherwise "{...}".
			boolean isArray = true;
			for(Entry<String, SSDObject> object : objects.entrySet()) {
				String objectKey = object.getKey();
				String[] splitObjectKey = objectKey.split("\\.");
				if(objectKey.startsWith(startsWith) && depth < splitObjectKey.length) {
					String formatName = splitObjectKey[depth];
					if(!Pattern.matches("^\\d+$", formatName)) {
						isArray = false;
						break;
					}
				}
			}
			if(!isFirstArray || wasItems) {
				sb.append(",");
				if(!compress) sb.append("\n\n");
			}
			if(isFirstArray) isFirstArray = false;
			if(!inArray) {
				// Named container: emit "name:" before the bracket.
				if(!compress) sb.append(arrayTab);
				sb.append(arrayName);
				sb.append(":");
				if(!compress) sb.append("\n");
			}
			if(!compress) sb.append(arrayTab);
			sb.append(isArray ? "[" : "{");
			if(!compress) sb.append("\n");
			boolean isFirstItem = true;
			// Emit the scalar children that sit directly inside this container.
			for(Entry<String, SSDObject> object : objects.entrySet()) {
				String objectKey = object.getKey();
				String[] splitObjectKey = objectKey.split("\\.");
				if(splitObjectKey.length == depth+1) {
					String objectPath = String.join(".", Arrays.copyOfRange(splitObjectKey, 0, splitObjectKey.length-1));
					String objectTab = SSDFUtils.repeatString("\t", depth+1);
					String objectName = splitObjectKey[splitObjectKey.length-1];
					if(arrayKey.equals(objectPath)) {
						if(!isFirstItem) {
							sb.append(",");
							if(!compress) sb.append("\n");
						} else {
							isFirstItem = false;
						}
						if(!compress) sb.append(objectTab);
						if(!isArray) {
							// Object members are named; array elements are not.
							sb.append(objectName);
							sb.append(":");
							if(!compress) sb.append(" ");
						}
						SSDObject objectVal = object.getValue();
						SSDType objectType = objectVal.type();
						String objectValue = objectVal.stringValue();
						if(objectType == SSDType.STRING) {
							sb.append("\"");
							sb.append(objectValue);
							sb.append("\"");
						} else {
							sb.append(objectType == SSDType.UNDEFINED ? "null" : objectValue);
						}
					}
				}
			}
			// Recurse for containers nested one level deeper under this key.
			sb.append(getArrayContentString(
				arrayKey, arrays, depth+1, !isFirstItem, isArray, compress));
			if(!compress) {
				sb.append("\n");
				sb.append(arrayTab);
			}
			sb.append(isArray ? "]" : "}");
		}
	}
	return sb.toString();
}

/**
 * Gets the main array that contains all the objects.
 * @return The main array object*/
public SSDArray getArray() {
	return array;
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WALSplitter; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class abstracts a bunch of operations the HMaster needs * when splitting log files e.g. finding log files, dirs etc. 
 */
@InterfaceAudience.Private
public class MasterWalManager {
  private static final Logger LOG = LoggerFactory.getLogger(MasterWalManager.class);

  /**
   * Filter *in* WAL files that are for the hbase:meta Region.
   */
  final static PathFilter META_FILTER = new PathFilter() {
    @Override
    public boolean accept(Path p) {
      return AbstractFSWALProvider.isMetaFile(p);
    }
  };

  /**
   * Filter *out* WAL files that are for the hbase:meta Region; i.e. return user-space WALs only.
   */
  public final static PathFilter NON_META_FILTER = new PathFilter() {
    @Override
    public boolean accept(Path p) {
      return !AbstractFSWALProvider.isMetaFile(p);
    }
  };

  // metrics for master
  // TODO: Rename it, since those metrics are split-manager related
  private final MetricsMasterFileSystem metricsMasterFilesystem = new MetricsMasterFileSystem();

  // Keep around for convenience.
  private final MasterServices services;
  private final Configuration conf;
  private final FileSystem fs;

  // The Path to the old logs dir
  private final Path oldLogDir;
  private final Path rootDir;

  // create the split log lock; guards getLogDirs() during master initialization
  private final Lock splitLogLock = new ReentrantLock();

  /**
   * Superseded by {@link SplitWALManager}; i.e. procedure-based WAL splitting rather than
   * 'classic' zk-coordinated WAL splitting.
   * @deprecated since 2.3.0 and 3.0.0 to be removed in 4.0.0; replaced by {@link SplitWALManager}.
   * @see SplitWALManager
   */
  @Deprecated
  private final SplitLogManager splitLogManager;

  // Is the filesystem ok?
  private volatile boolean fsOk = true;

  public MasterWalManager(MasterServices services) throws IOException {
    this(services.getConfiguration(), services.getMasterFileSystem().getWALFileSystem(), services);
  }

  public MasterWalManager(Configuration conf, FileSystem fs, MasterServices services)
      throws IOException {
    this.fs = fs;
    this.conf = conf;
    this.rootDir = CommonFSUtils.getWALRootDir(conf);
    this.services = services;
    this.splitLogManager = new SplitLogManager(services, conf);
    this.oldLogDir = new Path(rootDir, HConstants.HREGION_OLDLOGDIR_NAME);
  }

  public void stop() {
    if (splitLogManager != null) {
      splitLogManager.stop();
    }
  }

  SplitLogManager getSplitLogManager() {
    return this.splitLogManager;
  }

  /**
   * Get the directory where old logs go
   * @return the dir
   */
  Path getOldLogDir() {
    return this.oldLogDir;
  }

  public FileSystem getFileSystem() {
    return this.fs;
  }

  /**
   * Checks to see if the file system is still accessible.
   * If not, sets closed (aborts the master and latches {@code fsOk} false).
   * @return false if file system is not available
   */
  private boolean checkFileSystem() {
    if (this.fsOk) {
      try {
        FSUtils.checkFileSystemAvailable(this.fs);
        FSUtils.checkDfsSafeMode(this.conf);
      } catch (IOException e) {
        services.abort("Shutting down HBase cluster: file system not available", e);
        this.fsOk = false;
      }
    }
    return this.fsOk;
  }

  /**
   * Get Servernames which are currently splitting; paths have a '-splitting' suffix.
   * @return ServerName
   * @throws IOException IOException
   */
  public Set<ServerName> getSplittingServersFromWALDir() throws IOException {
    return getServerNamesFromWALDirPath(
      p -> p.getName().endsWith(AbstractFSWALProvider.SPLITTING_EXT));
  }

  /**
   * Get Servernames that COULD BE 'alive'; excludes those that have a '-splitting' suffix as these
   * are already being split -- they cannot be 'alive'.
   * @return ServerName
   * @throws IOException IOException
   */
  public Set<ServerName> getLiveServersFromWALDir() throws IOException {
    return getServerNamesFromWALDirPath(
      p -> !p.getName().endsWith(AbstractFSWALProvider.SPLITTING_EXT));
  }

  /**
   * @return listing of ServerNames found by parsing WAL directory paths in FS.
   */
  public Set<ServerName> getServerNamesFromWALDirPath(final PathFilter filter) throws IOException {
    FileStatus[] walDirForServerNames = getWALDirPaths(filter);
    return Stream.of(walDirForServerNames).map(s -> {
      ServerName serverName = AbstractFSWALProvider.getServerNameFromWALDirectoryName(s.getPath());
      if (serverName == null) {
        // Unparseable dir names are warned about and skipped, not failed on.
        LOG.warn("Log folder {} doesn't look like its name includes a " +
          "region server name; leaving in place. If you see later errors about missing " +
          "write ahead logs they may be saved in this location.", s.getPath());
        return null;
      }
      return serverName;
    }).filter(s -> s != null).collect(Collectors.toSet());
  }

  /**
   * @return List of all RegionServer WAL dirs; i.e. this.rootDir/HConstants.HREGION_LOGDIR_NAME.
   */
  public FileStatus[] getWALDirPaths(final PathFilter filter) throws IOException {
    Path walDirPath = new Path(CommonFSUtils.getWALRootDir(conf), HConstants.HREGION_LOGDIR_NAME);
    FileStatus[] walDirForServerNames = CommonFSUtils.listStatus(fs, walDirPath, filter);
    return walDirForServerNames == null? new FileStatus[0]: walDirForServerNames;
  }

  /**
   * Inspect the log directory to find dead servers which need recovery work
   * @return A set of ServerNames which aren't running but still have WAL files left in file system
   * @deprecated With proc-v2, we can record the crash server with procedure store, so do not need
   *   to scan the wal directory to find out the splitting wal directory any more. Leave
   *   it here only because {@code RecoverMetaProcedure}(which is also deprecated) uses
   *   it.
   */
  @Deprecated
  public Set<ServerName> getFailedServersFromLogFolders() throws IOException {
    boolean retrySplitting = !conf.getBoolean(WALSplitter.SPLIT_SKIP_ERRORS_KEY,
      WALSplitter.SPLIT_SKIP_ERRORS_DEFAULT);
    Set<ServerName> serverNames = new HashSet<>();
    Path logsDirPath = new Path(this.rootDir, HConstants.HREGION_LOGDIR_NAME);
    do {
      if (services.isStopped()) {
        LOG.warn("Master stopped while trying to get failed servers.");
        break;
      }
      try {
        if (!this.fs.exists(logsDirPath)) return serverNames;
        FileStatus[] logFolders = CommonFSUtils.listStatus(this.fs, logsDirPath, null);
        // Get online servers after getting log folders to avoid log folder deletion of newly
        // checked in region servers . see HBASE-5916
        Set<ServerName> onlineServers = services.getServerManager().getOnlineServers().keySet();
        if (logFolders == null || logFolders.length == 0) {
          LOG.debug("No log files to split, proceeding...");
          return serverNames;
        }
        for (FileStatus status : logFolders) {
          FileStatus[] curLogFiles = CommonFSUtils.listStatus(this.fs, status.getPath(), null);
          if (curLogFiles == null || curLogFiles.length == 0) {
            // Empty log folder. No recovery needed
            continue;
          }
          final ServerName serverName = AbstractFSWALProvider.getServerNameFromWALDirectoryName(
            status.getPath());
          if (null == serverName) {
            LOG.warn("Log folder " + status.getPath() + " doesn't look like its name includes a " +
              "region server name; leaving in place. If you see later errors about missing " +
              "write ahead logs they may be saved in this location.");
          } else if (!onlineServers.contains(serverName)) {
            // Dir belongs to a server we don't know about: a dead server to recover.
            LOG.info("Log folder " + status.getPath() + " doesn't belong " +
              "to a known region server, splitting");
            serverNames.add(serverName);
          } else {
            LOG.info("Log folder " + status.getPath() + " belongs to an existing region server");
          }
        }
        retrySplitting = false;
      } catch (IOException ioe) {
        LOG.warn("Failed getting failed servers to be recovered.", ioe);
        if (!checkFileSystem()) {
          LOG.warn("Bad Filesystem, exiting");
          Runtime.getRuntime().halt(1);
        }
        try {
          if (retrySplitting) {
            Thread.sleep(conf.getInt("hbase.hlog.split.failure.retry.interval", 30 * 1000));
          }
        } catch (InterruptedException e) {
          LOG.warn("Interrupted, aborting since cannot return w/o splitting");
          // Restore the interrupt status before halting.
          Thread.currentThread().interrupt();
          retrySplitting = false;
          Runtime.getRuntime().halt(1);
        }
      }
    } while (retrySplitting);
    return serverNames;
  }

  public void splitLog(final ServerName serverName) throws IOException {
    splitLog(Collections.<ServerName>singleton(serverName));
  }

  /**
   * Specialized method to handle the splitting for meta WAL
   * @param serverName logs belonging to this server will be split
   */
  public void splitMetaLog(final ServerName serverName) throws IOException {
    splitMetaLog(Collections.<ServerName>singleton(serverName));
  }

  /**
   * Specialized method to handle the splitting for meta WAL
   * @param serverNames logs belonging to these servers will be split
   */
  public void splitMetaLog(final Set<ServerName> serverNames) throws IOException {
    splitLog(serverNames, META_FILTER);
  }

  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="UL_UNRELEASED_LOCK", justification=
      "We only release this lock when we set it. Updates to code that uses it should verify use " +
      "of the guard boolean.")
  List<Path> getLogDirs(final Set<ServerName> serverNames) throws IOException {
    List<Path> logDirs = new ArrayList<>();
    boolean needReleaseLock = false;
    if (!this.services.isInitialized()) {
      // during master initialization, we could have multiple places splitting a same wal
      // XXX: Does this still exist after we move to proc-v2?
      this.splitLogLock.lock();
      needReleaseLock = true;
    }
    try {
      for (ServerName serverName : serverNames) {
        Path logDir = new Path(this.rootDir,
          AbstractFSWALProvider.getWALDirectoryName(serverName.toString()));
        Path splitDir = logDir.suffix(AbstractFSWALProvider.SPLITTING_EXT);
        // Rename the directory so a rogue RS doesn't create more WALs
        if (fs.exists(logDir)) {
          if (!this.fs.rename(logDir, splitDir)) {
            throw new IOException("Failed fs.rename for log split: " + logDir);
          }
          logDir = splitDir;
          LOG.debug("Renamed region directory: " + splitDir);
        } else if (!fs.exists(splitDir)) {
          // Neither the live dir nor a leftover '-splitting' dir: nothing to split.
          LOG.info("Log dir for server " + serverName + " does not exist");
          continue;
        }
        logDirs.add(splitDir);
      }
    } catch (IOException ioe) {
      if (!checkFileSystem()) {
        this.services.abort("Aborting due to filesystem unavailable", ioe);
        throw ioe;
      }
    } finally {
      if (needReleaseLock) {
        this.splitLogLock.unlock();
      }
    }
    return logDirs;
  }

  public void splitLog(final Set<ServerName> serverNames) throws IOException {
    splitLog(serverNames, NON_META_FILTER);
  }

  /**
   * This method is the base split method that splits WAL files matching a filter. Callers should
   * pass the appropriate filter for meta and non-meta WALs.
   * @param serverNames logs belonging to these servers will be split; this will rename the log
   *                    directory out from under a soft-failed server
   */
  public void splitLog(final Set<ServerName> serverNames, PathFilter filter) throws IOException {
    long splitTime = 0, splitLogSize = 0;
    List<Path> logDirs = getLogDirs(serverNames);
    splitLogManager.handleDeadWorkers(serverNames);
    splitTime = EnvironmentEdgeManager.currentTime();
    splitLogSize = splitLogManager.splitLogDistributed(serverNames, logDirs, filter);
    splitTime = EnvironmentEdgeManager.currentTime() - splitTime;
    if (this.metricsMasterFilesystem != null) {
      // Record split duration/size under the meta or user-space metric by filter identity.
      if (filter == META_FILTER) {
        this.metricsMasterFilesystem.addMetaWALSplit(splitTime, splitLogSize);
      } else {
        this.metricsMasterFilesystem.addSplit(splitTime, splitLogSize);
      }
    }
  }

  /**
   * The hbase:meta region may OPEN and CLOSE without issue on a server and then move elsewhere.
   * On CLOSE, the WAL for the hbase:meta table may not be archived yet (The WAL is only needed if
   * hbase:meta did not close cleanly). Since meta region is no longer on this server,
   * the ServerCrashProcedure won't split these leftover hbase:meta WALs, just leaving them in
   * the WAL splitting dir. If we try to delete the WAL splitting for the server, it fail since
   * the dir is not totally empty. We can safely archive these hbase:meta log; then the
   * WAL dir can be deleted.
   * @param serverName the server to archive meta log
   */
  public void archiveMetaLog(final ServerName serverName) {
    try {
      Path logDir = new Path(this.rootDir,
        AbstractFSWALProvider.getWALDirectoryName(serverName.toString()));
      Path splitDir = logDir.suffix(AbstractFSWALProvider.SPLITTING_EXT);
      if (fs.exists(splitDir)) {
        FileStatus[] logfiles = CommonFSUtils.listStatus(fs, splitDir, META_FILTER);
        if (logfiles != null) {
          for (FileStatus status : logfiles) {
            if (!status.isDir()) {
              Path newPath = AbstractFSWAL.getWALArchivePath(this.oldLogDir, status.getPath());
              if (!CommonFSUtils.renameAndSetModifyTime(fs, status.getPath(), newPath)) {
                // Best-effort: failure to move one file is logged, not fatal.
                LOG.warn("Unable to move " + status.getPath() + " to " + newPath);
              } else {
                LOG.debug("Archived meta log " + status.getPath() + " to " + newPath);
              }
            }
          }
        }
        if (!fs.delete(splitDir, false)) {
          LOG.warn("Unable to delete log dir. Ignoring. " + splitDir);
        }
      }
    } catch (IOException ie) {
      LOG.warn("Failed archiving meta log for server " + serverName, ie);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.operators.sort;

import static org.junit.Assert.*;

import java.util.List;
import java.util.Random;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemoryType;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.operators.testutils.DummyInvokable;
import org.apache.flink.util.MutableObjectIterator;

import org.junit.Test;

/**
 * Tests for {@link LargeRecordHandler}: lifecycle (close is idempotent, adding
 * after close/finish fails), sorting by a single key, and sorting by a
 * composite key. Each test verifies all memory is returned to the
 * {@link MemoryManager} afterwards.
 */
public class LargeRecordHandlerTest {

	/**
	 * A handler that never received a record must report no data, tolerate
	 * repeated close() calls, reject addRecord() after close, and leak no memory.
	 */
	@Test
	public void testEmptyRecordHandler() {
		final IOManager ioMan = new IOManagerAsync();
		final int PAGE_SIZE = 4 * 1024;
		final int NUM_PAGES = 50;

		try {
			final MemoryManager memMan = new MemoryManager(NUM_PAGES * PAGE_SIZE, 1, PAGE_SIZE, MemoryType.HEAP, true);
			final AbstractInvokable owner = new DummyInvokable();

			final List<MemorySegment> memory = memMan.allocatePages(owner, NUM_PAGES);

			final TupleTypeInfo<Tuple2<Long, String>> typeInfo =
					(TupleTypeInfo<Tuple2<Long, String>>) TypeInformation.of(new TypeHint<Tuple2<Long, String>>(){});

			final TypeSerializer<Tuple2<Long, String>> serializer = typeInfo.createSerializer(new ExecutionConfig());
			final TypeComparator<Tuple2<Long, String>> comparator = typeInfo.createComparator(
					new int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());

			LargeRecordHandler<Tuple2<Long, String>> handler = new LargeRecordHandler<Tuple2<Long, String>>(
					serializer, comparator, ioMan, memMan, memory, owner, 128);

			assertFalse(handler.hasData());

			// close() must be idempotent: calling it twice is safe.
			handler.close();
			assertFalse(handler.hasData());
			handler.close();

			// Adding a record after close must fail.
			try {
				handler.addRecord(new Tuple2<Long, String>(92L, "peter pepper"));
				fail("should throw an exception");
			} catch (IllegalStateException e) {
				// expected
			}

			// All pages must have been released back to the memory manager.
			assertTrue(memMan.verifyEmpty());
		} catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		} finally {
			ioMan.shutdown();
		}
	}

	/**
	 * Writes random records, sorts on the single long key (field 0), and checks
	 * the output is non-decreasing with key and value consistent. Also checks
	 * addRecord() fails both after finishWriteAndSortKeys() and after close().
	 */
	@Test
	public void testRecordHandlerSingleKey() {
		final IOManager ioMan = new IOManagerAsync();
		final int PAGE_SIZE = 4 * 1024;
		final int NUM_PAGES = 24;
		final int NUM_RECORDS = 25000;

		try {
			final MemoryManager memMan = new MemoryManager(NUM_PAGES * PAGE_SIZE, 1, PAGE_SIZE, MemoryType.HEAP, true);
			final AbstractInvokable owner = new DummyInvokable();

			// Split the pages between the write phase and the sort phase.
			final List<MemorySegment> initialMemory = memMan.allocatePages(owner, 6);
			final List<MemorySegment> sortMemory = memMan.allocatePages(owner, NUM_PAGES - 6);

			final TupleTypeInfo<Tuple2<Long, String>> typeInfo =
					(TupleTypeInfo<Tuple2<Long, String>>) TypeInformation.of(new TypeHint<Tuple2<Long, String>>(){});

			final TypeSerializer<Tuple2<Long, String>> serializer = typeInfo.createSerializer(new ExecutionConfig());
			final TypeComparator<Tuple2<Long, String>> comparator = typeInfo.createComparator(
					new int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());

			LargeRecordHandler<Tuple2<Long, String>> handler = new LargeRecordHandler<Tuple2<Long, String>>(
					serializer, comparator, ioMan, memMan, initialMemory, owner, 128);

			assertFalse(handler.hasData());

			// add the test data
			Random rnd = new Random();

			for (int i = 0; i < NUM_RECORDS; i++) {
				long val = rnd.nextLong();
				handler.addRecord(new Tuple2<Long, String>(val, String.valueOf(val)));
				assertTrue(handler.hasData());
			}

			MutableObjectIterator<Tuple2<Long, String>> sorted = handler.finishWriteAndSortKeys(sortMemory);

			// Once sorting has started, no more records may be added.
			try {
				handler.addRecord(new Tuple2<Long, String>(92L, "peter pepper"));
				fail("should throw an exception");
			} catch (IllegalStateException e) {
				// expected
			}

			Tuple2<Long, String> previous = null;
			Tuple2<Long, String> next;

			while ((next = sorted.next(null)) != null) {
				// key and value must be equal
				assertTrue(next.f0.equals(Long.parseLong(next.f1)));

				// order must be correct
				if (previous != null) {
					assertTrue(previous.f0 <= next.f0);
				}
				previous = next;
			}

			handler.close();

			assertFalse(handler.hasData());

			handler.close();

			try {
				handler.addRecord(new Tuple2<Long, String>(92L, "peter pepper"));
				fail("should throw an exception");
			} catch (IllegalStateException e) {
				// expected
			}

			assertTrue(memMan.verifyEmpty());
		} catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		} finally {
			ioMan.shutdown();
		}
	}

	/**
	 * Same as the single-key test, but sorts on a composite key (byte field 2,
	 * then long field 0) and checks the compound ordering of the output.
	 */
	@Test
	public void testRecordHandlerCompositeKey() {
		final IOManager ioMan = new IOManagerAsync();
		final int PAGE_SIZE = 4 * 1024;
		final int NUM_PAGES = 24;
		final int NUM_RECORDS = 25000;

		try {
			final MemoryManager memMan = new MemoryManager(NUM_PAGES * PAGE_SIZE, 1, PAGE_SIZE, MemoryType.HEAP, true);
			final AbstractInvokable owner = new DummyInvokable();

			final List<MemorySegment> initialMemory = memMan.allocatePages(owner, 6);
			final List<MemorySegment> sortMemory = memMan.allocatePages(owner, NUM_PAGES - 6);

			final TupleTypeInfo<Tuple3<Long, String, Byte>> typeInfo =
					(TupleTypeInfo<Tuple3<Long, String, Byte>>) TypeInformation.of(new TypeHint<Tuple3<Long, String, Byte>>(){});

			final TypeSerializer<Tuple3<Long, String, Byte>> serializer = typeInfo.createSerializer(new ExecutionConfig());
			// Primary sort key is field 2 (the byte), secondary key is field 0 (the long).
			final TypeComparator<Tuple3<Long, String, Byte>> comparator = typeInfo.createComparator(
					new int[] {2, 0}, new boolean[] {true, true}, 0, new ExecutionConfig());

			LargeRecordHandler<Tuple3<Long, String, Byte>> handler = new LargeRecordHandler<Tuple3<Long, String, Byte>>(
					serializer, comparator, ioMan, memMan, initialMemory, owner, 128);

			assertFalse(handler.hasData());

			// add the test data
			Random rnd = new Random();

			for (int i = 0; i < NUM_RECORDS; i++) {
				long val = rnd.nextLong();
				handler.addRecord(new Tuple3<Long, String, Byte>(val, String.valueOf(val), (byte) val));
				assertTrue(handler.hasData());
			}

			MutableObjectIterator<Tuple3<Long, String, Byte>> sorted = handler.finishWriteAndSortKeys(sortMemory);

			try {
				handler.addRecord(new Tuple3<Long, String, Byte>(92L, "peter pepper", (byte) 1));
				fail("should throw an exception");
			} catch (IllegalStateException e) {
				// expected
			}

			Tuple3<Long, String, Byte> previous = null;
			Tuple3<Long, String, Byte> next;

			while ((next = sorted.next(null)) != null) {
				// key and value must be equal
				assertTrue(next.f0.equals(Long.parseLong(next.f1)));
				assertTrue(next.f0.byteValue() == next.f2);

				// order must be correct: byte key first, long key breaks ties
				if (previous != null) {
					assertTrue(previous.f2 <= next.f2);
					assertTrue(previous.f2.byteValue() != next.f2.byteValue() || previous.f0 <= next.f0);
				}
				previous = next;
			}

			handler.close();

			assertFalse(handler.hasData());

			handler.close();

			try {
				handler.addRecord(new Tuple3<Long, String, Byte>(92L, "peter pepper", (byte) 1));
				fail("should throw an exception");
			} catch (IllegalStateException e) {
				// expected
			}

			assertTrue(memMan.verifyEmpty());
		} catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		} finally {
			ioMan.shutdown();
		}
	}
}
/*
 * PowerAuth Crypto Library
 * Copyright 2018 Wultra s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.getlime.security.powerauth.crypto.lib.encryptor.ecies;

import com.google.common.primitives.Bytes;
import io.getlime.security.powerauth.crypto.lib.encryptor.ecies.exception.EciesException;
import io.getlime.security.powerauth.crypto.lib.encryptor.ecies.model.EciesCryptogram;
import io.getlime.security.powerauth.crypto.lib.generator.KeyGenerator;
import io.getlime.security.powerauth.crypto.lib.model.exception.CryptoProviderException;
import io.getlime.security.powerauth.crypto.lib.model.exception.GenericCryptoException;
import io.getlime.security.powerauth.crypto.lib.util.AESEncryptionUtils;
import io.getlime.security.powerauth.crypto.lib.util.HMACHashUtilities;
import io.getlime.security.powerauth.crypto.lib.util.KeyConvertor;
import io.getlime.security.powerauth.crypto.lib.util.SideChannelUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.crypto.SecretKey;
import java.security.InvalidKeyException;
import java.security.PublicKey;
import java.security.interfaces.ECPublicKey;

/**
 * Class implementing an ECIES encryptor. One instance handles a single
 * request/response round trip: encryptRequest() flips the instance into
 * decrypt-only mode, and decryptResponse() invalidates it completely.
 *
 * @author Petr Dvorak, petr@wultra.com
 * @author Roman Strobl, roman.strobl@wultra.com
 */
public class EciesEncryptor {

    private static final Logger logger = LoggerFactory.getLogger(EciesEncryptor.class);

    // Underlying implementation classes.
    private final AESEncryptionUtils aes = new AESEncryptionUtils();
    private final HMACHashUtilities hmac = new HMACHashUtilities();
    private final KeyConvertor keyConvertor = new KeyConvertor();
    private final KeyGenerator keyGenerator = new KeyGenerator();

    // Working data storage
    private final PublicKey publicKey;
    private final byte[] sharedInfo1;
    private final byte[] sharedInfo2;
    // Derived lazily in encryptRequest(), or injected via constructor.
    private EciesEnvelopeKey envelopeKey;

    // Lifecycle management: each instance may encrypt at most once and then
    // decrypt at most once, enforced by these flags.
    private boolean canEncryptData;
    private boolean canDecryptData;
    // IV captured during encrypt() and reused for the matching decrypt().
    private byte[] ivForDecryption;

    /**
     * Construct a new encryptor with null sharedInfo1 and sharedInfo2.
     *
     * @param publicKey Public key used for encryption.
     */
    public EciesEncryptor(ECPublicKey publicKey) {
        this(publicKey, null, null);
    }

    /**
     * Construct a new encryptor with provided sharedInfo1 and sharedInfo2.
     *
     * @param publicKey Public key used for encryption.
     * @param sharedInfo1 Additional shared information used during key derivation.
     * @param sharedInfo2 Additional shared information used during decryption.
     */
    public EciesEncryptor(ECPublicKey publicKey, byte[] sharedInfo1, byte[] sharedInfo2) {
        this.publicKey = publicKey;
        this.sharedInfo1 = sharedInfo1;
        this.sharedInfo2 = sharedInfo2;
        this.canEncryptData = true;
        this.canDecryptData = false;
    }

    /**
     * Construct an encryptor from existing ECIES envelope key and sharedInfo2 parameter. The derivation of
     * envelope key is skipped. The privateKey and sharedInfo1 values are unknown. The encryptor can be only
     * used for decrypting the response.
     *
     * @param envelopeKey ECIES envelope key.
     * @param sharedInfo2 Parameter sharedInfo2 for ECIES.
     */
    public EciesEncryptor(EciesEnvelopeKey envelopeKey, byte[] sharedInfo2) {
        this.publicKey = null;
        this.envelopeKey = envelopeKey;
        this.sharedInfo1 = null;
        this.sharedInfo2 = sharedInfo2;
        // Allow decrypt only to avoid accidentally reusing the same encryptor for encryption, a new envelope key with
        // a new ephemeral keypair is always generated for encryption.
        // NOTE(review): ivForDecryption stays null here, yet canDecryptResponse()
        // requires it to be non-null — decryptResponse() would therefore always be
        // rejected for instances built via this constructor. Looks inconsistent
        // with the javadoc above; confirm against callers before relying on it.
        this.canEncryptData = false;
        this.canDecryptData = true;
    }

    /**
     * Encrypt request data. Derives a fresh envelope key from the public key,
     * then delegates to {@link #encrypt(byte[], boolean)}.
     *
     * @param data Request data.
     * @param useIv Controls whether encryption uses non-zero initialization vector for protocol V3.1+.
     * @return ECIES cryptogram.
     * @throws EciesException In case request encryption fails.
     */
    public EciesCryptogram encryptRequest(byte[] data, boolean useIv) throws EciesException {
        if (data == null) {
            throw new EciesException("Parameter data for request encryption is null");
        }
        if (!canEncryptRequest()) {
            throw new EciesException("Request encryption is not allowed");
        }
        envelopeKey = EciesEnvelopeKey.fromPublicKey(publicKey, sharedInfo1);
        return encrypt(data, useIv);
    }

    /**
     * Decrypt response data.
     *
     * @param cryptogram ECIES cryptogram.
     * @return Decrypted data.
     * @throws EciesException In case response decryption fails.
     */
    public byte[] decryptResponse(EciesCryptogram cryptogram) throws EciesException {
        if (cryptogram == null || cryptogram.getEncryptedData() == null || cryptogram.getMac() == null) {
            throw new EciesException("Parameter cryptogram for response decryption is invalid");
        }
        if (!canDecryptResponse()) {
            throw new EciesException("Response decryption is not allowed");
        }
        return decrypt(cryptogram);
    }

    /**
     * Get parameter sharedInfo2 for ECIES.
     * @return Parameter sharedInfo2 for ECIES.
     */
    public byte[] getSharedInfo2() {
        return sharedInfo2;
    }

    /**
     * Get ECIES envelope key.
     * @return ECIES envelope key.
     */
    public EciesEnvelopeKey getEnvelopeKey() {
        return envelopeKey;
    }

    /**
     * Get whether request data can be encrypted.
     *
     * @return Whether request data can be encrypted.
     */
    private boolean canEncryptRequest() {
        return canEncryptData && publicKey != null;
    }

    /**
     * Get whether response data can be decrypted.
     *
     * @return Whether response data can be decrypted.
     */
    private boolean canDecryptResponse() {
        return canDecryptData && envelopeKey.isValid() && ivForDecryption != null;
    }

    /**
     * Encrypt data using ECIES and construct ECIES cryptogram.
     *
     * @param data Data to be encrypted.
     * @param useIv Controls whether encryption uses non-zero initialization vector for protocol V3.1+.
     * @return Encrypted data as cryptogram.
     * @throws EciesException In case AES encryption fails.
     */
    private EciesCryptogram encrypt(byte[] data, boolean useIv) throws EciesException {
        try {
            // Prepare nonce & IV
            final byte[] nonce;
            final byte[] iv;
            if (useIv) {
                // V3.1+, generate random nonce and calculate IV
                nonce = keyGenerator.generateRandomBytes(16);
                iv = envelopeKey.deriveIvForNonce(nonce);
            } else {
                // V2.x, V3.0, use zero IV
                nonce = null;
                iv = new byte[16];
            }

            // Encrypt the data with
            byte[] encKeyBytes = envelopeKey.getEncKey();
            final SecretKey encKey = keyConvertor.convertBytesToSharedSecretKey(encKeyBytes);
            final byte[] encryptedData = aes.encrypt(data, iv, encKey);

            // Compute MAC of the data (encrypt-then-MAC; sharedInfo2 appended when present)
            final byte[] macData = (sharedInfo2 == null ? encryptedData : Bytes.concat(encryptedData, sharedInfo2));
            final byte[] mac = hmac.hash(envelopeKey.getMacKey(), macData);

            // Invalidate this encryptor for encryption; the same IV will be used
            // to decrypt the matching response.
            canEncryptData = false;
            canDecryptData = true;
            ivForDecryption = iv;

            // Return encrypted payload
            return new EciesCryptogram(envelopeKey.getEphemeralKeyPublic(), mac, encryptedData, nonce);
        } catch (InvalidKeyException | GenericCryptoException | CryptoProviderException ex) {
            logger.warn(ex.getMessage(), ex);
            throw new EciesException("Request encryption failed", ex);
        }
    }

    /**
     * Decrypt provided payload using ECIES algorithm and the same secret key as in previous encrypt call, useful for
     * request-response cycle.
     *
     * @param cryptogram Cryptogram to be decrypted.
     * @return Decrypted data.
     * @throws EciesException In case MAC value is invalid or AES decryption fails.
     */
    private byte[] decrypt(EciesCryptogram cryptogram) throws EciesException {
        try {
            // Validate data MAC value before decrypting (constant-time compare
            // to avoid leaking MAC bytes via timing).
            final byte[] macData = (sharedInfo2 == null ? cryptogram.getEncryptedData() : Bytes.concat(cryptogram.getEncryptedData(), sharedInfo2));
            final byte[] mac = hmac.hash(envelopeKey.getMacKey(), macData);
            if (!SideChannelUtils.constantTimeAreEqual(mac, cryptogram.getMac())) {
                throw new EciesException("Invalid MAC");
            }

            // Decrypt the data with AES using zero IV
            final byte[] encKeyBytes = envelopeKey.getEncKey();
            final SecretKey encKey = keyConvertor.convertBytesToSharedSecretKey(encKeyBytes);
            final byte[] iv = ivForDecryption;

            // Invalidate the encryptor: one decrypt per instance.
            canDecryptData = false;
            ivForDecryption = null;

            return aes.decrypt(cryptogram.getEncryptedData(), iv, encKey);
        } catch (InvalidKeyException | GenericCryptoException | CryptoProviderException ex) {
            logger.warn(ex.getMessage(), ex);
            throw new EciesException("Response decryption failed", ex);
        }
    }
}
package org.hisp.dhis.light.utils; /* * Copyright (c) 2004-2016, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.Validate;
import org.hisp.dhis.dataanalysis.DataAnalysisService;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetService;
import org.hisp.dhis.datavalue.DataValue;
import org.hisp.dhis.datavalue.DataValueService;
import org.hisp.dhis.datavalue.DeflatedDataValue;
import org.hisp.dhis.expression.ExpressionService;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.CalendarPeriodType;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.YearlyPeriodType;
import org.hisp.dhis.setting.SettingKey;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.system.filter.OrganisationUnitWithDataSetsFilter;
import org.hisp.dhis.system.filter.PastAndCurrentPeriodFilter;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.hisp.dhis.user.UserCredentials;
import org.hisp.dhis.commons.filter.FilterUtils;
import org.hisp.dhis.validation.ValidationResult;
import org.hisp.dhis.validation.ValidationRule;
import org.hisp.dhis.validation.ValidationRuleService;
import org.joda.time.DateTime;

import com.google.common.collect.Sets;

/**
 * Default {@link FormUtils} implementation backing the light (mobile) data
 * entry forms: validation-violation lookup, data value maps and period / data
 * set resolution for the current user.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public class FormUtilsImpl
    implements FormUtils
{
    /** Default page size for {@link #getPeriodsForDataSet(Integer)}. */
    public static final Integer DEFAULT_MAX_PERIODS = 10;

    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private CurrentUserService currentUserService;

    public void setCurrentUserService( CurrentUserService currentUserService )
    {
        this.currentUserService = currentUserService;
    }

    private OrganisationUnitService organisationUnitService;

    public void setOrganisationUnitService( OrganisationUnitService organisationUnitService )
    {
        this.organisationUnitService = organisationUnitService;
    }

    private DataValueService dataValueService;

    public void setDataValueService( DataValueService dataValueService )
    {
        this.dataValueService = dataValueService;
    }

    private DataSetService dataSetService;

    public void setDataSetService( DataSetService dataSetService )
    {
        this.dataSetService = dataSetService;
    }

    private DataAnalysisService stdDevOutlierAnalysisService;

    public void setStdDevOutlierAnalysisService( DataAnalysisService stdDevOutlierAnalysisService )
    {
        this.stdDevOutlierAnalysisService = stdDevOutlierAnalysisService;
    }

    private DataAnalysisService minMaxOutlierAnalysisService;

    public void setMinMaxOutlierAnalysisService( DataAnalysisService minMaxOutlierAnalysisService )
    {
        this.minMaxOutlierAnalysisService = minMaxOutlierAnalysisService;
    }

    private SystemSettingManager systemSettingManager;

    public void setSystemSettingManager( SystemSettingManager systemSettingManager )
    {
        this.systemSettingManager = systemSettingManager;
    }

    private ValidationRuleService validationRuleService;

    public void setValidationRuleService( ValidationRuleService validationRuleService )
    {
        this.validationRuleService = validationRuleService;
    }

    private ExpressionService expressionService;

    public void setExpressionService( ExpressionService expressionService )
    {
        this.expressionService = expressionService;
    }

    // -------------------------------------------------------------------------
    // Utils
    // -------------------------------------------------------------------------

    /**
     * Runs std-dev and min-max outlier analysis for the given org unit /
     * elements / period and returns the union of violations keyed by
     * "DE{dataElementId}OC{categoryOptionComboId}" (the key format the light
     * forms use to address a single input field).
     */
    @Override
    @SuppressWarnings("unchecked")
    public Map<String, DeflatedDataValue> getValidationViolations( OrganisationUnit organisationUnit,
        Collection<DataElement> dataElements, Period period )
    {
        Map<String, DeflatedDataValue> validationErrorMap = new HashMap<>();

        Double factor = (Double) systemSettingManager.getSystemSetting( SettingKey.FACTOR_OF_DEVIATION );

        // Analysis window starts two years before the period being entered
        Date from = new DateTime( period.getStartDate() ).minusYears( 2 ).toDate();

        Collection<DeflatedDataValue> stdDevs = stdDevOutlierAnalysisService.analyse( Sets.newHashSet( organisationUnit ),
            dataElements, Sets.newHashSet( period ), factor, from );

        Collection<DeflatedDataValue> minMaxs = minMaxOutlierAnalysisService.analyse( Sets.newHashSet( organisationUnit ),
            dataElements, Sets.newHashSet( period ), null, from );

        Collection<DeflatedDataValue> deflatedDataValues = CollectionUtils.union( stdDevs, minMaxs );

        for ( DeflatedDataValue deflatedDataValue : deflatedDataValues )
        {
            String key = String.format( "DE%dOC%d", deflatedDataValue.getDataElementId(),
                deflatedDataValue.getCategoryOptionComboId() );
            validationErrorMap.put( key, deflatedDataValue );
        }

        return validationErrorMap;
    }

    /**
     * Validates the data set for the given org unit and period and renders
     * each violated rule as "leftSideDescription operator rightSideDescription".
     */
    @Override
    public List<String> getValidationRuleViolations( OrganisationUnit organisationUnit, DataSet dataSet, Period period )
    {
        List<ValidationResult> validationRuleResults = new ArrayList<>( validationRuleService.validate( dataSet,
            period, organisationUnit, null ) );

        List<String> validationRuleViolations = new ArrayList<>( validationRuleResults.size() );

        for ( ValidationResult result : validationRuleResults )
        {
            ValidationRule rule = result.getValidationRule();

            StringBuilder sb = new StringBuilder();
            sb.append( expressionService.getExpressionDescription( rule.getLeftSide().getExpression() ) );
            sb.append( " " ).append( rule.getOperator().getMathematicalOperator() ).append( " " );
            sb.append( expressionService.getExpressionDescription( rule.getRightSide().getExpression() ) );

            validationRuleViolations.add( sb.toString() );
        }

        return validationRuleViolations;
    }

    /**
     * Returns existing data values for the data set / period / org unit keyed
     * by "DE{dataElementId}OC{categoryOptionComboId}", matching the key format
     * produced by {@link #getValidationViolations}.
     */
    @Override
    public Map<String, String> getDataValueMap( OrganisationUnit organisationUnit, DataSet dataSet, Period period )
    {
        Map<String, String> dataValueMap = new HashMap<>();

        List<DataValue> values = new ArrayList<>( dataValueService.getDataValues( dataSet.getDataElements(),
            Sets.newHashSet( period ), Sets.newHashSet( organisationUnit ) ) );

        for ( DataValue dataValue : values )
        {
            DataElement dataElement = dataValue.getDataElement();
            DataElementCategoryOptionCombo optionCombo = dataValue.getCategoryOptionCombo();

            String key = String.format( "DE%dOC%d", dataElement.getId(), optionCombo.getId() );
            String value = dataValue.getValue();

            dataValueMap.put( key, value );
        }

        return dataValueMap;
    }

    /**
     * Returns a copy of the given collection containing only org units which
     * have at least one data set assigned.
     */
    @Override
    public List<OrganisationUnit> organisationUnitWithDataSetsFilter( Collection<OrganisationUnit> organisationUnits )
    {
        List<OrganisationUnit> ous = new ArrayList<>( organisationUnits );

        FilterUtils.filter( ous, new OrganisationUnitWithDataSetsFilter() );

        return ous;
    }

    /**
     * Returns the current user's org units, naturally sorted and filtered to
     * those with data sets. Throws if no user is authenticated.
     */
    @Override
    public List<OrganisationUnit> getSortedOrganisationUnitsForCurrentUser()
    {
        User user = currentUserService.getCurrentUser();

        Validate.notNull( user );

        List<OrganisationUnit> organisationUnits = new ArrayList<>( user.getOrganisationUnits() );

        Collections.sort( organisationUnits );

        return organisationUnitWithDataSetsFilter( organisationUnits );
    }

    /**
     * Returns the data sets of the given org unit, restricted to the current
     * user's authorized data sets unless the user is a superuser.
     */
    @Override
    public List<DataSet> getDataSetsForCurrentUser( Integer organisationUnitId )
    {
        Validate.notNull( organisationUnitId );

        OrganisationUnit organisationUnit = organisationUnitService.getOrganisationUnit( organisationUnitId );

        List<DataSet> dataSets = new ArrayList<>( organisationUnit.getDataSets() );

        UserCredentials userCredentials = currentUserService.getCurrentUser().getUserCredentials();

        if ( !userCredentials.isSuper() )
        {
            dataSets.retainAll( userCredentials.getAllDataSets() );
        }

        return dataSets;
    }

    /**
     * Returns the first {@link #DEFAULT_MAX_PERIODS} periods for the data set.
     */
    @Override
    public List<Period> getPeriodsForDataSet( Integer dataSetId )
    {
        return getPeriodsForDataSet( dataSetId, 0, DEFAULT_MAX_PERIODS );
    }

    /**
     * Returns up to {@code max} periods for the data set, starting at offset
     * {@code first}, most recent first. Data sets with open future periods
     * return all generated periods for the current year (unpaged).
     */
    @Override
    public List<Period> getPeriodsForDataSet( Integer dataSetId, int first, int max )
    {
        Validate.notNull( dataSetId );

        DataSet dataSet = dataSetService.getDataSet( dataSetId );

        CalendarPeriodType periodType;

        if ( dataSet.getPeriodType().getName().equalsIgnoreCase( "Yearly" ) )
        {
            periodType = new YearlyPeriodType();
        }
        else
        {
            periodType = (CalendarPeriodType) dataSet.getPeriodType();
        }

        //TODO implement properly

        if ( dataSet.getOpenFuturePeriods() > 0 )
        {
            List<Period> periods = periodType.generatePeriods( new Date() );

            Collections.reverse( periods );

            return periods;
        }
        else
        {
            List<Period> periods = periodType.generateLast5Years( new Date() );

            FilterUtils.filter( periods, new PastAndCurrentPeriodFilter() );

            Collections.reverse( periods );

            if ( periods.size() > ( first + max ) )
            {
                // Fixed: was subList( first, max ), which returned a window of
                // (max - first) elements and broke paging whenever first > 0.
                // The guard above checks (first + max), so the window end must
                // be (first + max) to return exactly max elements from offset.
                periods = periods.subList( first, first + max );
            }

            return periods;
        }
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.connect.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for suspending recording of a contact.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/connect-2017-08-08/SuspendContactRecording" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SuspendContactRecordingRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance. */
    private String instanceId;

    /** The identifier of the contact. */
    private String contactId;

    /**
     * The identifier of the contact. This is the identifier of the contact associated with the first interaction with
     * the contact center.
     */
    private String initialContactId;

    /**
     * Sets the identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
     *
     * @param instanceId
     *        The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /**
     * Returns the identifier of the Amazon Connect instance.
     *
     * @return The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
     */
    public String getInstanceId() {
        return this.instanceId;
    }

    /**
     * Fluent setter for the instance identifier.
     *
     * @param instanceId
     *        The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SuspendContactRecordingRequest withInstanceId(String instanceId) {
        this.instanceId = instanceId;
        return this;
    }

    /**
     * Sets the identifier of the contact.
     *
     * @param contactId
     *        The identifier of the contact.
     */
    public void setContactId(String contactId) {
        this.contactId = contactId;
    }

    /**
     * Returns the identifier of the contact.
     *
     * @return The identifier of the contact.
     */
    public String getContactId() {
        return this.contactId;
    }

    /**
     * Fluent setter for the contact identifier.
     *
     * @param contactId
     *        The identifier of the contact.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SuspendContactRecordingRequest withContactId(String contactId) {
        this.contactId = contactId;
        return this;
    }

    /**
     * Sets the identifier of the contact associated with the first interaction with the contact center.
     *
     * @param initialContactId
     *        The identifier of the contact. This is the identifier of the contact associated with the first interaction
     *        with the contact center.
     */
    public void setInitialContactId(String initialContactId) {
        this.initialContactId = initialContactId;
    }

    /**
     * Returns the identifier of the contact associated with the first interaction with the contact center.
     *
     * @return The identifier of the contact. This is the identifier of the contact associated with the first
     *         interaction with the contact center.
     */
    public String getInitialContactId() {
        return this.initialContactId;
    }

    /**
     * Fluent setter for the initial contact identifier.
     *
     * @param initialContactId
     *        The identifier of the contact. This is the identifier of the contact associated with the first interaction
     *        with the contact center.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SuspendContactRecordingRequest withInitialContactId(String initialContactId) {
        this.initialContactId = initialContactId;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Each present field is rendered as "Name: value"; a trailing comma follows
        // every field except the last one, matching the generated-SDK format.
        StringBuilder buf = new StringBuilder("{");
        if (getInstanceId() != null) {
            buf.append("InstanceId: ").append(getInstanceId()).append(",");
        }
        if (getContactId() != null) {
            buf.append("ContactId: ").append(getContactId()).append(",");
        }
        if (getInitialContactId() != null) {
            buf.append("InitialContactId: ").append(getInitialContactId());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof SuspendContactRecordingRequest)) {
            return false;
        }
        SuspendContactRecordingRequest other = (SuspendContactRecordingRequest) obj;
        return java.util.Objects.equals(getInstanceId(), other.getInstanceId())
                && java.util.Objects.equals(getContactId(), other.getContactId())
                && java.util.Objects.equals(getInitialContactId(), other.getInitialContactId());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0) as the
        // generated code, so hash values are unchanged.
        return java.util.Objects.hash(getInstanceId(), getContactId(), getInitialContactId());
    }

    @Override
    public SuspendContactRecordingRequest clone() {
        return (SuspendContactRecordingRequest) super.clone();
    }

}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.client.admin.cli.commands;

import com.fasterxml.jackson.databind.node.ObjectNode;
import org.jboss.aesh.cl.CommandDefinition;
import org.jboss.aesh.cl.Option;
import org.jboss.aesh.console.command.CommandException;
import org.jboss.aesh.console.command.CommandResult;
import org.jboss.aesh.console.command.invocation.CommandInvocation;
import org.keycloak.client.admin.cli.config.ConfigData;
import org.keycloak.client.admin.cli.operations.ClientOperations;
import org.keycloak.client.admin.cli.operations.GroupOperations;
import org.keycloak.client.admin.cli.operations.RoleOperations;
import org.keycloak.client.admin.cli.operations.LocalSearch;
import org.keycloak.client.admin.cli.operations.UserOperations;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import static org.keycloak.client.admin.cli.util.AuthUtil.ensureToken;
import static org.keycloak.client.admin.cli.util.ConfigUtil.DEFAULT_CONFIG_FILE_STRING;
import static org.keycloak.client.admin.cli.util.ConfigUtil.credentialsAvailable;
import static org.keycloak.client.admin.cli.util.ConfigUtil.loadConfig;
import static org.keycloak.client.admin.cli.util.OsUtil.CMD;
import static org.keycloak.client.admin.cli.util.OsUtil.EOL;
import static org.keycloak.client.admin.cli.util.OsUtil.PROMPT;

/**
 * CLI command that adds realm or client roles to a target user, group or
 * composite role. Exactly one target kind (user / group / composite role) may
 * be specified; roles to add are given via repeatable --rolename / --roleid
 * free arguments.
 *
 * @author <a href="mailto:mstrukel@redhat.com">Marko Strukelj</a>
 */
@CommandDefinition(name = "add-roles", description = "[ARGUMENTS]")
public class AddRolesCmd extends AbstractAuthOptionsCmd {

    // --- target user selectors (mutually exclusive) ---
    @Option(name = "uusername", description = "Target user's 'username'")
    String uusername;

    @Option(name = "uid", description = "Target user's 'id'")
    String uid;

    // --- target group selectors (mutually exclusive) ---
    @Option(name = "gname", description = "Target group's 'name'")
    String gname;

    @Option(name = "gpath", description = "Target group's 'path'")
    String gpath;

    @Option(name = "gid", description = "Target group's 'id'")
    String gid;

    // --- target composite-role selectors (mutually exclusive) ---
    @Option(name = "rname", description = "Composite role's 'name'")
    String rname;

    @Option(name = "rid", description = "Composite role's 'id'")
    String rid;

    // --- optional client scope: when set, roles to add are client roles ---
    @Option(name = "cclientid", description = "Target client's 'clientId'")
    String cclientid;

    @Option(name = "cid", description = "Target client's 'id'")
    String cid;

    /**
     * Parses the free arguments for roles, validates option combinations,
     * authenticates, then dispatches to the user / group / composite-role
     * branch. Always stops the command invocation on exit.
     */
    @Override
    public CommandResult execute(CommandInvocation commandInvocation) throws CommandException, InterruptedException {

        List<String> roleNames = new LinkedList<>();
        List<String> roleIds = new LinkedList<>();

        try {
            if (printHelp()) {
                return help ? CommandResult.SUCCESS : CommandResult.FAILURE;
            }

            processGlobalOptions();

            // Collect repeatable --rolename / --roleid pairs from free args
            Iterator<String> it = args.iterator();
            while (it.hasNext()) {
                String option = it.next();
                switch (option) {
                    case "--rolename": {
                        optionRequiresValueCheck(it, option);
                        roleNames.add(it.next());
                        break;
                    }
                    case "--roleid": {
                        optionRequiresValueCheck(it, option);
                        roleIds.add(it.next());
                        break;
                    }
                    default: {
                        throw new IllegalArgumentException("Invalid option: " + option);
                    }
                }
            }

            // Validate mutually-exclusive selectors and required arguments
            if (uid != null && uusername != null) {
                throw new IllegalArgumentException("Incompatible options: --uid and --uusername are mutually exclusive");
            }

            if ((gid != null && gname != null) || (gid != null && gpath != null) || (gname != null && gpath != null)) {
                throw new IllegalArgumentException("Incompatible options: --gid, --gname and --gpath are mutually exclusive");
            }

            if (roleNames.isEmpty() && roleIds.isEmpty()) {
                throw new IllegalArgumentException("No role to add specified. Use --rolename or --roleid to specify roles to add");
            }

            if (cid != null && cclientid != null) {
                throw new IllegalArgumentException("Incompatible options: --cid and --cclientid are mutually exclusive");
            }

            if (rid != null && rname != null) {
                throw new IllegalArgumentException("Incompatible options: --rid and --rname are mutually exclusive");
            }

            if (isUserSpecified() && isGroupSpecified()) {
                throw new IllegalArgumentException("Incompatible options: --uusername / --uid can't be used at the same time as --gname / --gid / --gpath");
            }

            if (isUserSpecified() && isCompositeRoleSpecified()) {
                throw new IllegalArgumentException("Incompatible options: --uusername / --uid can't be used at the same time as --rname / --rid");
            }

            if (isGroupSpecified() && isCompositeRoleSpecified()) {
                throw new IllegalArgumentException("Incompatible options: --rname / --rid can't be used at the same time as --gname / --gid / --gpath");
            }

            if (!isUserSpecified() && !isGroupSpecified() && !isCompositeRoleSpecified()) {
                throw new IllegalArgumentException("No user nor group nor composite role specified. Use --uusername / --uid to specify user or --gname / --gid / --gpath to specify group or --rname / --rid to specify a composite role");
            }

            // Resolve config and authenticate (token only if credentials present)
            ConfigData config = loadConfig();
            config = copyWithServerInfo(config);

            setupTruststore(config, commandInvocation);

            String auth = null;

            config = ensureAuthInfo(config, commandInvocation);
            config = copyWithServerInfo(config);
            if (credentialsAvailable(config)) {
                auth = ensureToken(config);
            }

            auth = auth != null ? "Bearer " + auth : null;

            final String server = config.getServerUrl();
            final String realm = getTargetRealm(config);
            final String adminRoot = adminRestRoot != null ? adminRestRoot : composeAdminRoot(server);

            // Dispatch on the target kind; each branch resolves missing ids
            // from names first, then adds realm or client roles.
            if (isUserSpecified()) {

                if (uid == null) {
                    uid = UserOperations.getIdFromUsername(adminRoot, realm, auth, uusername);
                }

                if (isClientSpecified()) {
                    // list client roles for a user
                    if (cid == null) {
                        cid = ClientOperations.getIdFromClientId(adminRoot, realm, auth, cclientid);
                    }

                    List<ObjectNode> roles = RoleOperations.getClientRoles(adminRoot, realm, cid, auth);

                    Set<ObjectNode> rolesToAdd = getRoleRepresentations(roleNames, roleIds, new LocalSearch(roles));

                    // now add all the roles
                    UserOperations.addClientRoles(adminRoot, realm, auth, uid, cid, new ArrayList<>(rolesToAdd));

                } else {
                    Set<ObjectNode> rolesToAdd = getRoleRepresentations(roleNames, roleIds,
                            new LocalSearch(RoleOperations.getRealmRolesAsNodes(adminRoot, realm, auth)));

                    // now add all the roles
                    UserOperations.addRealmRoles(adminRoot, realm, auth, uid, new ArrayList<>(rolesToAdd));
                }

            } else if (isGroupSpecified()) {

                if (gname != null) {
                    gid = GroupOperations.getIdFromName(adminRoot, realm, auth, gname);
                } else if (gpath != null) {
                    gid = GroupOperations.getIdFromPath(adminRoot, realm, auth, gpath);
                }

                if (isClientSpecified()) {
                    // list client roles for a group
                    if (cid == null) {
                        cid = ClientOperations.getIdFromClientId(adminRoot, realm, auth, cclientid);
                    }

                    List<ObjectNode> roles = RoleOperations.getClientRoles(adminRoot, realm, cid, auth);

                    Set<ObjectNode> rolesToAdd = getRoleRepresentations(roleNames, roleIds, new LocalSearch(roles));

                    // now add all the roles
                    GroupOperations.addClientRoles(adminRoot, realm, auth, gid, cid, new ArrayList<>(rolesToAdd));

                } else {
                    Set<ObjectNode> rolesToAdd = getRoleRepresentations(roleNames, roleIds,
                            new LocalSearch(RoleOperations.getRealmRolesAsNodes(adminRoot, realm, auth)));

                    // now add all the roles
                    GroupOperations.addRealmRoles(adminRoot, realm, auth, gid, new ArrayList<>(rolesToAdd));
                }

            } else if (isCompositeRoleSpecified()) {

                if (rid == null) {
                    rid = RoleOperations.getIdFromRoleName(adminRoot, realm, auth, rname);
                }

                if (isClientSpecified()) {
                    // list client roles for a composite role
                    if (cid == null) {
                        cid = ClientOperations.getIdFromClientId(adminRoot, realm, auth, cclientid);
                    }

                    List<ObjectNode> roles = RoleOperations.getClientRoles(adminRoot, realm, cid, auth);

                    Set<ObjectNode> rolesToAdd = getRoleRepresentations(roleNames, roleIds, new LocalSearch(roles));

                    // now add all the roles
                    RoleOperations.addClientRoles(adminRoot, realm, auth, rid, new ArrayList<>(rolesToAdd));

                } else {
                    Set<ObjectNode> rolesToAdd = getRoleRepresentations(roleNames, roleIds,
                            new LocalSearch(RoleOperations.getRealmRolesAsNodes(adminRoot, realm, auth)));

                    // now add all the roles
                    RoleOperations.addRealmRoles(adminRoot, realm, auth, rid, new ArrayList<>(rolesToAdd));
                }

            } else {
                // NOTE(review): unreachable — the "no target specified" case is
                // already rejected by the validation above; kept as a safety net.
                throw new IllegalArgumentException("No user nor group, nor composite role specified. Use --uusername / --uid to specify user or --gname / --gid / --gpath to specify group or --rname / --rid to specify a composite role");
            }

            return CommandResult.SUCCESS;

        } catch (IllegalArgumentException e) {
            // Re-wrap with a help hint appended to the message
            throw new IllegalArgumentException(e.getMessage() + suggestHelp(), e);
        } finally {
            commandInvocation.stop();
        }
    }

    /**
     * Resolves the requested role names and ids against the given search
     * scope (realm or client roles) into unique role representations.
     * Throws if any requested role is not found.
     */
    private Set<ObjectNode> getRoleRepresentations(List<String> roleNames, List<String> roleIds, LocalSearch roleSearch) {
        Set<ObjectNode> rolesToAdd = new HashSet<>();

        // now we process roles
        for (String name : roleNames) {
            ObjectNode r = roleSearch.exactMatchOne(name, "name");
            if (r == null) {
                throw new RuntimeException("Role not found for name: " + name);
            }
            rolesToAdd.add(r);
        }
        for (String id : roleIds) {
            ObjectNode r = roleSearch.exactMatchOne(id, "id");
            if (r == null) {
                throw new RuntimeException("Role not found for id: " + id);
            }
            rolesToAdd.add(r);
        }
        return rolesToAdd;
    }

    /** Fails fast when an option that requires a value is the last argument. */
    private void optionRequiresValueCheck(Iterator<String> it, String option) {
        if (!it.hasNext()) {
            throw new IllegalArgumentException("Option " + option + " requires a value");
        }
    }

    /** True when a target client was selected via --cid or --cclientid. */
    private boolean isClientSpecified() {
        return cid != null || cclientid != null;
    }

    /** True when a target group was selected via --gid, --gname or --gpath. */
    private boolean isGroupSpecified() {
        return gid != null || gname != null || gpath != null;
    }

    /** True when a target user was selected via --uid or --uusername. */
    private boolean isUserSpecified() {
        return uid != null || uusername == null ? uid != null || uusername != null : true;
    }

    /** True when a target composite role was selected via --rid or --rname. */
    private boolean isCompositeRoleSpecified() {
        return rid != null || rname != null;
    }

    @Override
    protected boolean nothingToDo() {
        // NOTE(review): this only considers user/client selectors and free args;
        // group (--gname/--gid/--gpath) and composite-role (--rname/--rid)
        // selectors are not checked here — confirm whether that is intentional.
        return noOptions() && uusername == null && uid == null && cclientid == null && (args == null || args.size() == 0);
    }

    /** Appends a pointer to the command-specific help. */
    protected String suggestHelp() {
        return EOL + "Try '" + CMD + " help add-roles' for more information";
    }

    protected String help() {
        return usage();
    }

    /** Renders the full usage/help text for the add-roles command. */
    public static String usage() {
        StringWriter sb = new StringWriter();
        PrintWriter out = new PrintWriter(sb);
        out.println("Usage: " + CMD + " add-roles (--uusername USERNAME | --uid ID) [--cclientid CLIENT_ID | --cid ID] (--rolename NAME | --roleid ID)+ [ARGUMENTS]");
        out.println(" " + CMD + " add-roles (--gname NAME | --gpath PATH | --gid ID) [--cclientid CLIENT_ID | --cid ID] (--rolename NAME | --roleid ID)+ [ARGUMENTS]");
        out.println(" " + CMD + " add-roles (--rname ROLE_NAME | --rid ROLE_ID) [--cclientid CLIENT_ID | --cid ID] (--rolename NAME | --roleid ID)+ [ARGUMENTS]");
        out.println();
        out.println("Command to add realm or client roles to a user, a group or a composite role.");
        out.println();
        out.println("Use `" + CMD + " config credentials` to establish an authenticated session, or use CREDENTIALS OPTIONS");
        out.println("to perform one time authentication.");
        out.println();
        out.println("If client is specified using --cclientid or --cid then roles to add are client roles, otherwise they are realm roles.");
        out.println("Either a user, or a group needs to be specified. If user is specified using --uusername or --uid then roles are added");
        out.println("to a specific user. If group is specified using --gname, --gpath or --gid then roles are added to a specific group.");
        out.println("If composite role is specified using --rname or --rid then roles are added to a specific composite role.");
        out.println("One or more roles have to be specified using --rolename or --roleid so that they are added to a group, a user or a composite role.");
        out.println();
        out.println("Arguments:");
        out.println();
        out.println(" Global options:");
        out.println(" -x Print full stack trace when exiting with error");
        out.println(" --config Path to the config file (" + DEFAULT_CONFIG_FILE_STRING + " by default)");
        out.println(" --truststore PATH Path to a truststore containing trusted certificates");
        out.println(" --trustpass PASSWORD Truststore password (prompted for if not specified and --truststore is used)");
        out.println(" CREDENTIALS OPTIONS Same set of options as accepted by '" + CMD + " config credentials' in order to establish");
        out.println(" an authenticated sessions. This allows on-the-fly transient authentication that does");
        out.println(" not touch a config file.");
        out.println();
        out.println(" Command specific options:");
        out.println(" --uusername User's 'username'. If more than one user exists with the same username");
        out.println(" you'll have to use --uid to specify the target user");
        out.println(" --uid User's 'id' attribute");
        out.println(" --gname Group's 'name'. If more than one group exists with the same name you'll have");
        out.println(" to use --gid, or --gpath to specify the target group");
        out.println(" --gpath Group's 'path' attribute");
        out.println(" --gid Group's 'id' attribute");
        out.println(" --rname Composite role's 'name' attribute");
        out.println(" --rid Composite role's 'id' attribute");
        out.println(" --cclientid Client's 'clientId' attribute");
        out.println(" --cid Client's 'id' attribute");
        out.println(" --rolename Role's 'name' attribute");
        out.println(" --roleid Role's 'id' attribute");
        out.println(" -a, --admin-root URL URL of Admin REST endpoint root if not default - e.g. http://localhost:8080/auth/admin");
        out.println(" -r, --target-realm REALM Target realm to issue requests against if not the one authenticated against");
        out.println();
        out.println("Examples:");
        out.println();
        out.println("Add 'offline_access' realm role to a user:");
        out.println(" " + PROMPT + " " + CMD + " add-roles -r demorealm --uusername testuser --rolename offline_access");
        out.println();
        out.println("Add 'realm-management' client roles 'view-users', 'view-clients' and 'view-realm' to a user:");
        out.println(" " + PROMPT + " " + CMD + " add-roles -r demorealm --uusername testuser --cclientid realm-management --rolename view-users --rolename view-clients --rolename view-realm");
        out.println();
        out.println("Add 'uma_authorization' realm role to a group:");
        out.println(" " + PROMPT + " " + CMD + " add-roles -r demorealm --gname PowerUsers --rolename uma_authorization");
        out.println();
        out.println("Add 'realm-management' client roles 'realm-admin' to a group:");
        out.println(" " + PROMPT + " " + CMD + " add-roles -r demorealm --gname PowerUsers --cclientid realm-management --rolename realm-admin");
        out.println();
        out.println();
        out.println("Use '" + CMD + " help' for general information and a list of commands");
        return sb.toString();
    }

}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui.components; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.registry.Registry; import com.intellij.ui.components.JBScrollPane.Alignment; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.MouseEventAdapter; import com.intellij.util.ui.RegionPainter; import com.intellij.util.ui.UIUtil; import javax.swing.*; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.plaf.ScrollBarUI; import java.awt.*; import java.awt.event.*; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import static java.awt.Adjustable.VERTICAL; /** * @author Sergey.Malenkov */ class DefaultScrollBarUI extends ScrollBarUI { static final Key<RegionPainter<Object>> LEADING_AREA = Key.create("PLAIN_SCROLL_BAR_UI_LEADING_AREA");//TODO:support private final Listener myListener = new Listener(); private final Timer myScrollTimer = UIUtil.createNamedTimer("ScrollBarThumbScrollTimer", 60, myListener); final TwoWayAnimator myTrackAnimator = new TwoWayAnimator("ScrollBarTrack", 11, 150, 125, 300, 125) { @Override void onValueUpdate() { repaint(); } }; final TwoWayAnimator myThumbAnimator = new TwoWayAnimator("ScrollBarThumb", 11, 150, 125, 300, 125) { @Override void onValueUpdate() { repaint(); } }; private final Rectangle myThumbBounds = new Rectangle(); private 
final Rectangle myTrackBounds = new Rectangle(); private final Rectangle myLeadingBounds = new Rectangle(); private final int myThickness; private final int myThicknessMax; private final int myThicknessMin; JScrollBar myScrollBar; private boolean isValueCached; private int myCachedValue; DefaultScrollBarUI() { this(13, 14, 10); } DefaultScrollBarUI(int thickness, int thicknessMax, int thicknessMin) { myThickness = thickness; myThicknessMax = thicknessMax; myThicknessMin = thicknessMin; } int getThickness() { return scale(myScrollBar == null || myScrollBar.isOpaque() ? myThickness : myThicknessMax); } int getMinimalThickness() { return scale(myThicknessMin); } boolean isAbsolutePositioning(MouseEvent event) { return SwingUtilities.isMiddleMouseButton(event); } boolean isBorderNeeded(JComponent c) { return c.isOpaque() && Registry.is("ide.scroll.track.border.paint"); } boolean isTrackClickable() { return myScrollBar.isOpaque() || myTrackAnimator.myValue > 0; } boolean isTrackExpandable() { return Registry.is("ide.scroll.bar.expand.animation"); } boolean isTrackContains(int x, int y) { return myTrackBounds.contains(x, y); } boolean isThumbContains(int x, int y) { return myThumbBounds.contains(x, y); } void onTrackHover(boolean hover) { myTrackAnimator.start(hover); } void onThumbHover(boolean hover) { myThumbAnimator.start(hover); } void paintTrack(Graphics2D g, int x, int y, int width, int height, JComponent c) { RegionPainter<Float> p = ScrollColorProducer.isDark(c) ? JBScrollPane.TRACK_DARK_PAINTER : JBScrollPane.TRACK_PAINTER; if (!isTrackExpandable() && Registry.is("ide.scroll.background.wide")) { p.paint(g, x, y, width, height, myTrackAnimator.myValue); return; // temporary registry key for designer } paint(p, g, x, y, width, height, c, myTrackAnimator.myValue, false); } void paintThumb(Graphics2D g, int x, int y, int width, int height, JComponent c) { RegionPainter<Float> p = ScrollColorProducer.isDark(c) ? 
JBScrollPane.THUMB_DARK_PAINTER : JBScrollPane.THUMB_PAINTER; paint(p, g, x, y, width, height, c, myThumbAnimator.myValue, Registry.is("ide.scroll.thumb.small.if.opaque")); } void onThumbMove() { } void paint(RegionPainter<Float> p, Graphics2D g, int x, int y, int width, int height, JComponent c, float value, boolean small) { if (!c.isOpaque()) { Alignment alignment = Alignment.get(c); if (alignment == Alignment.LEFT || alignment == Alignment.RIGHT) { int offset = getTrackOffset(width - getMinimalThickness()); if (offset > 0) { width -= offset; if (alignment == Alignment.RIGHT) x += offset; } } else { int offset = getTrackOffset(height - getMinimalThickness()); if (offset > 0) { height -= offset; if (alignment == Alignment.BOTTOM) y += offset; } } } else if (small) { x += 1; y += 1; width -= 2; height -= 2; } p.paint(g, x, y, width, height, value); } private int getTrackOffset(int offset) { if (!isTrackExpandable()) return offset; float value = myTrackAnimator.myValue; if (value <= 0) return offset; if (value >= 1) return 0; return (int)(.5f + offset * (1 - value)); } void repaint() { if (myScrollBar != null) myScrollBar.repaint(); } void repaint(int x, int y, int width, int height) { if (myScrollBar != null) myScrollBar.repaint(x, y, width, height); } private int scale(int value) { value = JBUI.scale(value); //noinspection EnumSwitchStatementWhichMissesCases switch (UIUtil.getComponentStyle(myScrollBar)) { case LARGE: return (int)(value * 1.15); case SMALL: return (int)(value * 0.857); case MINI: return (int)(value * 0.714); } return value; } @Override public void installUI(JComponent c) { myScrollBar = (JScrollBar)c; ScrollColorProducer.setBackground(c); ScrollColorProducer.setForeground(c); myScrollBar.setFocusable(false); myScrollBar.addMouseListener(myListener); myScrollBar.addMouseMotionListener(myListener); myScrollBar.getModel().addChangeListener(myListener); myScrollBar.addPropertyChangeListener(myListener); myScrollBar.addFocusListener(myListener); 
myScrollTimer.setInitialDelay(300); } @Override public void uninstallUI(JComponent c) { myScrollTimer.stop(); myTrackAnimator.stop(); myThumbAnimator.stop(); myScrollBar.removeFocusListener(myListener); myScrollBar.removePropertyChangeListener(myListener); myScrollBar.getModel().removeChangeListener(myListener); myScrollBar.removeMouseMotionListener(myListener); myScrollBar.removeMouseListener(myListener); myScrollBar.setForeground(null); myScrollBar.setBackground(null); myScrollBar = null; } @Override public Dimension getPreferredSize(JComponent c) { int thickness = getThickness(); Alignment alignment = Alignment.get(c); return alignment == Alignment.LEFT || alignment == Alignment.RIGHT ? new Dimension(thickness, thickness * 2) : new Dimension(thickness * 2, thickness); } @Override public void paint(Graphics g, JComponent c) { Alignment alignment = Alignment.get(c); if (alignment != null && g instanceof Graphics2D) { Container parent = c.getParent(); Color background = !c.isOpaque() ? null : c.getBackground(); if (background != null) { g.setColor(background); g.fillRect(0, 0, c.getWidth(), c.getHeight()); } Rectangle bounds = new Rectangle(c.getWidth(), c.getHeight()); JBInsets.removeFrom(bounds, c.getInsets()); if (parent instanceof JScrollPane) { Color foreground = c.getForeground(); if (foreground != null && !foreground.equals(background) && isBorderNeeded(c)) { g.setColor(foreground); switch (alignment) { case TOP: bounds.height--; g.drawLine(bounds.x, bounds.y + bounds.height, bounds.x + bounds.width, bounds.y + bounds.height); break; case LEFT: bounds.width--; g.drawLine(bounds.x + bounds.width, bounds.y, bounds.x + bounds.width, bounds.y + bounds.height); break; case RIGHT: g.drawLine(bounds.x, bounds.y, bounds.x, bounds.y + bounds.height); bounds.width--; bounds.x++; break; case BOTTOM: g.drawLine(bounds.x, bounds.y, bounds.x + bounds.width, bounds.y); bounds.height--; bounds.y++; break; } } } if (!c.isOpaque() && myTrackAnimator.myValue > 0) { 
paintTrack((Graphics2D)g, bounds.x, bounds.y, bounds.width, bounds.height, c); } // process a square area before the track RegionPainter<Object> leading = UIUtil.getClientProperty(c, LEADING_AREA); if (leading == null) { myLeadingBounds.setSize(0, 0); } else if (alignment == Alignment.LEFT || alignment == Alignment.RIGHT) { int size = bounds.width; myLeadingBounds.setBounds(bounds.x, bounds.y, size, size); leading.paint((Graphics2D)g, bounds.x, bounds.y, size, size, null); bounds.height -= size; bounds.y += size; } else { int size = bounds.height; myLeadingBounds.setBounds(bounds.x, bounds.y, size, size); leading.paint((Graphics2D)g, bounds.x, bounds.y, size, size, null); bounds.width -= size; bounds.x += size; } myTrackBounds.setBounds(bounds); updateThumbBounds(); // process additional drawing on the track RegionPainter<Object> track = UIUtil.getClientProperty(c, JBScrollBar.TRACK); if (track != null && myTrackBounds.width > 0 && myTrackBounds.height > 0) { track.paint((Graphics2D)g, myTrackBounds.x, myTrackBounds.y, myTrackBounds.width, myTrackBounds.height, null); } // process drawing the thumb if (myThumbBounds.width > 0 && myThumbBounds.height > 0) { paintThumb((Graphics2D)g, myThumbBounds.x, myThumbBounds.y, myThumbBounds.width, myThumbBounds.height, c); } } } private void updateThumbBounds() { int min = myScrollBar.getMinimum(); int max = myScrollBar.getMaximum(); int range = max - min; if (range <= 0) { myThumbBounds.setBounds(0, 0, 0, 0); } else if (VERTICAL == myScrollBar.getOrientation()) { int extent = myScrollBar.getVisibleAmount(); int height = Math.max(myTrackBounds.height * extent / range, 2 * getThickness()); if (myTrackBounds.height <= height) { myThumbBounds.setBounds(0, 0, 0, 0); } else { int value = getValue(); int maxY = myTrackBounds.y + myTrackBounds.height - height; int y = (value < max - extent) ? 
(myTrackBounds.height - height) * (value - min) / (range - extent) : maxY; y = adjust(y, myTrackBounds.y, maxY); boolean moved = myThumbBounds.y != y || myThumbBounds.height != height; myThumbBounds.setBounds(myTrackBounds.x, y, myTrackBounds.width, height); if (moved) onThumbMove(); } } else { int extent = myScrollBar.getVisibleAmount(); int width = Math.max(myTrackBounds.width * extent / range, 2 * getThickness()); if (myTrackBounds.width <= width) { myThumbBounds.setBounds(0, 0, 0, 0); } else { int value = getValue(); int maxX = myTrackBounds.x + myTrackBounds.width - width; int x = (value < max - extent) ? (myTrackBounds.width - width) * (value - min) / (range - extent) : maxX; if (!myScrollBar.getComponentOrientation().isLeftToRight()) x = myTrackBounds.x - x + maxX; x = adjust(x, myTrackBounds.x, maxX); boolean moved = myThumbBounds.x != x || myThumbBounds.width != width; myThumbBounds.setBounds(x, myTrackBounds.y, width, myTrackBounds.height); if (moved) onThumbMove(); } } } private int getValue() { return isValueCached ? myCachedValue : myScrollBar.getValue(); } private static int adjust(int value, int min, int max) { return value < min ? min : value > max ? 
max : value; } private final class Listener extends MouseAdapter implements ActionListener, FocusListener, ChangeListener, PropertyChangeListener { private int myOffset; private int myMouseX, myMouseY; private boolean isReversed; private boolean isDragging; private boolean isOverTrack; private boolean isOverThumb; private void updateMouse(int x, int y) { if (isTrackContains(x, y)) { if (!isOverTrack) onTrackHover(isOverTrack = true); boolean hover = isThumbContains(x, y); if (isOverThumb != hover) onThumbHover(isOverThumb = hover); } else { updateMouseExit(); } } private void updateMouseExit() { if (isOverThumb) onThumbHover(isOverThumb = false); if (isOverTrack) onTrackHover(isOverTrack = false); } private boolean redispatchIfTrackNotClickable(MouseEvent event) { if (isTrackClickable()) return false; // redispatch current event to the view Container parent = myScrollBar.getParent(); if (parent instanceof JScrollPane) { JScrollPane pane = (JScrollPane)parent; Component view = pane.getViewport().getView(); if (view != null) view.dispatchEvent(MouseEventAdapter.convert(event, view)); } return true; } @Override public void mousePressed(MouseEvent event) { if (myScrollBar == null || !myScrollBar.isEnabled()) return; if (redispatchIfTrackNotClickable(event)) return; if (SwingUtilities.isRightMouseButton(event)) return; isValueCached = true; myCachedValue = myScrollBar.getValue(); myScrollBar.setValueIsAdjusting(true); myMouseX = event.getX(); myMouseY = event.getY(); boolean vertical = VERTICAL == myScrollBar.getOrientation(); if (isThumbContains(myMouseX, myMouseY)) { // pressed on the thumb myOffset = vertical ? (myMouseY - myThumbBounds.y) : (myMouseX - myThumbBounds.x); isDragging = true; } else if (isTrackContains(myMouseX, myMouseY)) { // pressed on the track if (isAbsolutePositioning(event)) { myOffset = (vertical ? 
myThumbBounds.height : myThumbBounds.width) / 2; isDragging = true; setValueFrom(event); } else { myScrollTimer.stop(); isDragging = false; if (VERTICAL == myScrollBar.getOrientation()) { int y = myThumbBounds.isEmpty() ? myScrollBar.getHeight() / 2 : myThumbBounds.y; isReversed = myMouseY < y; } else { int x = myThumbBounds.isEmpty() ? myScrollBar.getWidth() / 2 : myThumbBounds.x; isReversed = myMouseX < x; if (!myScrollBar.getComponentOrientation().isLeftToRight()) { isReversed = !isReversed; } } scroll(isReversed); startScrollTimerIfNecessary(); } } } @Override public void mouseReleased(MouseEvent event) { if (isDragging) updateMouse(event.getX(), event.getY()); if (myScrollBar == null || !myScrollBar.isEnabled()) return; if (redispatchIfTrackNotClickable(event)) return; if (SwingUtilities.isRightMouseButton(event)) return; isDragging = false; myOffset = 0; myScrollTimer.stop(); isValueCached = true; myCachedValue = myScrollBar.getValue(); myScrollBar.setValueIsAdjusting(false); repaint(); } @Override public void mouseDragged(MouseEvent event) { if (myScrollBar == null || !myScrollBar.isEnabled()) return; if (myThumbBounds.isEmpty() || SwingUtilities.isRightMouseButton(event)) return; if (isDragging) { setValueFrom(event); } else { myMouseX = event.getX(); myMouseY = event.getY(); updateMouse(myMouseX, myMouseY); startScrollTimerIfNecessary(); } } @Override public void mouseMoved(MouseEvent event) { if (myScrollBar == null || !myScrollBar.isEnabled()) return; if (!isDragging) updateMouse(event.getX(), event.getY()); redispatchIfTrackNotClickable(event); } @Override public void mouseExited(MouseEvent event) { if (myScrollBar == null || !myScrollBar.isEnabled()) return; if (!isDragging) updateMouseExit(); } @Override public void actionPerformed(ActionEvent event) { if (myScrollBar == null) { myScrollTimer.stop(); } else { scroll(isReversed); if (!myThumbBounds.isEmpty()) { if (isReversed ? 
!isMouseBeforeThumb() : !isMouseAfterThumb()) { myScrollTimer.stop(); } } int value = myScrollBar.getValue(); if (isReversed ? value <= myScrollBar.getMinimum() : value >= myScrollBar.getMaximum() - myScrollBar.getVisibleAmount()) { myScrollTimer.stop(); } } } @Override public void focusGained(FocusEvent event) { repaint(); } @Override public void focusLost(FocusEvent event) { repaint(); } @Override public void stateChanged(ChangeEvent event) { updateThumbBounds(); // TODO: update mouse isValueCached = false; repaint(); } @Override public void propertyChange(PropertyChangeEvent event) { String name = event.getPropertyName(); if ("model" == name) { BoundedRangeModel oldModel = (BoundedRangeModel)event.getOldValue(); BoundedRangeModel newModel = (BoundedRangeModel)event.getNewValue(); oldModel.removeChangeListener(this); newModel.addChangeListener(this); } if ("model" == name || "orientation" == name || "componentOrientation" == name) { repaint(); } if ("opaque" == name || "visible" == name) { myTrackAnimator.rewind(false); myThumbAnimator.rewind(false); myTrackBounds.setBounds(0, 0, 0, 0); myThumbBounds.setBounds(0, 0, 0, 0); } } private void setValueFrom(MouseEvent event) { int x = event.getX(); int y = event.getY(); int thumbMin, thumbMax, thumbPos; if (VERTICAL == myScrollBar.getOrientation()) { thumbMin = myTrackBounds.y; thumbMax = myTrackBounds.y + myTrackBounds.height - myThumbBounds.height; thumbPos = Math.min(thumbMax, Math.max(thumbMin, (y - myOffset))); if (myThumbBounds.y != thumbPos) { int minY = Math.min(myThumbBounds.y, thumbPos); int maxY = Math.max(myThumbBounds.y, thumbPos) + myThumbBounds.height; myThumbBounds.y = thumbPos; onThumbMove(); repaint(myThumbBounds.x, minY, myThumbBounds.width, maxY - minY); } } else { thumbMin = myTrackBounds.x; thumbMax = myTrackBounds.x + myTrackBounds.width - myThumbBounds.width; thumbPos = Math.min(thumbMax, Math.max(thumbMin, (x - myOffset))); if (myThumbBounds.x != thumbPos) { int minX = 
Math.min(myThumbBounds.x, thumbPos); int maxX = Math.max(myThumbBounds.x, thumbPos) + myThumbBounds.width; myThumbBounds.x = thumbPos; onThumbMove(); repaint(minX, myThumbBounds.y, maxX - minX, myThumbBounds.height); } } int valueMin = myScrollBar.getMinimum(); int valueMax = myScrollBar.getMaximum() - myScrollBar.getVisibleAmount(); // If the thumb has reached the end of the scrollbar, then just set the value to its maximum. // Otherwise compute the value as accurately as possible. boolean isDefaultOrientation = VERTICAL == myScrollBar.getOrientation() || myScrollBar.getComponentOrientation().isLeftToRight(); if (thumbPos == thumbMax) { myScrollBar.setValue(isDefaultOrientation ? valueMax : valueMin); } else { int valueRange = valueMax - valueMin; int thumbRange = thumbMax - thumbMin; int thumbValue = isDefaultOrientation ? thumbPos - thumbMin : thumbMax - thumbPos; isValueCached = true; myCachedValue = valueMin + valueRange * thumbValue / thumbRange; myScrollBar.setValue(myCachedValue); } if (!isDragging) updateMouse(x, y); } private void startScrollTimerIfNecessary() { if (!myScrollTimer.isRunning()) { if (isReversed ? isMouseBeforeThumb() : isMouseAfterThumb()) { myScrollTimer.start(); } } } private boolean isMouseBeforeThumb() { return VERTICAL == myScrollBar.getOrientation() ? isMouseOnTop() : myScrollBar.getComponentOrientation().isLeftToRight() ? isMouseOnLeft() : isMouseOnRight(); } private boolean isMouseAfterThumb() { return VERTICAL == myScrollBar.getOrientation() ? isMouseOnBottom() : myScrollBar.getComponentOrientation().isLeftToRight() ? 
isMouseOnRight() : isMouseOnLeft(); } private boolean isMouseOnTop() { return myMouseY < myThumbBounds.y; } private boolean isMouseOnLeft() { return myMouseX < myThumbBounds.x; } private boolean isMouseOnRight() { return myMouseX > myThumbBounds.x + myThumbBounds.width; } private boolean isMouseOnBottom() { return myMouseY > myThumbBounds.y + myThumbBounds.height; } private void scroll(boolean reversed) { int delta = myScrollBar.getBlockIncrement(reversed ? -1 : 1); if (reversed) delta = -delta; int oldValue = myScrollBar.getValue(); int newValue = oldValue + delta; if (delta > 0 && newValue < oldValue) { newValue = myScrollBar.getMaximum(); } else if (delta < 0 && newValue > oldValue) { newValue = myScrollBar.getMinimum(); } if (oldValue != newValue) { myScrollBar.setValue(newValue); } } } }
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.resourcemanager.v3; import static com.google.cloud.resourcemanager.v3.OrganizationsClient.SearchOrganizationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.resourcenames.ResourceName; import com.google.common.collect.Lists; import com.google.iam.v1.Binding; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.protobuf.AbstractMessage; import com.google.protobuf.ByteString; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class 
OrganizationsClientTest { private static MockOrganizations mockOrganizations; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private OrganizationsClient client; @BeforeClass public static void startStaticServer() { mockOrganizations = new MockOrganizations(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockOrganizations)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); OrganizationsSettings settings = OrganizationsSettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = OrganizationsClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void getOrganizationTest() throws Exception { Organization expectedResponse = Organization.newBuilder() .setName(OrganizationName.of("[ORGANIZATION]").toString()) .setDisplayName("displayName1714148973") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .build(); mockOrganizations.addResponse(expectedResponse); OrganizationName name = OrganizationName.of("[ORGANIZATION]"); Organization actualResponse = client.getOrganization(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetOrganizationRequest actualRequest = ((GetOrganizationRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), 
GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getOrganizationExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { OrganizationName name = OrganizationName.of("[ORGANIZATION]"); client.getOrganization(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getOrganizationTest2() throws Exception { Organization expectedResponse = Organization.newBuilder() .setName(OrganizationName.of("[ORGANIZATION]").toString()) .setDisplayName("displayName1714148973") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setDeleteTime(Timestamp.newBuilder().build()) .setEtag("etag3123477") .build(); mockOrganizations.addResponse(expectedResponse); String name = "name3373707"; Organization actualResponse = client.getOrganization(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetOrganizationRequest actualRequest = ((GetOrganizationRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getOrganizationExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { String name = "name3373707"; client.getOrganization(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void searchOrganizationsTest() throws Exception { Organization responsesElement = Organization.newBuilder().build(); SearchOrganizationsResponse expectedResponse = SearchOrganizationsResponse.newBuilder() .setNextPageToken("") .addAllOrganizations(Arrays.asList(responsesElement)) .build(); mockOrganizations.addResponse(expectedResponse); String query = "query107944136"; SearchOrganizationsPagedResponse pagedListResponse = client.searchOrganizations(query); List<Organization> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getOrganizationsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); SearchOrganizationsRequest actualRequest = ((SearchOrganizationsRequest) actualRequests.get(0)); Assert.assertEquals(query, actualRequest.getQuery()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void searchOrganizationsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { String query = "query107944136"; client.searchOrganizations(query); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .setEtag(ByteString.EMPTY) .build(); mockOrganizations.addResponse(expectedResponse); ResourceName resource = FolderName.of("[FOLDER]"); Policy actualResponse = client.getIamPolicy(resource); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0)); Assert.assertEquals(resource.toString(), actualRequest.getResource()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getIamPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { ResourceName resource = FolderName.of("[FOLDER]"); client.getIamPolicy(resource); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getIamPolicyTest2() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .setEtag(ByteString.EMPTY) .build(); mockOrganizations.addResponse(expectedResponse); String resource = "resource-341064690"; Policy actualResponse = client.getIamPolicy(resource); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0)); Assert.assertEquals(resource, actualRequest.getResource()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getIamPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { String resource = "resource-341064690"; client.getIamPolicy(resource); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .setEtag(ByteString.EMPTY) .build(); mockOrganizations.addResponse(expectedResponse); ResourceName resource = FolderName.of("[FOLDER]"); Policy actualResponse = client.setIamPolicy(resource); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0)); Assert.assertEquals(resource.toString(), actualRequest.getResource()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void setIamPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { ResourceName resource = FolderName.of("[FOLDER]"); client.setIamPolicy(resource); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setIamPolicyTest2() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .setEtag(ByteString.EMPTY) .build(); mockOrganizations.addResponse(expectedResponse); String resource = "resource-341064690"; Policy actualResponse = client.setIamPolicy(resource); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0)); Assert.assertEquals(resource, actualRequest.getResource()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void setIamPolicyExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { String resource = "resource-341064690"; client.setIamPolicy(resource); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void testIamPermissionsTest() throws Exception { TestIamPermissionsResponse expectedResponse = TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build(); mockOrganizations.addResponse(expectedResponse); ResourceName resource = FolderName.of("[FOLDER]"); List<String> permissions = new ArrayList<>(); TestIamPermissionsResponse actualResponse = client.testIamPermissions(resource, permissions); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0)); Assert.assertEquals(resource.toString(), actualRequest.getResource()); Assert.assertEquals(permissions, actualRequest.getPermissionsList()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void testIamPermissionsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { ResourceName resource = FolderName.of("[FOLDER]"); List<String> permissions = new ArrayList<>(); client.testIamPermissions(resource, permissions); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void testIamPermissionsTest2() throws Exception { TestIamPermissionsResponse expectedResponse = TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build(); mockOrganizations.addResponse(expectedResponse); String resource = "resource-341064690"; List<String> permissions = new ArrayList<>(); TestIamPermissionsResponse actualResponse = client.testIamPermissions(resource, permissions); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockOrganizations.getRequests(); Assert.assertEquals(1, actualRequests.size()); TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0)); Assert.assertEquals(resource, actualRequest.getResource()); Assert.assertEquals(permissions, actualRequest.getPermissionsList()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void testIamPermissionsExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockOrganizations.addException(exception); try { String resource = "resource-341064690"; List<String> permissions = new ArrayList<>(); client.testIamPermissions(resource, permissions); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.console;

import com.intellij.execution.impl.ConsoleViewImpl;
import com.intellij.execution.impl.ConsoleViewUtil;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.ide.DataManager;
import com.intellij.ide.GeneralSettings;
import com.intellij.ide.highlighter.HighlighterFactory;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.Language;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.EmptyAction;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.command.undo.UndoUtil;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.FocusChangeListener;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.ex.util.LexerEditorHighlighter;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.EditorHighlighterFactory;
import com.intellij.openapi.editor.impl.EditorFactoryImpl;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.impl.FileEditorManagerImpl;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.ui.components.JBScrollBar;
import com.intellij.ui.components.JBScrollPane.Alignment;
import com.intellij.util.DocumentUtil;
import com.intellij.util.FileContentUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.AbstractLayoutManager;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.Collections;

/**
 * A console view split into two stacked editors: a read-only "history" viewer on top and an
 * editable "console" (input) editor below, sharing one horizontal scroll bar.
 *
 * @author Gregory.Shrago
 * In case of REPL consider to use {@link LanguageConsoleBuilder}
 */
public class LanguageConsoleImpl extends ConsoleViewImpl implements LanguageConsoleView, DataProvider {
  // Owns the backing virtual file / PSI / document and common editor setup.
  private final Helper myHelper;

  private final EditorEx myConsoleEditor;   // editable input editor (bottom)
  private final EditorEx myHistoryViewer;   // read-only output/history viewer (top)
  private final Document myEditorDocument;  // document of the input editor
  private final JPanel myPanel = new JPanel(new MyLayout());
  // Shared horizontal scroll bar; MyModel keeps both editors' viewports in sync with it.
  private final JScrollBar myScrollBar = new JBScrollBar(Adjustable.HORIZONTAL);
  // Any document change in either editor may change content size -> re-run MyLayout.
  private final DocumentListener myDocumentAdapter = new DocumentListener() {
    @Override
    public void documentChanged(@NotNull DocumentEvent event) {
      myPanel.revalidate();
    }
  };
  @Nullable
  private String myPrompt = "> ";
  private ConsoleViewContentType myPromptAttributes = ConsoleViewContentType.USER_INPUT;

  // Last focused editor for this console (may also be a file editor opened on the same file).
  private EditorEx myCurrentEditor;

  private final MessageBusConnection myBusConnection;

  // Tracks which editor currently has focus; optionally saves all documents on focus gain
  // (mirrors frame-deactivation auto-save behavior, see PY-12487 below).
  private final FocusChangeListener myFocusListener = new FocusChangeListener() {
    @Override
    public void focusGained(@NotNull Editor editor) {
      myCurrentEditor = (EditorEx)editor;
      if (GeneralSettings.getInstance().isSaveOnFrameDeactivation()) {
        TransactionGuard.submitTransaction(LanguageConsoleImpl.this, () -> FileDocumentManager.getInstance().saveAllDocuments()); // PY-12487
      }
    }

    @Override
    public void focusLost(@NotNull Editor editor) {
    }
  };

  // Convenience ctor: backs the console with an in-memory LightVirtualFile of the given language.
  public LanguageConsoleImpl(@NotNull Project project, @NotNull String title, @NotNull Language language) {
    this(new Helper(project, new LightVirtualFile(title, language, "")));
  }

  public LanguageConsoleImpl(@NotNull Project project, @NotNull String title, @NotNull VirtualFile virtualFile) {
    this(new Helper(project, virtualFile).setTitle(title));
  }

  public LanguageConsoleImpl(@NotNull Helper helper) {
    super(helper.project, GlobalSearchScope.allScope(helper.project), true, true);
    myHelper = helper;
    EditorFactory editorFactory = EditorFactory.getInstance();
    myEditorDocument = helper.getDocument();
    myConsoleEditor = (EditorEx)editorFactory.createEditor(myEditorDocument, getProject());
    myConsoleEditor.getDocument().addDocumentListener(myDocumentAdapter);
    // Input editor's own horizontal bar is disabled; scrolling goes through the shared myScrollBar.
    myConsoleEditor.getScrollPane().getHorizontalScrollBar().setEnabled(false);
    myConsoleEditor.addFocusListener(myFocusListener);
    myConsoleEditor.getSettings().setVirtualSpace(false);
    myCurrentEditor = myConsoleEditor;
    Document historyDocument = ((EditorFactoryImpl)editorFactory).createDocument(true);
    // History is append-only output; undo would make no sense there.
    UndoUtil.disableUndoFor(historyDocument);
    myHistoryViewer = (EditorEx)editorFactory.createViewer(historyDocument, getProject(), EditorKind.CONSOLE);
    myHistoryViewer.getDocument().addDocumentListener(myDocumentAdapter);
    myScrollBar.setModel(new MyModel(myScrollBar, myHistoryViewer, myConsoleEditor));
    myScrollBar.putClientProperty(Alignment.class, Alignment.BOTTOM);

    myBusConnection = getProject().getMessageBus().connect();
    // action shortcuts are not yet registered
    ApplicationManager.getApplication().invokeLater(() -> installEditorFactoryListener(), getProject().getDisposed());
  }

  @NotNull
  @Override
  protected final EditorEx doCreateConsoleEditor() {
    // The base ConsoleViewImpl's "console editor" is our history viewer.
    return myHistoryViewer;
  }

  @Override
  protected final void disposeEditor() {
    // Editors are released in dispose() below, not by the superclass.
  }

  @NotNull
  @Override
  protected JComponent createCenterComponent() {
    initComponents();
    return myPanel;
  }

  @Override
  public JComponent getPreferredFocusableComponent() {
    return getConsoleEditor().getContentComponent();
  }

  private void initComponents() {
    setupComponents();

    myPanel.add(myHistoryViewer.getComponent());
    myPanel.add(myConsoleEditor.getComponent());
    myPanel.add(myScrollBar);
    myPanel.setBackground(myConsoleEditor.getBackgroundColor());
    DataManager.registerDataProvider(myPanel, this);
    setPromptInner(myPrompt);
  }

  @Override
  public void setConsoleEditorEnabled(boolean consoleEditorEnabled) {
    if (isConsoleEditorEnabled() == consoleEditorEnabled) {
      return;
    }
    if (consoleEditorEnabled) {
      // Re-enabling the inline input editor: close any standalone file editor on the same file.
      FileEditorManager.getInstance(getProject()).closeFile(getVirtualFile());
      myCurrentEditor = myConsoleEditor;
    }
    // With the input editor hidden, the history viewer manages its own horizontal scrolling.
    setHistoryScrollBarVisible(!consoleEditorEnabled);
    myScrollBar.setVisible(consoleEditorEnabled);
    myConsoleEditor.getComponent().setVisible(consoleEditorEnabled);
  }

  private void setHistoryScrollBarVisible(boolean visible) {
    JScrollBar prev = myHistoryViewer.getScrollPane().getHorizontalScrollBar();
    prev.setEnabled(visible);
  }

  private void setupComponents() {
    myHelper.setupEditor(myConsoleEditor);
    myHelper.setupEditor(myHistoryViewer);

    myHistoryViewer.getComponent().setMinimumSize(JBUI.emptySize());
    myHistoryViewer.getComponent().setPreferredSize(JBUI.emptySize());
    myHistoryViewer.setCaretEnabled(false);

    myConsoleEditor.setContextMenuGroupId(IdeActions.GROUP_CONSOLE_EDITOR_POPUP);
    myConsoleEditor.setHighlighter(
      EditorHighlighterFactory.getInstance().createEditorHighlighter(getVirtualFile(), myConsoleEditor.getColorsScheme(), getProject()));

    setHistoryScrollBarVisible(false);

    // Typing into the (read-only) history should redirect focus and the keystroke to the input editor.
    myHistoryViewer.getContentComponent().addKeyListener(new KeyAdapter() {
      @Override
      public void keyTyped(KeyEvent event) {
        if (isConsoleEditorEnabled() && UIUtil.isReallyTypedEvent(event)) {
          IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(myConsoleEditor.getContentComponent(), true));
          myConsoleEditor.processKeyTyped(event);
        }
      }
    });

    EmptyAction.registerActionShortcuts(myHistoryViewer.getComponent(), myConsoleEditor.getComponent());
  }

  @Override
  public final boolean isConsoleEditorEnabled() {
    return myConsoleEditor.getComponent().isVisible();
  }

  @Override
  @Nullable
  public String getPrompt() {
    return myPrompt;
  }

  @Override
  @Nullable
  public ConsoleViewContentType getPromptAttributes() {
    return myPromptAttributes;
  }

  @Override
  public void setPromptAttributes(@NotNull ConsoleViewContentType textAttributes) {
    myPromptAttributes = textAttributes;
  }

  @Override
  public void setPrompt(@Nullable String prompt) {
    // always add space to the prompt otherwise it may look ugly
    myPrompt = prompt != null && !prompt.endsWith(" ") ? prompt + " " : prompt;
    setPromptInner(myPrompt);
  }

  private void setPromptInner(@Nullable final String prompt) {
    // Must run on EDT; no-op if the editor is already disposed.
    UIUtil.invokeAndWaitIfNeeded((Runnable)() -> {
      if (!myConsoleEditor.isDisposed()) {
        myConsoleEditor.setPrefixTextAndAttributes(prompt, myPromptAttributes.getAttributes());
      }
    });
  }

  @Override
  public void setEditable(boolean editable) {
    myConsoleEditor.setRendererMode(!editable);
    // Hide the prompt while read-only.
    setPromptInner(editable ? myPrompt : "");
  }

  @Override
  public boolean isEditable() {
    return !myConsoleEditor.isRendererMode();
  }

  @Override
  @NotNull
  public final PsiFile getFile() {
    return myHelper.getFileSafe();
  }

  @Override
  @NotNull
  public final VirtualFile getVirtualFile() {
    return myHelper.virtualFile;
  }

  @Override
  @NotNull
  public final EditorEx getHistoryViewer() {
    return myHistoryViewer;
  }

  @Override
  @NotNull
  public final Document getEditorDocument() {
    return myEditorDocument;
  }

  @Override
  @NotNull
  public final EditorEx getConsoleEditor() {
    return myConsoleEditor;
  }

  @Override
  @NotNull
  public String getTitle() {
    return myHelper.title;
  }

  @Override
  public void setTitle(@NotNull String title) {
    myHelper.setTitle(title);
  }

  // Copies the given range from the editor into the history viewer without erasing the input.
  public String addToHistory(@NotNull TextRange textRange, @NotNull EditorEx editor, boolean preserveMarkup) {
    return addToHistoryInner(textRange, editor, false, preserveMarkup);
  }

  /**
   * Grabs the full input text for execution. Optionally appends it to history and/or clears
   * the input; when not clearing, the whole input is left selected instead.
   *
   * @return the input text as it was before any clearing
   */
  @NotNull
  public String prepareExecuteAction(boolean addToHistory, boolean preserveMarkup, boolean clearInput) {
    EditorEx editor = getCurrentEditor();
    Document document = editor.getDocument();
    String text = document.getText();
    TextRange range = new TextRange(0, document.getTextLength());
    if (!clearInput) {
      editor.getSelectionModel().setSelection(range.getStartOffset(), range.getEndOffset());
    }

    if (addToHistory) {
      addToHistoryInner(range, editor, clearInput, preserveMarkup);
    }
    else if (clearInput) {
      setInputText("");
    }
    return text;
  }

  // EDT-only. Appends the range to history, optionally deletes it from the source editor,
  // and always scrolls history to the end.
  @NotNull
  protected String addToHistoryInner(@NotNull final TextRange textRange, @NotNull final EditorEx editor, boolean erase, final boolean preserveMarkup) {
    ApplicationManager.getApplication().assertIsDispatchThread();

    String result = addTextRangeToHistory(textRange, editor, preserveMarkup);
    if (erase) {
      DocumentUtil.writeInRunUndoTransparentAction(
        () -> editor.getDocument().deleteString(textRange.getStartOffset(), textRange.getEndOffset()));
    }
    // always scroll to end on user input
    scrollToEnd();
    return result;
  }

  // Prints the given input range into the console's history with syntax highlighting when a lexer
  // highlighter is available; falls back to plain USER_INPUT attributes otherwise. Handles
  // injected-language editors (EditorWindow) by re-highlighting the unescaped host text.
  public static String printWithHighlighting(@NotNull LanguageConsoleView console, @NotNull Editor inputEditor, @NotNull TextRange textRange) {
    String text;
    EditorHighlighter highlighter;
    if (inputEditor instanceof EditorWindow) {
      PsiFile file = ((EditorWindow)inputEditor).getInjectedFile();
      highlighter =
        HighlighterFactory.createHighlighter(file.getVirtualFile(), EditorColorsManager.getInstance().getGlobalScheme(), console.getProject());
      String fullText = InjectedLanguageUtil.getUnescapedText(file, null, null);
      highlighter.setText(fullText);
      text = textRange.substring(fullText);
    }
    else {
      text = inputEditor.getDocument().getText(textRange);
      highlighter = ((EditorEx)inputEditor).getHighlighter();
    }
    SyntaxHighlighter syntax =
      highlighter instanceof LexerEditorHighlighter ? ((LexerEditorHighlighter)highlighter).getSyntaxHighlighter() : null;
    ((LanguageConsoleImpl)console).doAddPromptToHistory();
    if (syntax != null) {
      ConsoleViewUtil.printWithHighlighting(console, text, syntax);
    }
    else {
      console.print(text, ConsoleViewContentType.USER_INPUT);
    }
    console.print("\n", ConsoleViewContentType.NORMAL_OUTPUT);
    return text;
  }

  // NOTE(review): preserveMarkup is currently unused — the markup-copying logic is the
  // commented-out duplicateHighlighters code below.
  @NotNull
  protected String addTextRangeToHistory(@NotNull TextRange textRange, @NotNull EditorEx inputEditor, boolean preserveMarkup) {
    return printWithHighlighting(this, inputEditor, textRange);

    //if (preserveMarkup) {
    //  duplicateHighlighters(markupModel, DocumentMarkupModel.forDocument(inputEditor.getDocument(), myProject, true), offset, textRange);
    //  // don't copy editor markup model, i.e. brace matcher, spell checker, etc.
    //  // duplicateHighlighters(markupModel, inputEditor.getMarkupModel(), offset, textRange);
    //}
  }

  protected void doAddPromptToHistory() {
    if (myPrompt != null) {
      print(myPrompt, myPromptAttributes);
    }
  }

  //private static void duplicateHighlighters(@NotNull MarkupModel to, @NotNull MarkupModel from, int offset, @NotNull TextRange textRange) {
  //  for (RangeHighlighter rangeHighlighter : from.getAllHighlighters()) {
  //    if (!rangeHighlighter.isValid()) {
  //      continue;
  //    }
  //    Object tooltip = rangeHighlighter.getErrorStripeTooltip();
  //    HighlightInfo highlightInfo = tooltip instanceof HighlightInfo? (HighlightInfo)tooltip : null;
  //    if (highlightInfo != null) {
  //      if (highlightInfo.getSeverity() != HighlightSeverity.INFORMATION) {
  //        continue;
  //      }
  //      if (highlightInfo.type.getAttributesKey() == EditorColors.IDENTIFIER_UNDER_CARET_ATTRIBUTES) {
  //        continue;
  //      }
  //    }
  //    int localOffset = textRange.getStartOffset();
  //    int start = Math.max(rangeHighlighter.getStartOffset(), localOffset) - localOffset;
  //    int end = Math.min(rangeHighlighter.getEndOffset(), textRange.getEndOffset()) - localOffset;
  //    if (start > end) {
  //      continue;
  //    }
  //    RangeHighlighter h = to.addRangeHighlighter(start + offset, end + offset, rangeHighlighter.getLayer(),
  //                                                rangeHighlighter.getTextAttributes(), rangeHighlighter.getTargetArea());
  //    ((RangeHighlighterEx)h).setAfterEndOfLine(((RangeHighlighterEx)rangeHighlighter).isAfterEndOfLine());
  //  }
  //}

  @Override
  public void dispose() {
    super.dispose();
    // double dispose via RunContentDescriptor and ContentImpl
    if (myHistoryViewer.isDisposed()) return;

    myConsoleEditor.getDocument().removeDocumentListener(myDocumentAdapter);
    myHistoryViewer.getDocument().removeDocumentListener(myDocumentAdapter);

    myBusConnection.deliverImmediately();
    Disposer.dispose(myBusConnection);

    EditorFactory editorFactory = EditorFactory.getInstance();
    editorFactory.releaseEditor(myConsoleEditor);
    editorFactory.releaseEditor(myHistoryViewer);

    if (getProject().isOpen()) {
      FileEditorManager editorManager = FileEditorManager.getInstance(getProject());
      if (editorManager.isFileOpen(getVirtualFile())) {
        editorManager.closeFile(getVirtualFile());
      }
    }
  }

  @Nullable
  @Override
  public Object getData(@NotNull @NonNls String dataId) {
    return super.getData(dataId);
  }

  // Keeps myCurrentEditor in sync when the console's backing file is opened/closed in regular
  // file editors, and registers shortcuts/focus listeners on those editors.
  private void installEditorFactoryListener() {
    FileEditorManagerListener fileEditorListener = new FileEditorManagerListener() {
      @Override
      public void fileOpened(@NotNull FileEditorManager source, @NotNull VirtualFile file) {
        if (myConsoleEditor == null || !Comparing.equal(file, getVirtualFile())) {
          return;
        }

        Editor selectedTextEditor = source.getSelectedTextEditor();
        for (FileEditor fileEditor : source.getAllEditors(file)) {
          if (!(fileEditor instanceof TextEditor)) {
            continue;
          }

          final EditorEx editor = (EditorEx)((TextEditor)fileEditor).getEditor();
          editor.addFocusListener(myFocusListener);
          if (selectedTextEditor == editor) { // already focused
            myCurrentEditor = editor;
          }
          EmptyAction.registerActionShortcuts(editor.getComponent(), myConsoleEditor.getComponent());
        }
      }

      @Override
      public void fileClosed(@NotNull FileEditorManager source, @NotNull VirtualFile file) {
        if (!Comparing.equal(file, getVirtualFile())) {
          return;
        }
        if (!Boolean.TRUE.equals(file.getUserData(FileEditorManagerImpl.CLOSING_TO_REOPEN))) {
          if (myCurrentEditor != null && myCurrentEditor.isDisposed()) {
            myCurrentEditor = null;
          }
        }
      }
    };
    myBusConnection.subscribe(FileEditorManagerListener.FILE_EDITOR_MANAGER, fileEditorListener);
    FileEditorManager editorManager = FileEditorManager.getInstance(getProject());
    if (editorManager.isFileOpen(getVirtualFile())) {
      fileEditorListener.fileOpened(editorManager, getVirtualFile());
    }
  }

  @Override
  @NotNull
  public EditorEx getCurrentEditor() {
    // Falls back to the console input editor when no tracked editor is focused.
    return ObjectUtils.notNull(myCurrentEditor, myConsoleEditor);
  }

  @Override
  @NotNull
  public Language getLanguage() {
    return getFile().getLanguage();
  }

  @Override
  public void setLanguage(@NotNull Language language) {
    myHelper.setLanguage(language);
    // Force re-resolution of the PSI file for the new language.
    myHelper.getFileSafe();
  }

  @Override
  public void setInputText(@NotNull final String query) {
    DocumentUtil.writeInRunUndoTransparentAction(() -> myConsoleEditor.getDocument().setText(StringUtil.convertLineSeparators(query)));
  }

  boolean isHistoryViewerForceAdditionalColumnsUsage() {
    return true;
  }

  int getMinHistoryLineCount() {
    return 2;
  }

  /**
   * Bundles the console's backing {@link VirtualFile} with lazy PSI/document access and the
   * shared editor setup used by both the history viewer and the input editor.
   */
  public static class Helper {
    public final Project project;
    public final VirtualFile virtualFile;
    String title;
    PsiFile file;

    public Helper(@NotNull Project project, @NotNull VirtualFile virtualFile) {
      this.project = project;
      this.virtualFile = virtualFile;
      title = virtualFile.getName();
    }

    public Helper setTitle(String title) {
      this.title = title;
      return this;
    }

    @NotNull
    public PsiFile getFile() {
      return ReadAction.compute(() -> PsiUtilCore.getPsiFile(project, virtualFile));
    }

    @NotNull
    public Document getDocument() {
      Document document = FileDocumentManager.getInstance().getDocument(virtualFile);
      if (document == null) {
        Language language = (virtualFile instanceof LightVirtualFile) ? ((LightVirtualFile)virtualFile).getLanguage() : null;
        throw new AssertionError(String.format("no document for: %s (fileType: %s, language: %s, length: %s, valid: %s)",
                                               virtualFile, virtualFile.getFileType(), language, virtualFile.getLength(), virtualFile.isValid()));
      }
      return document;
    }

    // Only supported for LightVirtualFile-backed consoles; reparses to pick up the new language.
    public void setLanguage(Language language) {
      if (!(virtualFile instanceof LightVirtualFile)) {
        throw new UnsupportedOperationException();
      }
      ((LightVirtualFile)virtualFile).setLanguage(language);
      ((LightVirtualFile)virtualFile).setContent(getDocument(), getDocument().getText(), false);
      FileContentUtil.reparseFiles(project, Collections.singletonList(virtualFile), false);
    }

    public void setupEditor(@NotNull EditorEx editor) {
      ConsoleViewUtil.setupConsoleEditor(editor, false, false);
      editor.getContentComponent().setFocusCycleRoot(false);
      editor.setHorizontalScrollbarVisible(true);
      editor.setVerticalScrollbarVisible(true);
      editor.setBorder(null);

      EditorSettings editorSettings = editor.getSettings();
      editorSettings.setAdditionalLinesCount(1);
      editorSettings.setAdditionalColumnsCount(1);

      DataManager.registerDataProvider(editor.getComponent(), (dataId) -> getEditorData(editor, dataId));
    }

    // Caches the PSI file; re-resolves when absent or invalidated (e.g. after setLanguage).
    @NotNull
    PsiFile getFileSafe() {
      return file == null || !file.isValid() ? file = getFile() : file;
    }

    @Nullable
    protected Object getEditorData(@NotNull EditorEx editor, String dataId) {
      if (OpenFileDescriptor.NAVIGATE_IN_EDITOR.is(dataId)) {
        return editor;
      }
      else if (project.isInitialized()) {
        Caret caret = editor.getCaretModel().getCurrentCaret();
        return FileEditorManagerEx.getInstanceEx(project).getData(dataId, editor, caret);
      }
      return null;
    }
  }

  // Stacks history above input, giving the input its preferred height when possible, snapping the
  // history height to whole line heights, and keeping the history viewport anchored when it shrinks.
  private class MyLayout extends AbstractLayoutManager {
    @Override
    public Dimension preferredLayoutSize(final Container parent) {
      return new Dimension(0, 0);
    }

    @Override
    public void layoutContainer(@NotNull final Container parent) {
      final int componentCount = parent.getComponentCount();
      if (componentCount == 0) {
        return;
      }

      final EditorEx history = myHistoryViewer;
      final EditorEx input = isConsoleEditorEnabled() ? myConsoleEditor : null;
      if (input == null) {
        // Input hidden: history gets the whole area.
        parent.getComponent(0).setBounds(parent.getBounds());
        return;
      }

      final Dimension panelSize = parent.getSize();
      if (myScrollBar.isVisible()) {
        Dimension size = myScrollBar.getPreferredSize();
        if (panelSize.height < size.height) return;
        panelSize.height -= size.height;
        myScrollBar.setBounds(0, panelSize.height, panelSize.width, size.height);
      }
      if (panelSize.getHeight() <= 0) {
        return;
      }
      final Dimension historySize = history.getContentSize();
      final Dimension inputSize = input.getContentSize();

      // deal with width
      if (isHistoryViewerForceAdditionalColumnsUsage()) {
        history.getSoftWrapModel().forceAdditionalColumnsUsage();

        int minAdditionalColumns = 2;
        // calculate content size without additional columns except minimal amount
        int historySpaceWidth = EditorUtil.getPlainSpaceWidth(history);
        historySize.width += historySpaceWidth * (minAdditionalColumns - history.getSettings().getAdditionalColumnsCount());
        // calculate content size without additional columns except minimal amount
        int inputSpaceWidth = EditorUtil.getPlainSpaceWidth(input);
        inputSize.width += inputSpaceWidth * (minAdditionalColumns - input.getSettings().getAdditionalColumnsCount());
        // calculate additional columns according to the corresponding width
        int max = Math.max(historySize.width, inputSize.width);
        history.getSettings().setAdditionalColumnsCount(minAdditionalColumns + (max - historySize.width) / historySpaceWidth);
        input.getSettings().setAdditionalColumnsCount(minAdditionalColumns + (max - inputSize.width) / inputSpaceWidth);
      }

      int newInputHeight;
      // deal with height, WEB-11122 we cannot trust editor width - it could be 0 in case of soft wrap even if editor has text
      if (history.getDocument().getLineCount() == 0) {
        historySize.height = 0;
      }

      int minHistoryHeight = historySize.height > 0 ? getMinHistoryLineCount() * history.getLineHeight() : 0;
      int minInputHeight = input.isViewer() ? 0 : input.getLineHeight();
      final int inputPreferredHeight = input.isViewer() ? 0 : Math.max(minInputHeight, inputSize.height);
      final int historyPreferredHeight = Math.max(minHistoryHeight, historySize.height);
      if (panelSize.height < minInputHeight) {
        newInputHeight = panelSize.height;
      }
      else if (panelSize.height < inputPreferredHeight) {
        newInputHeight = panelSize.height - minHistoryHeight;
      }
      else if (panelSize.height < (inputPreferredHeight + historyPreferredHeight) || inputPreferredHeight == 0) {
        newInputHeight = inputPreferredHeight;
      }
      else {
        newInputHeight = panelSize.height - historyPreferredHeight;
      }

      int oldHistoryHeight = history.getComponent().getHeight();
      int newHistoryHeight = panelSize.height - newInputHeight;
      // Snap the history height down to a whole number of text lines; give the remainder to input.
      int delta = newHistoryHeight - ((newHistoryHeight / history.getLineHeight()) * history.getLineHeight());
      newHistoryHeight -= delta;
      newInputHeight += delta;

      // apply new bounds & scroll history viewer
      input.getComponent().setBounds(0, newHistoryHeight, panelSize.width, newInputHeight);
      history.getComponent().setBounds(0, 0, panelSize.width, newHistoryHeight);
      input.getComponent().doLayout();
      history.getComponent().doLayout();
      if (newHistoryHeight < oldHistoryHeight) {
        // Keep the same content visible when the history area shrinks.
        JViewport viewport = history.getScrollPane().getViewport();
        Point position = viewport.getViewPosition();
        position.translate(0, oldHistoryHeight - newHistoryHeight);
        viewport.setViewPosition(position);
      }
    }
  }

  // Shared horizontal scroll model: mirrors scroll-bar changes into both editors' viewports and
  // vice versa. myInternalChange guards against feedback loops between the two directions.
  private static final class MyModel extends DefaultBoundedRangeModel {
    private volatile boolean myInternalChange;
    private final JScrollBar myBar;
    private final EditorEx myFirstEditor;
    private final EditorEx mySecondEditor;
    private int myFirstValue;
    private int mySecondValue;

    private MyModel(JScrollBar bar, EditorEx first, EditorEx second) {
      myBar = bar;
      myFirstEditor = first;
      mySecondEditor = second;
      addChangeListener(event -> onChange());
      first.getScrollPane().getViewport().addChangeListener(event -> onUpdate(event.getSource()));
      second.getScrollPane().getViewport().addChangeListener(event -> onUpdate(event.getSource()));
    }

    private boolean isInternal() {
      return myInternalChange || !myFirstEditor.getComponent().isVisible() || !mySecondEditor.getComponent().isVisible();
    }

    private void onChange() {
      if (isInternal()) return;
      myInternalChange = true;
      setValue(myFirstEditor.getScrollPane().getViewport(), getValue());
      setValue(mySecondEditor.getScrollPane().getViewport(), getValue());
      myInternalChange = false;
    }

    private void onUpdate(Object source) {
      if (isInternal()) return;
      JViewport first = myFirstEditor.getScrollPane().getViewport();
      JViewport second = mySecondEditor.getScrollPane().getViewport();
      int value = getValue();
      if (source == first) {
        Point position = first.getViewPosition();
        if (position.x != myFirstValue) {
          myFirstValue = value = position.x;
        }
      }
      else {
        Point position = second.getViewPosition();
        if (position.x != mySecondValue) {
          mySecondValue = value = position.x;
        }
      }
      // Range covers the widest editor; the bar is disabled when nothing can scroll.
      int ext = Math.min(first.getExtentSize().width, second.getExtentSize().width);
      int max = Math.max(first.getViewSize().width, second.getViewSize().width);
      setRangeProperties(value, ext, 0, max, false);
      myBar.setEnabled(ext < max);
    }

    private static void setValue(JViewport viewport, int value) {
      Point position = viewport.getViewPosition();
      // Clamp to the scrollable range of this particular viewport.
      position.x = Math.max(0, Math.min(value, viewport.getViewSize().width - viewport.getExtentSize().width));
      viewport.setViewPosition(position);
    }
  }
}
/*
 * Copyright (c) 2015 Spotify AB.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.spotify.heroic.suggest.elasticsearch;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableList;
import com.google.common.hash.HashCode;
import com.google.common.util.concurrent.RateLimiter;
import com.spotify.heroic.ExtraParameters;
import com.spotify.heroic.common.Groups;
import com.spotify.heroic.dagger.PrimaryComponent;
import com.spotify.heroic.elasticsearch.BackendType;
import com.spotify.heroic.elasticsearch.BackendTypeFactory;
import com.spotify.heroic.elasticsearch.Connection;
import com.spotify.heroic.elasticsearch.ConnectionModule;
import com.spotify.heroic.elasticsearch.DefaultRateLimitedCache;
import com.spotify.heroic.elasticsearch.DisabledRateLimitedCache;
import com.spotify.heroic.elasticsearch.RateLimitedCache;
import com.spotify.heroic.lifecycle.LifeCycle;
import com.spotify.heroic.lifecycle.LifeCycleManager;
import com.spotify.heroic.suggest.SuggestBackend;
import com.spotify.heroic.suggest.SuggestModule;
import dagger.Component;
import dagger.Lazy;
import dagger.Module;
import dagger.Provides;
import eu.toolchain.async.Managed;
import lombok.Data;
import lombok.RequiredArgsConstructor;
import org.apache.commons.lang3.tuple.Pair;

import javax.inject.Named;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Optional.empty;
import static java.util.Optional.of;
import static java.util.Optional.ofNullable;

/**
 * Jackson-deserializable configuration for an Elasticsearch-backed
 * {@link SuggestBackend}; wires the chosen backend implementation
 * ("kv" or legacy "v1") together through a Dagger component.
 */
@Data
public final class ElasticsearchSuggestModule implements SuggestModule {
    // extra runtime parameter that forces index configuration for this module only
    public static final String ELASTICSEARCH_CONFIGURE_PARAM = "elasticsearch.configure";

    private static final double DEFAULT_WRITES_PER_SECOND = 3000d;
    private static final long DEFAULT_WRITES_CACHE_DURATION_MINUTES = 240L;
    public static final String DEFAULT_GROUP = "elasticsearch";
    public static final String DEFAULT_TEMPLATE_NAME = "heroic-suggest";
    public static final String DEFAULT_BACKEND_TYPE = "default";

    private final Optional<String> id;
    private final Groups groups;
    private final ConnectionModule connection;
    // values <= 0 disable rate limiting of the write cache (see writeCache())
    private final double writesPerSecond;
    private final long writeCacheDurationMinutes;
    private final String templateName;
    private final String backendType;

    // factory used when the configured backendType name is unknown
    private static BackendTypeFactory<SuggestBackend> defaultSetup = SuggestBackendKV.factory();

    // registry of supported backend type names -> factories
    private static final Map<String, BackendTypeFactory<SuggestBackend>> backendTypes =
        new HashMap<>();

    static {
        backendTypes.put("kv", defaultSetup);
        backendTypes.put("v1", SuggestBackendV1.factory());
    }

    /** Lists the supported backend type names ("kv", "v1"). */
    public static final List<String> types() {
        return ImmutableList.copyOf(backendTypes.keySet());
    }

    // resolved from backendType at construction time; excluded from the JSON shape
    @JsonIgnore
    private final BackendTypeFactory<SuggestBackend> backendTypeBuilder;

    /**
     * JSON creator; every absent field falls back to the corresponding
     * DEFAULT_* constant above.
     */
    @JsonCreator
    public ElasticsearchSuggestModule(
        @JsonProperty("id") Optional<String> id,
        @JsonProperty("groups") Optional<Groups> groups,
        @JsonProperty("connection") Optional<ConnectionModule> connection,
        @JsonProperty("writesPerSecond") Optional<Double> writesPerSecond,
        @JsonProperty("writeCacheDurationMinutes") Optional<Long> writeCacheDurationMinutes,
        @JsonProperty("templateName") Optional<String> templateName,
        @JsonProperty("backendType") Optional<String> backendType
    ) {
        this.id = id;
        this.groups = groups.orElseGet(Groups::empty).or(DEFAULT_GROUP);
        this.connection = connection.orElseGet(ConnectionModule::buildDefault);
        this.writesPerSecond = writesPerSecond.orElse(DEFAULT_WRITES_PER_SECOND);
        this.writeCacheDurationMinutes =
            writeCacheDurationMinutes.orElse(DEFAULT_WRITES_CACHE_DURATION_MINUTES);
        this.templateName = templateName.orElse(DEFAULT_TEMPLATE_NAME);
        this.backendType = backendType.orElse(DEFAULT_BACKEND_TYPE);
        // unrecognized names (including "default") silently fall back to the
        // default ("kv") factory
        this.backendTypeBuilder =
            backendType.flatMap(bt -> ofNullable(backendTypes.get(bt))).orElse(defaultSetup);
    }

    /** Builds the Dagger component exposing the backend and its life cycle. */
    @Override
    public Exposed module(PrimaryComponent primary, Depends depends, final String id) {
        final BackendType<SuggestBackend> backendType = backendTypeBuilder.setup();

        return DaggerElasticsearchSuggestModule_C
            .builder()
            .primaryComponent(primary)
            .depends(depends)
            .connectionModule(connection)
            .m(new M(backendType))
            .build();
    }

    @ElasticsearchScope
    @Component(modules = {M.class, ConnectionModule.class},
        dependencies = {PrimaryComponent.class, Depends.class})
    interface C extends Exposed {
        @Override
        SuggestBackend backend();

        @Override
        LifeCycle life();
    }

    /**
     * Dagger module providing the per-backend bindings. Deliberately a
     * non-static inner class: the providers read the enclosing module's
     * configuration fields (groups, templateName, writesPerSecond, ...).
     */
    @RequiredArgsConstructor
    @Module
    class M {
        private final BackendType<SuggestBackend> backendType;

        @Provides
        @ElasticsearchScope
        public Groups groups() {
            return groups;
        }

        @Provides
        @ElasticsearchScope
        public Managed<Connection> connection(ConnectionModule.Provider provider) {
            return provider.construct(templateName, backendType.mappings(),
                backendType.settings());
        }

        // true when either the global configure flag or the
        // elasticsearch-specific one is present
        @Provides
        @ElasticsearchScope
        @Named("configure")
        public boolean configure(ExtraParameters params) {
            return params.contains(ExtraParameters.CONFIGURE) ||
                params.contains(ELASTICSEARCH_CONFIGURE_PARAM);
        }

        /**
         * Cache of recently written (key, hash) pairs, expiring after
         * writeCacheDurationMinutes; optionally rate limited by
         * writesPerSecond (disabled when <= 0).
         */
        @Provides
        @ElasticsearchScope
        public RateLimitedCache<Pair<String, HashCode>> writeCache() {
            final Cache<Pair<String, HashCode>, Boolean> cache = CacheBuilder
                .newBuilder()
                .concurrencyLevel(4)
                .expireAfterWrite(writeCacheDurationMinutes, TimeUnit.MINUTES)
                .build();

            if (writesPerSecond <= 0d) {
                return new DisabledRateLimitedCache<>(cache.asMap());
            }

            return new DefaultRateLimitedCache<>(cache.asMap(),
                RateLimiter.create(writesPerSecond));
        }

        // Lazy<> prevents the unused backend implementation from being instantiated
        @Provides
        @ElasticsearchScope
        public SuggestBackend suggestBackend(Lazy<SuggestBackendV1> v1,
                                             Lazy<SuggestBackendKV> kv) {
            if (backendType.type().equals(SuggestBackendV1.class)) {
                return v1.get();
            }

            return kv.get();
        }

        @Provides
        @ElasticsearchScope
        public LifeCycle life(
            LifeCycleManager manager, Lazy<SuggestBackendV1> v1, Lazy<SuggestBackendKV> kv
        ) {
            if (backendType.type().equals(SuggestBackendV1.class)) {
                return manager.build(v1.get());
            }

            return manager.build(kv.get());
        }
    }

    @Override
    public Optional<String> id() {
        return id;
    }

    @Override
    public String buildId(int i) {
        return String.format("elasticsearch-suggest#%d", i);
    }

    public static Builder builder() {
        return new Builder();
    }

    /** Programmatic alternative to the JSON creator; every field is optional. */
    public static class Builder {
        private Optional<String> id = empty();
        private Optional<Groups> groups = empty();
        private Optional<ConnectionModule> connection = empty();
        private Optional<Double> writesPerSecond = empty();
        private Optional<Long> writeCacheDurationMinutes = empty();
        private Optional<String> templateName = empty();
        private Optional<String> backendType = empty();

        public Builder id(final String id) {
            checkNotNull(id, "id");
            this.id = of(id);
            return this;
        }

        public Builder group(final Groups groups) {
            checkNotNull(groups, "groups");
            this.groups = of(groups);
            return this;
        }

        public Builder connection(final ConnectionModule connection) {
            checkNotNull(connection, "connection");
            this.connection = of(connection);
            return this;
        }

        // NOTE(review): checkNotNull on a primitive parameter is a no-op (the
        // boxed value can never be null); kept for symmetry with the other setters.
        public Builder writesPerSecond(double writesPerSecond) {
            checkNotNull(writesPerSecond, "writesPerSecond");
            this.writesPerSecond = of(writesPerSecond);
            return this;
        }

        // NOTE(review): same no-op checkNotNull as writesPerSecond above.
        public Builder writeCacheDurationMinutes(long writeCacheDurationMinutes) {
            checkNotNull(writeCacheDurationMinutes, "writeCacheDurationMinutes");
            this.writeCacheDurationMinutes = of(writeCacheDurationMinutes);
            return this;
        }

        public Builder templateName(final String templateName) {
            checkNotNull(templateName, "templateName");
            this.templateName = of(templateName);
            return this;
        }

        public Builder backendType(final String backendType) {
            checkNotNull(backendType, "backendType");
            this.backendType = of(backendType);
            return this;
        }

        public ElasticsearchSuggestModule build() {
            return new ElasticsearchSuggestModule(id, groups, connection, writesPerSecond,
                writeCacheDurationMinutes, templateName, backendType);
        }
    }
}
/* * Copyright (c) 1997, 2007, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
 */

package sun.security.x509;

import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.Principal;
import java.security.PublicKey;
import java.security.PrivateKey;
import java.security.Security;
import java.security.Signature;
import java.security.NoSuchAlgorithmException;
import java.security.InvalidKeyException;
import java.security.NoSuchProviderException;
import java.security.SignatureException;
import java.security.cert.Certificate;
import java.security.cert.X509CRL;
import java.security.cert.X509Certificate;
import java.security.cert.X509CRLEntry;
import java.security.cert.CRLException;
import java.util.*;

import javax.security.auth.x500.X500Principal;

//import sun.security.provider.X509Factory;
import sun.security.util.*;
import sun.misc.HexDumpEncoder;

/**
 * <p>
 * An implementation for X509 CRL (Certificate Revocation List).
 * <p>
 * The X.509 v2 CRL format is described below in ASN.1:
 * <pre>
 * CertificateList  ::=  SEQUENCE  {
 *     tbsCertList          TBSCertList,
 *     signatureAlgorithm   AlgorithmIdentifier,
 *     signature            BIT STRING  }
 * </pre>
 * More information can be found in
 * <a href="http://www.ietf.org/rfc/rfc3280.txt">RFC 3280: Internet X.509
 * Public Key Infrastructure Certificate and CRL Profile</a>.
 * <p>
 * The ASN.1 definition of <code>tbsCertList</code> is:
 * <pre>
 * TBSCertList  ::=  SEQUENCE  {
 *     version                 Version OPTIONAL,
 *                             -- if present, must be v2
 *     signature               AlgorithmIdentifier,
 *     issuer                  Name,
 *     thisUpdate              ChoiceOfTime,
 *     nextUpdate              ChoiceOfTime OPTIONAL,
 *     revokedCertificates     SEQUENCE OF SEQUENCE  {
 *         userCertificate         CertificateSerialNumber,
 *         revocationDate          ChoiceOfTime,
 *         crlEntryExtensions      Extensions OPTIONAL
 *                                 -- if present, must be v2
 *         }  OPTIONAL,
 *     crlExtensions           [0]  EXPLICIT Extensions OPTIONAL
 *                                 -- if present, must be v2
 *     }
 * </pre>
 *
 * @author Hemma Prafullchandra
 * @see X509CRL
 */
public class X509CRLImpl extends X509CRL {

    // CRL data, and its envelope
    private byte[] signedCRL = null; // DER encoded crl
    private byte[] signature = null; // raw signature bits
    private byte[] tbsCertList = null; // DER encoded "to-be-signed" CRL
    private AlgorithmId sigAlgId = null; // sig alg in CRL

    // crl information
    private int version;                // stored zero-based: 0 == v1, 1 == v2
    private AlgorithmId infoSigAlgId; // sig alg in "to-be-signed" crl
    private X500Name issuer = null;
    private X500Principal issuerPrincipal = null; // lazily-cached form of issuer
    private Date thisUpdate = null;
    private Date nextUpdate = null;   // null when the optional field is absent
    // revoked entries keyed by (certificate issuer, serial number);
    // LinkedHashMap preserves the encoded entry order
    private Map<X509IssuerSerial,X509CRLEntry> revokedCerts =
        new LinkedHashMap<X509IssuerSerial,X509CRLEntry>();
    private CRLExtensions extensions = null;
    // CRL extensions are encoded with an EXPLICIT [0] tag
    private final static boolean isExplicit = true;
    // dates >= 2050 must be encoded as GeneralizedTime rather than UTCTime
    private static final long YR_2050 = 2524636800000L;

    // set once by parse() or sign(); prevents re-signing an existing CRL
    private boolean readOnly = false;

    /**
     * PublicKey that has previously been used to successfully verify
     * the signature of this CRL. Null if the CRL has not
     * yet been verified (successfully).
     */
    private PublicKey verifiedPublicKey;
    /**
     * If verifiedPublicKey is not null, name of the provider used to
     * successfully verify the signature of this CRL, or the
     * empty String if no provider was explicitly specified.
     */
    private String verifiedProvider;

    /**
     * Not to be used. As it would lead to cases of uninitialized
     * CRL objects.
     */
    private X509CRLImpl() { }

    /**
     * Unmarshals an X.509 CRL from its encoded form, parsing the encoded
     * bytes. This form of constructor is used by agents which
     * need to examine and use CRL contents. Note that the buffer
     * must include only one CRL, and no "garbage" may be left at
     * the end.
     *
     * @param crlData the encoded bytes, with no trailing padding.
     * @exception CRLException on parsing errors.
     */
    public X509CRLImpl(byte[] crlData) throws CRLException {
        try {
            parse(new DerValue(crlData));
        } catch (IOException e) {
            // leave the object unusable rather than half-parsed
            signedCRL = null;
            throw new CRLException("Parsing error: " + e.getMessage());
        }
    }

    /**
     * Unmarshals an X.509 CRL from an DER value.
     *
     * @param val a DER value holding at least one CRL
     * @exception CRLException on parsing errors.
     */
    public X509CRLImpl(DerValue val) throws CRLException {
        try {
            parse(val);
        } catch (IOException e) {
            signedCRL = null;
            throw new CRLException("Parsing error: " + e.getMessage());
        }
    }

    /**
     * Unmarshals an X.509 CRL from an input stream. Only one CRL
     * is expected at the end of the input stream.
     *
     * @param inStrm an input stream holding at least one CRL
     * @exception CRLException on parsing errors.
     */
    public X509CRLImpl(InputStream inStrm) throws CRLException {
        try {
            parse(new DerValue(inStrm));
        } catch (IOException e) {
            signedCRL = null;
            throw new CRLException("Parsing error: " + e.getMessage());
        }
    }

    /**
     * Initial CRL constructor, no revoked certs, and no extensions.
     *
     * @param issuer the name of the CA issuing this CRL.
     * @param thisDate the Date of this issue.
     * @param nextDate the Date of the next CRL.
     */
    public X509CRLImpl(X500Name issuer, Date thisDate, Date nextDate) {
        this.issuer = issuer;
        this.thisUpdate = thisDate;
        this.nextUpdate = nextDate;
    }

    /**
     * CRL constructor, revoked certs, no extensions.
     *
     * @param issuer the name of the CA issuing this CRL.
     * @param thisDate the Date of this issue.
     * @param nextDate the Date of the next CRL.
     * @param badCerts the array of CRL entries.
     *
     * @exception CRLException on parsing/construction errors.
     */
    public X509CRLImpl(X500Name issuer, Date thisDate, Date nextDate,
                       X509CRLEntry[] badCerts)
        throws CRLException
    {
        this.issuer = issuer;
        this.thisUpdate = thisDate;
        this.nextUpdate = nextDate;
        if (badCerts != null) {
            X500Principal crlIssuer = getIssuerX500Principal();
            // badCertIssuer tracks the issuer attributed to each entry and is
            // carried over from entry to entry; getCertIssuer presumably reads
            // an entry's certificate-issuer extension (indirect CRLs) —
            // TODO(review): confirm against getCertIssuer (defined below).
            X500Principal badCertIssuer = crlIssuer;
            for (int i = 0; i < badCerts.length; i++) {
                X509CRLEntryImpl badCert = (X509CRLEntryImpl)badCerts[i];
                try {
                    badCertIssuer = getCertIssuer(badCert, badCertIssuer);
                } catch (IOException ioe) {
                    throw new CRLException(ioe);
                }
                badCert.setCertificateIssuer(crlIssuer, badCertIssuer);
                X509IssuerSerial issuerSerial = new X509IssuerSerial
                    (badCertIssuer, badCert.getSerialNumber());
                this.revokedCerts.put(issuerSerial, badCert);
                if (badCert.hasExtensions()) {
                    // entry extensions require a v2 CRL
                    this.version = 1;
                }
            }
        }
    }

    /**
     * CRL constructor, revoked certs and extensions.
     *
     * @param issuer the name of the CA issuing this CRL.
     * @param thisDate the Date of this issue.
     * @param nextDate the Date of the next CRL.
     * @param badCerts the array of CRL entries.
     * @param crlExts the CRL extensions.
     *
     * @exception CRLException on parsing/construction errors.
     */
    public X509CRLImpl(X500Name issuer, Date thisDate, Date nextDate,
                       X509CRLEntry[] badCerts, CRLExtensions crlExts)
        throws CRLException
    {
        this(issuer, thisDate, nextDate, badCerts);
        if (crlExts != null) {
            this.extensions = crlExts;
            // CRL extensions require a v2 CRL
            this.version = 1;
        }
    }

    /**
     * Returns the encoding as an uncloned byte array. Callers must
     * guarantee that they neither modify it nor expose it to untrusted
     * code.
     */
    public byte[] getEncodedInternal() throws CRLException {
        if (signedCRL == null) {
            throw new CRLException("Null CRL to encode");
        }
        return signedCRL;
    }

    /**
     * Returns the ASN.1 DER encoded form of this CRL.
     *
     * @exception CRLException if an encoding error occurs.
     */
    public byte[] getEncoded() throws CRLException {
        // clone so callers cannot mutate the internal buffer
        return getEncodedInternal().clone();
    }

    /**
     * Encodes the "to-be-signed" CRL to the OutputStream.
     *
     * @param out the OutputStream to write to.
     * @exception CRLException on encoding errors.
     */
    public void encodeInfo(OutputStream out) throws CRLException {
        try {
            DerOutputStream tmp = new DerOutputStream();
            DerOutputStream rCerts = new DerOutputStream();
            DerOutputStream seq = new DerOutputStream();

            if (version != 0) // v2 crl encode version
                tmp.putInteger(version);
            infoSigAlgId.encode(tmp);
            if ((version == 0) && (issuer.toString() == null))
                throw new CRLException("Null Issuer DN not allowed in v1 CRL");
            issuer.encode(tmp);

            // UTCTime for dates before 2050, GeneralizedTime afterwards
            if (thisUpdate.getTime() < YR_2050)
                tmp.putUTCTime(thisUpdate);
            else
                tmp.putGeneralizedTime(thisUpdate);

            if (nextUpdate != null) {
                if (nextUpdate.getTime() < YR_2050)
                    tmp.putUTCTime(nextUpdate);
                else
                    tmp.putGeneralizedTime(nextUpdate);
            }

            // revokedCertificates SEQUENCE is omitted entirely when empty
            if (!revokedCerts.isEmpty()) {
                for (X509CRLEntry entry : revokedCerts.values()) {
                    ((X509CRLEntryImpl)entry).encode(rCerts);
                }
                tmp.write(DerValue.tag_Sequence, rCerts);
            }

            if (extensions != null)
                extensions.encode(tmp, isExplicit);

            seq.write(DerValue.tag_Sequence, tmp);

            // cache the encoded bytes for later signing/verification
            tbsCertList = seq.toByteArray();
            out.write(tbsCertList);
        } catch (IOException e) {
            throw new CRLException("Encoding error: " + e.getMessage());
        }
    }

    /**
     * Verifies that this CRL was signed using the
     * private key that corresponds to the given public key.
     *
     * @param key the PublicKey used to carry out the verification.
     *
     * @exception NoSuchAlgorithmException on unsupported signature
     * algorithms.
     * @exception InvalidKeyException on incorrect key.
     * @exception NoSuchProviderException if there's no default provider.
     * @exception SignatureException on signature errors.
     * @exception CRLException on encoding errors.
     */
    public void verify(PublicKey key)
    throws CRLException, NoSuchAlgorithmException, InvalidKeyException,
           NoSuchProviderException, SignatureException {
        // empty provider name selects the default provider search order
        verify(key, "");
    }

    /**
     * Verifies that this CRL was signed using the
     * private key that corresponds to the given public key,
     * and that the signature verification was computed by
     * the given provider.
     *
     * @param key the PublicKey used to carry out the verification.
     * @param sigProvider the name of the signature provider.
     *
     * @exception NoSuchAlgorithmException on unsupported signature
     * algorithms.
     * @exception InvalidKeyException on incorrect key.
     * @exception NoSuchProviderException on incorrect provider.
     * @exception SignatureException on signature errors.
     * @exception CRLException on encoding errors.
     */
    public synchronized void verify(PublicKey key, String sigProvider)
    throws CRLException, NoSuchAlgorithmException, InvalidKeyException,
           NoSuchProviderException, SignatureException {

        if (sigProvider == null) {
            sigProvider = "";
        }
        if ((verifiedPublicKey != null) && verifiedPublicKey.equals(key)) {
            // this CRL has already been successfully verified using
            // this public key. Make sure providers match, too.
            if (sigProvider.equals(verifiedProvider)) {
                return;
            }
        }
        if (signedCRL == null) {
            throw new CRLException("Uninitialized CRL");
        }
        Signature sigVerf = null;
        if (sigProvider.length() == 0) {
            sigVerf = Signature.getInstance(sigAlgId.getName());
        } else {
            sigVerf = Signature.getInstance(sigAlgId.getName(), sigProvider);
        }
        sigVerf.initVerify(key);

        if (tbsCertList == null) {
            throw new CRLException("Uninitialized CRL");
        }

        // the signature covers exactly the DER-encoded tbsCertList bytes
        sigVerf.update(tbsCertList, 0, tbsCertList.length);

        if (!sigVerf.verify(signature)) {
            throw new SignatureException("Signature does not match.");
        }
        // cache the successful verification so repeated calls are cheap
        verifiedPublicKey = key;
        verifiedProvider = sigProvider;
    }

    /**
     * Encodes an X.509 CRL, and signs it using the given key.
     *
     * @param key the private key used for signing.
     * @param algorithm the name of the signature algorithm used.
     *
     * @exception NoSuchAlgorithmException on unsupported signature
     * algorithms.
     * @exception InvalidKeyException on incorrect key.
     * @exception NoSuchProviderException on incorrect provider.
     * @exception SignatureException on signature errors.
     * @exception CRLException if any mandatory data was omitted.
     */
    public void sign(PrivateKey key, String algorithm)
    throws CRLException, NoSuchAlgorithmException, InvalidKeyException,
           NoSuchProviderException, SignatureException {
        // null provider means "use the default provider search order"
        sign(key, algorithm, null);
    }

    /**
     * Encodes an X.509 CRL, and signs it using the given key.
     *
     * @param key the private key used for signing.
     * @param algorithm the name of the signature algorithm used.
     * @param provider the name of the provider.
     *
     * @exception NoSuchAlgorithmException on unsupported signature
     * algorithms.
     * @exception InvalidKeyException on incorrect key.
     * @exception NoSuchProviderException on incorrect provider.
     * @exception SignatureException on signature errors.
     * @exception CRLException if any mandatory data was omitted.
     */
    public void sign(PrivateKey key, String algorithm, String provider)
    throws CRLException, NoSuchAlgorithmException, InvalidKeyException,
           NoSuchProviderException, SignatureException {
        try {
            if (readOnly)
                throw new CRLException("cannot over-write existing CRL");
            Signature sigEngine = null;
            if ((provider == null) || (provider.length() == 0))
                sigEngine = Signature.getInstance(algorithm);
            else
                sigEngine = Signature.getInstance(algorithm, provider);

            sigEngine.initSign(key);

            // in case the name is reset
            sigAlgId = AlgorithmId.get(sigEngine.getAlgorithm());
            infoSigAlgId = sigAlgId;

            DerOutputStream out = new DerOutputStream();
            DerOutputStream tmp = new DerOutputStream();

            // encode crl info (also fills in tbsCertList as a side effect)
            encodeInfo(tmp);

            // encode algorithm identifier
            sigAlgId.encode(tmp);

            // Create and encode the signature itself.
            sigEngine.update(tbsCertList, 0, tbsCertList.length);
            signature = sigEngine.sign();
            tmp.putBitString(signature);

            // Wrap the signed data in a SEQUENCE { data, algorithm, sig }
            out.write(DerValue.tag_Sequence, tmp);
            signedCRL = out.toByteArray();
            // once signed, the CRL may not be signed again
            readOnly = true;

        } catch (IOException e) {
            throw new CRLException("Error while encoding data: " +
                                   e.getMessage());
        }
    }

    /**
     * Returns a printable string of this CRL.
     *
     * @return value of this CRL in a printable form.
     */
    public String toString() {
        StringBuffer sb = new StringBuffer();

        sb.append("X.509 CRL v" + (version+1) + "\n");
        if (sigAlgId != null)
            sb.append("Signature Algorithm: " + sigAlgId.toString() +
                      ", OID=" + (sigAlgId.getOID()).toString() + "\n");
        if (issuer != null)
            sb.append("Issuer: " + issuer.toString() + "\n");
        if (thisUpdate != null)
            sb.append("\nThis Update: " + thisUpdate.toString() + "\n");
        if (nextUpdate != null)
            sb.append("Next Update: " + nextUpdate.toString() + "\n");
        if (revokedCerts.isEmpty())
            sb.append("\nNO certificates have been revoked\n");
        else {
            sb.append("\nRevoked Certificates: " + revokedCerts.size());
            int i = 1;
            for (Iterator<X509CRLEntry> iter = revokedCerts.values().iterator();
                 iter.hasNext(); i++)
                sb.append("\n[" + i + "] " + iter.next().toString());
        }
        if (extensions != null) {
            Collection<Extension> allExts = extensions.getAllExtensions();
            Object[] objs = allExts.toArray();
            sb.append("\nCRL Extensions: " + objs.length);
            for (int i = 0; i < objs.length; i++) {
                sb.append("\n[" + (i+1) + "]: ");
                Extension ext = (Extension)objs[i];
                try {
                    // extensions without an OIDMap-registered subclass are
                    // hex-dumped; known ones rely on the subclass toString
                    if (OIDMap.getClass(ext.getExtensionId()) == null) {
                        sb.append(ext.toString());
                        byte[] extValue = ext.getExtensionValue();
                        if (extValue != null) {
                            DerOutputStream out = new DerOutputStream();
                            out.putOctetString(extValue);
                            extValue = out.toByteArray();
                            HexDumpEncoder enc = new HexDumpEncoder();
                            sb.append("Extension unknown: "
                                      + "DER encoded OCTET string =\n"
                                      + enc.encodeBuffer(extValue) + "\n");
                        }
                    } else
                        sb.append(ext.toString()); // sub-class exists
                } catch (Exception e) {
                    // best-effort dump: note the failure and keep going
                    sb.append(", Error parsing this extension");
                }
            }
        }
        if (signature != null) {
            HexDumpEncoder encoder = new HexDumpEncoder();
            sb.append("\nSignature:\n" + encoder.encodeBuffer(signature)
                      + "\n");
        } else
            sb.append("NOT signed yet\n");
        return sb.toString();
    }

    /**
     * Checks whether the given certificate is on this CRL.
     *
     * @param cert the certificate to check for.
     * @return true if the given certificate is on this CRL,
     * false otherwise.
     */
    public boolean isRevoked(Certificate cert) {
        if (revokedCerts.isEmpty() || (!(cert instanceof X509Certificate))) {
            return false;
        }
        X509Certificate xcert = (X509Certificate) cert;
        X509IssuerSerial issuerSerial = new X509IssuerSerial(xcert);
        return revokedCerts.containsKey(issuerSerial);
    }

    /**
     * Gets the version number from this CRL.
     * The ASN.1 definition for this is:
     * <pre>
     * Version  ::=  INTEGER  { v1(0), v2(1), v3(2) }
     *             -- v3 does not apply to CRLs but appears for consistency
     *             -- with definition of Version for certs
     * </pre>
     * @return the version number, i.e. 1 or 2.
     */
    public int getVersion() {
        // stored zero-based (0 == v1), reported one-based
        return version+1;
    }

    /**
     * Gets the issuer distinguished name from this CRL.
     * The issuer name identifies the entity who has signed (and
     * issued the CRL). The issuer name field contains an
     * X.500 distinguished name (DN).
     * The ASN.1 definition for this is:
     * <pre>
     * issuer    Name
     *
     * Name ::= CHOICE { RDNSequence }
     * RDNSequence ::= SEQUENCE OF RelativeDistinguishedName
     * RelativeDistinguishedName ::=
     *     SET OF AttributeValueAssertion
     *
     * AttributeValueAssertion ::= SEQUENCE {
     *                               AttributeType,
     *                               AttributeValue }
     * AttributeType ::= OBJECT IDENTIFIER
     * AttributeValue ::= ANY
     * </pre>
     * The Name describes a hierarchical name composed of attributes,
     * such as country name, and corresponding values, such as US.
     * The type of the component AttributeValue is determined by the
     * AttributeType; in general it will be a directoryString.
     * A directoryString is usually one of PrintableString,
     * TeletexString or UniversalString.
     * @return the issuer name.
     */
    public Principal getIssuerDN() {
        return (Principal)issuer;
    }

    /**
     * Return the issuer as X500Principal. Overrides method in X509CRL
     * to provide a slightly more efficient version.
     */
    public X500Principal getIssuerX500Principal() {
        // lazily computed and cached on first use
        if (issuerPrincipal == null) {
            issuerPrincipal = issuer.asX500Principal();
        }
        return issuerPrincipal;
    }

    /**
     * Gets the thisUpdate date from the CRL.
     * The ASN.1 definition for this is:
     *
     * @return the thisUpdate date from the CRL.
     */
    public Date getThisUpdate() {
        // defensive copy: Date is mutable
        return (new Date(thisUpdate.getTime()));
    }

    /**
     * Gets the nextUpdate date from the CRL.
     *
     * @return the nextUpdate date from the CRL, or null if
     * not present.
     */
    public Date getNextUpdate() {
        if (nextUpdate == null)
            return null;
        // defensive copy: Date is mutable
        return (new Date(nextUpdate.getTime()));
    }

    /**
     * Gets the CRL entry with the given serial number from this CRL.
     *
     * @return the entry with the given serial number, or <code>null</code> if
     * no such entry exists in the CRL.
     * @see X509CRLEntry
     */
    public X509CRLEntry getRevokedCertificate(BigInteger serialNumber) {
        if (revokedCerts.isEmpty()) {
            return null;
        }
        // assume this is a direct CRL entry (cert and CRL issuer are the same)
        X509IssuerSerial issuerSerial = new X509IssuerSerial
            (getIssuerX500Principal(), serialNumber);
        return revokedCerts.get(issuerSerial);
    }

    /**
     * Gets the CRL entry for the given certificate.
     */
    public X509CRLEntry getRevokedCertificate(X509Certificate cert) {
        if (revokedCerts.isEmpty()) {
            return null;
        }
        // keyed on the certificate's own (issuer, serial) pair
        X509IssuerSerial issuerSerial = new X509IssuerSerial(cert);
        return revokedCerts.get(issuerSerial);
    }

    /**
     * Gets all the revoked certificates from the CRL.
     * A Set of X509CRLEntry.
     *
     * @return all the revoked certificates or <code>null</code> if there are
     * none.
     * @see X509CRLEntry
     */
    public Set<X509CRLEntry> getRevokedCertificates() {
        if (revokedCerts.isEmpty()) {
            return null;
        } else {
            // copy into a fresh set so the internal map stays private
            return new HashSet<X509CRLEntry>(revokedCerts.values());
        }
    }

    /**
     * Gets the DER encoded CRL information, the
     * <code>tbsCertList</code> from this CRL.
     * This can be used to verify the signature independently.
     *
     * @return the DER encoded CRL information.
     * @exception CRLException on encoding errors.
     */
    public byte[] getTBSCertList() throws CRLException {
        if (tbsCertList == null)
            throw new CRLException("Uninitialized CRL");
        // defensive copy of the internal buffer
        byte[] dup = new byte[tbsCertList.length];
        System.arraycopy(tbsCertList, 0, dup, 0, dup.length);
        return dup;
    }

    /**
     * Gets the raw Signature bits from the CRL.
     *
     * @return the signature.
     */
    public byte[] getSignature() {
        if (signature == null)
            return null;
        // defensive copy of the internal buffer
        byte[] dup = new byte[signature.length];
        System.arraycopy(signature, 0, dup, 0, dup.length);
        return dup;
    }

    /**
     * Gets the signature algorithm name for the CRL
     * signature algorithm. For example, the string "SHA1withDSA".
     * The ASN.1 definition for this is:
     * <pre>
     * AlgorithmIdentifier  ::=  SEQUENCE  {
     *     algorithm               OBJECT IDENTIFIER,
     *     parameters              ANY DEFINED BY algorithm OPTIONAL  }
     *                             -- contains a value of the type
     *                             -- registered for use with the
     *                             -- algorithm object identifier value
     * </pre>
     *
     * @return the signature algorithm name.
     */
    public String getSigAlgName() {
        if (sigAlgId == null)
            return null;
        return sigAlgId.getName();
    }

    /**
     * Gets the signature algorithm OID string from the CRL.
     * An OID is represented by a set of positive whole number separated
     * by ".", that means,<br>
     * &lt;positive whole number&gt;.&lt;positive whole number&gt;.&lt;...&gt;
     * For example, the string "1.2.840.10040.4.3" identifies the SHA-1
     * with DSA signature algorithm defined in
     * <a href="http://www.ietf.org/rfc/rfc3279.txt">RFC 3279: Algorithms and
     * Identifiers for the Internet X.509 Public Key Infrastructure Certificate
     * and CRL Profile</a>.
     *
     * @return the signature algorithm oid string.
     */
    public String getSigAlgOID() {
        if (sigAlgId == null)
            return null;
        ObjectIdentifier oid = sigAlgId.getOID();
        return oid.toString();
    }

    /**
     * Gets the DER encoded signature algorithm parameters from this
     * CRL's signature algorithm. In most cases, the signature
     * algorithm parameters are null, the parameters are usually
     * supplied with the Public Key.
     *
     * @return the DER encoded signature algorithm parameters, or
     *         null if no parameters are present.
     */
    public byte[] getSigAlgParams() {
        if (sigAlgId == null)
            return null;
        try {
            return sigAlgId.getEncodedParams();
        } catch (IOException e) {
            // encoding failure is treated the same as absent parameters
            return null;
        }
    }

    /**
     * return the AuthorityKeyIdentifier, if any.
     *
     * @return AuthorityKeyIdentifier or null
     *         (if no AuthorityKeyIdentifierExtension)
     * @throws IOException on error
     */
    public KeyIdentifier getAuthKeyId() throws IOException {
        AuthorityKeyIdentifierExtension aki = getAuthKeyIdExtension();
        if (aki != null) {
            KeyIdentifier keyId = (KeyIdentifier)aki.get(aki.KEY_ID);
            return keyId;
        } else {
            return null;
        }
    }

    /**
     * return the AuthorityKeyIdentifierExtension, if any.
     *
     * @return AuthorityKeyIdentifierExtension or null (if no such extension)
     * @throws IOException on error
     */
    public AuthorityKeyIdentifierExtension getAuthKeyIdExtension()
        throws IOException {
        Object obj = getExtension(PKIXExtensions.AuthorityKey_Id);
        return (AuthorityKeyIdentifierExtension)obj;
    }

    /**
     * return the CRLNumberExtension, if any.
     *
     * @return CRLNumberExtension or null (if no such extension)
     * @throws IOException on error
     */
    public CRLNumberExtension getCRLNumberExtension() throws IOException {
        Object obj = getExtension(PKIXExtensions.CRLNumber_Id);
        return (CRLNumberExtension)obj;
    }

    /**
     * return the CRL number from the CRLNumberExtension, if any.
     *
     * @return number or null (if no such extension)
     * @throws IOException on error
     */
    public BigInteger getCRLNumber() throws IOException {
        CRLNumberExtension numExt = getCRLNumberExtension();
        if (numExt != null) {
            BigInteger num = (BigInteger)numExt.get(numExt.NUMBER);
            return num;
        } else {
            return null;
        }
    }

    /**
     * return the DeltaCRLIndicatorExtension, if any.
     *
     * @return DeltaCRLIndicatorExtension or null (if no such extension)
     * @throws IOException on error
     */
    public DeltaCRLIndicatorExtension getDeltaCRLIndicatorExtension()
        throws IOException {
        Object obj = getExtension(PKIXExtensions.DeltaCRLIndicator_Id);
        return (DeltaCRLIndicatorExtension)obj;
    }

    /**
     * return the base CRL number from the DeltaCRLIndicatorExtension, if any.
     *
     * @return number or null (if no such extension)
     * @throws IOException on error
     */
    public BigInteger getBaseCRLNumber() throws IOException {
        DeltaCRLIndicatorExtension dciExt = getDeltaCRLIndicatorExtension();
        if (dciExt != null) {
            BigInteger num = (BigInteger)dciExt.get(dciExt.NUMBER);
            return num;
        } else {
            return null;
        }
    }

    /**
     * return the IssuerAlternativeNameExtension, if any.
     *
     * @return IssuerAlternativeNameExtension or null (if no such extension)
     * @throws IOException on error
     */
    public IssuerAlternativeNameExtension getIssuerAltNameExtension()
        throws IOException {
        Object obj = getExtension(PKIXExtensions.IssuerAlternativeName_Id);
        return (IssuerAlternativeNameExtension)obj;
    }

    /**
     * return the IssuingDistributionPointExtension, if any.
     *
     * @return IssuingDistributionPointExtension or null
     *         (if no such extension)
     * @throws IOException on error
     */
    public IssuingDistributionPointExtension
        getIssuingDistributionPointExtension() throws IOException {
        Object obj = getExtension(PKIXExtensions.IssuingDistributionPoint_Id);
        return (IssuingDistributionPointExtension) obj;
    }

    /**
     * Return true if a critical extension is found that is
     * not supported, otherwise return false.
     */
    public boolean hasUnsupportedCriticalExtension() {
        if (extensions == null)
            return false;
        return extensions.hasUnsupportedCriticalExtension();
    }

    /**
     * Gets a Set of the extension(s) marked CRITICAL in the
     * CRL. In the returned set, each extension is represented by
     * its OID string.
     *
     * @return a set of the extension oid strings in the
     * CRL that are marked critical.
     */
    public Set<String> getCriticalExtensionOIDs() {
        if (extensions == null) {
            return null;
        }
        Set<String> extSet = new HashSet<String>();
        for (Extension ex : extensions.getAllExtensions()) {
            if (ex.isCritical()) {
                extSet.add(ex.getExtensionId().toString());
            }
        }
        return extSet;
    }

    /**
     * Gets a Set of the extension(s) marked NON-CRITICAL in the
     * CRL. In the returned set, each extension is represented by
     * its OID string.
     *
     * @return a set of the extension oid strings in the
     * CRL that are NOT marked critical.
     */
    public Set<String> getNonCriticalExtensionOIDs() {
        if (extensions == null) {
            return null;
        }
        Set<String> extSet = new HashSet<String>();
        for (Extension ex : extensions.getAllExtensions()) {
            if (!ex.isCritical()) {
                extSet.add(ex.getExtensionId().toString());
            }
        }
        return extSet;
    }

    /**
     * Gets the DER encoded OCTET string for the extension value
     * (<code>extnValue</code>) identified by the passed in oid String.
     * The <code>oid</code> string is
     * represented by a set of positive whole number separated
     * by ".", that means,<br>
     * &lt;positive whole number&gt;.&lt;positive whole number&gt;.&lt;...&gt;
     *
     * @param oid the Object Identifier value for the extension.
     * @return the der encoded octet string of the extension value.
     */
    public byte[] getExtensionValue(String oid) {
        if (extensions == null)
            return null;
        try {
            String extAlias = OIDMap.getName(new ObjectIdentifier(oid));
            Extension crlExt = null;

            if (extAlias == null) { // may be unknown
                // no registered alias: linear scan for a matching OID
                ObjectIdentifier findOID = new ObjectIdentifier(oid);
                Extension ex = null;
                ObjectIdentifier inCertOID;
                for (Enumeration<Extension> e = extensions.getElements();
                     e.hasMoreElements();) {
                    ex = e.nextElement();
                    inCertOID = ex.getExtensionId();
                    if (inCertOID.equals(findOID)) {
                        crlExt = ex;
                        break;
                    }
                }
            } else
                crlExt = extensions.get(extAlias);

            if (crlExt == null)
                return null;
            byte[] extData = crlExt.getExtensionValue();
            if (extData == null)
                return null;

            // re-wrap the raw value in an OCTET STRING, per the
            // java.security.cert.X509Extension.getExtensionValue contract
            DerOutputStream out = new DerOutputStream();
            out.putOctetString(extData);
            return out.toByteArray();
        } catch (Exception e) {
            // the X509Extension contract requires null (not an exception)
            // when the value cannot be produced
            return null;
        }
    }

    /**
     * get an extension
     *
     * @param oid ObjectIdentifier of extension desired
     * @return Object of type {@code Extension} or null, if not found
     */
    public Object getExtension(ObjectIdentifier oid) {
        if (extensions == null)
            return null;

        // XXX Consider cloning this
        return extensions.get(OIDMap.getName(oid));
    }

    /*
     * Parses an X.509 CRL, should be used only by constructors.
 */
private void parse(DerValue val) throws CRLException, IOException {
    // check if can over write the certificate
    if (readOnly)
        throw new CRLException("cannot over-write existing CRL");

    if ( val.getData() == null || val.tag != DerValue.tag_Sequence)
        throw new CRLException("Invalid DER-encoded CRL data");

    // Keep the full signed encoding for later getEncoded()/verify use.
    signedCRL = val.toByteArray();
    DerValue seq[] = new DerValue[3];

    // Top-level CertificateList ::= SEQUENCE {
    //   tbsCertList, signatureAlgorithm, signatureValue }
    seq[0] = val.data.getDerValue();
    seq[1] = val.data.getDerValue();
    seq[2] = val.data.getDerValue();

    if (val.data.available() != 0)
        throw new CRLException("signed overrun, bytes = "
                               + val.data.available());

    if (seq[0].tag != DerValue.tag_Sequence)
        throw new CRLException("signed CRL fields invalid");

    sigAlgId = AlgorithmId.parse(seq[1]);
    signature = seq[2].getBitString();

    if (seq[1].data.available() != 0)
        throw new CRLException("AlgorithmId field overrun");

    if (seq[2].data.available() != 0)
        throw new CRLException("Signature field overrun");

    // the tbsCertsList
    tbsCertList = seq[0].toByteArray();

    // parse the information
    DerInputStream derStrm = seq[0].data;
    DerValue tmp;
    byte nextByte;

    // version (optional if v1)
    version = 0;   // by default, version = v1 == 0
    nextByte = (byte)derStrm.peekByte();
    if (nextByte == DerValue.tag_Integer) {
        version = derStrm.getInteger();
        // Encoded value 1 denotes v2; any other explicit version is
        // rejected (v1 is implied by the field being absent).
        if (version != 1)  // i.e. v2
            throw new CRLException("Invalid version");
    }
    tmp = derStrm.getDerValue();

    // signature
    AlgorithmId tmpId = AlgorithmId.parse(tmp);

    // the "inner" and "outer" signature algorithms must match
    if (! tmpId.equals(sigAlgId))
        throw new CRLException("Signature algorithm mismatch");
    infoSigAlgId = tmpId;

    // issuer
    issuer = new X500Name(derStrm);
    if (issuer.isEmpty()) {
        throw new CRLException("Empty issuer DN not allowed in X509CRLs");
    }

    // thisUpdate
    // check if UTCTime encoded or GeneralizedTime
    nextByte = (byte)derStrm.peekByte();
    if (nextByte == DerValue.tag_UtcTime) {
        thisUpdate = derStrm.getUTCTime();
    } else if (nextByte == DerValue.tag_GeneralizedTime) {
        thisUpdate = derStrm.getGeneralizedTime();
    } else {
        throw new CRLException("Invalid encoding for thisUpdate"
                               + " (tag=" + nextByte + ")");
    }

    if (derStrm.available() == 0)
        return;     // done parsing no more optional fields present

    // nextUpdate (optional)
    nextByte = (byte)derStrm.peekByte();
    if (nextByte == DerValue.tag_UtcTime) {
        nextUpdate = derStrm.getUTCTime();
    } else if (nextByte == DerValue.tag_GeneralizedTime) {
        nextUpdate = derStrm.getGeneralizedTime();
    } // else it is not present

    if (derStrm.available() == 0)
        return;     // done parsing no more optional fields present

    // revokedCertificates (optional)
    nextByte = (byte)derStrm.peekByte();
    if ((nextByte == DerValue.tag_SequenceOf)
        && (! ((nextByte & 0x0c0) == 0x080))) {
        DerValue[] badCerts = derStrm.getSequence(4);

        X500Principal crlIssuer = getIssuerX500Principal();
        // Entries without a CertificateIssuer extension inherit the
        // issuer of the preceding entry (starting with the CRL issuer),
        // so track it across iterations.
        X500Principal badCertIssuer = crlIssuer;
        for (int i = 0; i < badCerts.length; i++) {
            X509CRLEntryImpl entry = new X509CRLEntryImpl(badCerts[i]);
            badCertIssuer = getCertIssuer(entry, badCertIssuer);
            entry.setCertificateIssuer(crlIssuer, badCertIssuer);
            X509IssuerSerial issuerSerial = new X509IssuerSerial
                (badCertIssuer, entry.getSerialNumber());
            revokedCerts.put(issuerSerial, entry);
        }
    }

    if (derStrm.available() == 0)
        return;     // done parsing no extensions

    // crlExtensions (optional)
    tmp = derStrm.getDerValue();
    if (tmp.isConstructed() &&
        tmp.isContextSpecific((byte)0)) {
        extensions = new CRLExtensions(tmp.data);
    }
    // Parsing succeeded; lock the object against re-parsing.
    readOnly = true;
}

/**
 * Extract the issuer X500Principal from an X509CRL.
Parses the encoded
 * form of the CRL to preserve the principal's ASN.1 encoding.
 *
 * Called by java.security.cert.X509CRL.getIssuerX500Principal().
 */
public static X500Principal getIssuerX500Principal(X509CRL crl) {
    try {
        byte[] encoded = crl.getEncoded();
        DerInputStream derIn = new DerInputStream(encoded);
        // tbsCertList is the first element of the outer SEQUENCE.
        DerValue tbsCert = derIn.getSequence(3)[0];
        DerInputStream tbsIn = tbsCert.data;

        DerValue tmp;
        // skip version number if present
        byte nextByte = (byte)tbsIn.peekByte();
        if (nextByte == DerValue.tag_Integer) {
            tmp = tbsIn.getDerValue();
        }

        tmp = tbsIn.getDerValue();  // skip signature
        tmp = tbsIn.getDerValue();  // issuer
        // Hand the issuer's exact DER bytes to X500Principal so the
        // original ASN.1 encoding is preserved.
        byte[] principalBytes = tmp.toByteArray();
        return new X500Principal(principalBytes);
    } catch (Exception e) {
        throw new RuntimeException("Could not parse issuer", e);
    }
}

/**
 * Returned the encoding of the given certificate for internal use.
 * Callers must guarantee that they neither modify it nor expose it
 * to untrusted code. Uses getEncodedInternal() if the certificate
 * is instance of X509CertImpl, getEncoded() otherwise.
 */
public static byte[] getEncodedInternal(X509CRL crl) throws CRLException {
    if (crl instanceof X509CRLImpl) {
        // Avoid the defensive copy made by the public getEncoded().
        return ((X509CRLImpl)crl).getEncodedInternal();
    } else {
        return crl.getEncoded();
    }
}

/**
 * Utility method to convert an arbitrary instance of X509CRL
 * to a X509CRLImpl. Does a cast if possible, otherwise reparses
 * the encoding.
 */
public static X509CRLImpl toImpl(X509CRL crl) throws CRLException {
    if (crl instanceof X509CRLImpl) {
        return (X509CRLImpl)crl;
    } else {
        return intern(crl);
    }
}

/**
 * Return an interned X509CRLImpl for the given certificate.
 * For more information, see intern(X509Certificate).
*/ public static synchronized X509CRLImpl intern(X509CRL c) throws CRLException { if (c == null) { return null; } boolean isImpl = c instanceof X509CRLImpl; byte[] encoding; if (isImpl) { encoding = ((X509CRLImpl)c).getEncodedInternal(); } else { encoding = c.getEncoded(); } X509CRLImpl newC; if (isImpl) { newC = (X509CRLImpl)c; } else { newC = new X509CRLImpl(encoding); encoding = newC.getEncodedInternal(); } return newC; } /** * Returns the X500 certificate issuer DN of a CRL entry. * * @param entry the entry to check * @param prevCertIssuer the previous entry's certificate issuer * @return the X500Principal in a CertificateIssuerExtension, or * prevCertIssuer if it does not exist */ private X500Principal getCertIssuer(X509CRLEntryImpl entry, X500Principal prevCertIssuer) throws IOException { CertificateIssuerExtension ciExt = entry.getCertificateIssuerExtension(); if (ciExt != null) { GeneralNames names = (GeneralNames) ciExt.get(CertificateIssuerExtension.ISSUER); X500Name issuerDN = (X500Name) names.get(0).getName(); return issuerDN.asX500Principal(); } else { return prevCertIssuer; } } /** * Immutable X.509 Certificate Issuer DN and serial number pair */ private final static class X509IssuerSerial { final X500Principal issuer; final BigInteger serial; volatile int hashcode = 0; /** * Create an X509IssuerSerial. * * @param issuer the issuer DN * @param serial the serial number */ X509IssuerSerial(X500Principal issuer, BigInteger serial) { this.issuer = issuer; this.serial = serial; } /** * Construct an X509IssuerSerial from an X509Certificate. */ X509IssuerSerial(X509Certificate cert) { this(cert.getIssuerX500Principal(), cert.getSerialNumber()); } /** * Returns the issuer. * * @return the issuer */ X500Principal getIssuer() { return issuer; } /** * Returns the serial number. * * @return the serial number */ BigInteger getSerial() { return serial; } /** * Compares this X509Serial with another and returns true if they * are equivalent. 
* * @param o the other object to compare with * @return true if equal, false otherwise */ public boolean equals(Object o) { if (o == this) { return true; } if (!(o instanceof X509IssuerSerial)) { return false; } X509IssuerSerial other = (X509IssuerSerial) o; if (serial.equals(other.getSerial()) && issuer.equals(other.getIssuer())) { return true; } return false; } /** * Returns a hash code value for this X509IssuerSerial. * * @return the hash code value */ public int hashCode() { if (hashcode == 0) { int result = 17; result = 37*result + issuer.hashCode(); result = 37*result + serial.hashCode(); hashcode = result; } return hashcode; } } }
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.synapse.v2019_06_01_preview.implementation;

import com.microsoft.azure.management.synapse.v2019_06_01_preview.SqlPool;
import com.microsoft.azure.arm.model.implementation.CreatableUpdatableImpl;
import rx.Observable;
import com.microsoft.azure.management.synapse.v2019_06_01_preview.SqlPoolPatchInfo;
import java.util.Map;
import org.joda.time.DateTime;
import com.microsoft.azure.management.synapse.v2019_06_01_preview.Sku;
import rx.functions.Func1;

/**
 * Fluent implementation of {@link SqlPool}: wraps a {@link SqlPoolInner}
 * model and routes create/update/get calls through {@link SqlPoolsInner}.
 * Create mode mutates the inner model directly; update mode accumulates
 * changes in {@code updateParameter} (a PATCH payload).
 *
 * NOTE: AutoRest-generated code — do not hand-edit logic; regenerate instead.
 */
class SqlPoolImpl extends CreatableUpdatableImpl<SqlPool, SqlPoolInner, SqlPoolImpl> implements SqlPool, SqlPool.Definition, SqlPool.Update {
    private final SynapseManager manager;
    // Positional identifiers parsed from the resource id (or supplied by
    // the definition flow before create).
    private String resourceGroupName;
    private String workspaceName;
    private String sqlPoolName;
    // PATCH payload accumulated by the with* setters while in update mode.
    private SqlPoolPatchInfo updateParameter;

    /** Constructor used by the "define new resource" flow. */
    SqlPoolImpl(String name, SynapseManager manager) {
        super(name, new SqlPoolInner());
        this.manager = manager;
        // Set resource name
        this.sqlPoolName = name;
        //
        this.updateParameter = new SqlPoolPatchInfo();
    }

    /** Constructor used when wrapping an existing inner model fetched from the service. */
    SqlPoolImpl(SqlPoolInner inner, SynapseManager manager) {
        super(inner.name(), inner);
        this.manager = manager;
        // Set resource name
        this.sqlPoolName = inner.name();
        // set resource ancestor and positional variables
        this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
        this.workspaceName = IdParsingUtils.getValueFromIdByName(inner.id(), "workspaces");
        this.sqlPoolName = IdParsingUtils.getValueFromIdByName(inner.id(), "sqlPools");
        //
        this.updateParameter = new SqlPoolPatchInfo();
    }

    @Override
    public SynapseManager manager() {
        return this.manager;
    }

    @Override
    public Observable<SqlPool> createResourceAsync() {
        SqlPoolsInner client = this.manager().inner().sqlPools();
        return client.createAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName, this.inner())
            .map(new Func1<SqlPoolInner, SqlPoolInner>() {
               @Override
               public SqlPoolInner call(SqlPoolInner resource) {
                   // Clear accumulated parameters once the call succeeds.
                   resetCreateUpdateParameters();
                   return resource;
               }
            })
            .map(innerToFluentMap(this));
    }

    @Override
    public Observable<SqlPool> updateResourceAsync() {
        SqlPoolsInner client = this.manager().inner().sqlPools();
        // PATCH with the accumulated updateParameter, not the full inner model.
        return client.updateAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName, this.updateParameter)
            .map(new Func1<SqlPoolInner, SqlPoolInner>() {
               @Override
               public SqlPoolInner call(SqlPoolInner resource) {
                   resetCreateUpdateParameters();
                   return resource;
               }
            })
            .map(innerToFluentMap(this));
    }

    @Override
    protected Observable<SqlPoolInner> getInnerAsync() {
        SqlPoolsInner client = this.manager().inner().sqlPools();
        return client.getAsync(this.resourceGroupName, this.workspaceName, this.sqlPoolName);
    }

    @Override
    public boolean isInCreateMode() {
        // A resource without an id has not been created on the service yet.
        return this.inner().id() == null;
    }

    /** Discards any pending PATCH parameters after a create/update round-trip. */
    private void resetCreateUpdateParameters() {
        this.updateParameter = new SqlPoolPatchInfo();
    }

    @Override
    public String collation() {
        return this.inner().collation();
    }

    @Override
    public String createMode() {
        return this.inner().createMode();
    }

    @Override
    public DateTime creationDate() {
        return this.inner().creationDate();
    }

    @Override
    public String id() {
        return this.inner().id();
    }

    @Override
    public String location() {
        return this.inner().location();
    }

    @Override
    public Long maxSizeBytes() {
        return this.inner().maxSizeBytes();
    }

    @Override
    public String name() {
        return this.inner().name();
    }

    @Override
    public String provisioningState() {
        return this.inner().provisioningState();
    }

    @Override
    public String recoverableDatabaseId() {
        return this.inner().recoverableDatabaseId();
    }

    @Override
    public DateTime restorePointInTime() {
        return this.inner().restorePointInTime();
    }

    @Override
    public Sku sku() {
        return this.inner().sku();
    }

    @Override
    public String sourceDatabaseId() {
        return this.inner().sourceDatabaseId();
    }

    @Override
    public String status() {
        return this.inner().status();
    }

    @Override
    public Map<String, String> tags() {
        return this.inner().getTags();
    }

    @Override
    public String type() {
        return this.inner().type();
    }

    @Override
    public SqlPoolImpl withExistingWorkspace(String resourceGroupName, String workspaceName) {
        this.resourceGroupName = resourceGroupName;
        this.workspaceName = workspaceName;
        return this;
    }

    // Each with* setter below targets the inner model in create mode and
    // the PATCH payload in update mode.

    @Override
    public SqlPoolImpl withLocation(String location) {
        if (isInCreateMode()) {
            this.inner().withLocation(location);
        } else {
            this.updateParameter.withLocation(location);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withCollation(String collation) {
        if (isInCreateMode()) {
            this.inner().withCollation(collation);
        } else {
            this.updateParameter.withCollation(collation);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withCreateMode(String createMode) {
        if (isInCreateMode()) {
            this.inner().withCreateMode(createMode);
        } else {
            this.updateParameter.withCreateMode(createMode);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withCreationDate(DateTime creationDate) {
        if (isInCreateMode()) {
            this.inner().withCreationDate(creationDate);
        } else {
            this.updateParameter.withCreationDate(creationDate);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withMaxSizeBytes(Long maxSizeBytes) {
        if (isInCreateMode()) {
            this.inner().withMaxSizeBytes(maxSizeBytes);
        } else {
            this.updateParameter.withMaxSizeBytes(maxSizeBytes);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withProvisioningState(String provisioningState) {
        if (isInCreateMode()) {
            this.inner().withProvisioningState(provisioningState);
        } else {
            this.updateParameter.withProvisioningState(provisioningState);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withRecoverableDatabaseId(String recoverableDatabaseId) {
        if (isInCreateMode()) {
            this.inner().withRecoverableDatabaseId(recoverableDatabaseId);
        } else {
            this.updateParameter.withRecoverableDatabaseId(recoverableDatabaseId);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withRestorePointInTime(DateTime restorePointInTime) {
        if (isInCreateMode()) {
            this.inner().withRestorePointInTime(restorePointInTime);
        } else {
            this.updateParameter.withRestorePointInTime(restorePointInTime);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withSku(Sku sku) {
        if (isInCreateMode()) {
            this.inner().withSku(sku);
        } else {
            this.updateParameter.withSku(sku);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withSourceDatabaseId(String sourceDatabaseId) {
        if (isInCreateMode()) {
            this.inner().withSourceDatabaseId(sourceDatabaseId);
        } else {
            this.updateParameter.withSourceDatabaseId(sourceDatabaseId);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withStatus(String status) {
        if (isInCreateMode()) {
            this.inner().withStatus(status);
        } else {
            this.updateParameter.withStatus(status);
        }
        return this;
    }

    @Override
    public SqlPoolImpl withTags(Map<String, String> tags) {
        if (isInCreateMode()) {
            this.inner().withTags(tags);
        } else {
            this.updateParameter.withTags(tags);
        }
        return this;
    }

}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import com.google.common.flogger.GoogleLogger; import com.google.common.io.BaseEncoding; import com.google.devtools.build.lib.actions.ActionInput; import com.google.devtools.build.lib.actions.ActionInputMap; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.Artifact.ArchivedTreeArtifact; import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact; import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact; import com.google.devtools.build.lib.actions.ArtifactPathResolver; import com.google.devtools.build.lib.actions.FileArtifactValue; import com.google.devtools.build.lib.actions.FileStateType; import com.google.devtools.build.lib.actions.FileStateValue; import com.google.devtools.build.lib.actions.FilesetManifest; import com.google.devtools.build.lib.actions.FilesetManifest.RelativeSymlinkBehavior; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import 
com.google.devtools.build.lib.actions.cache.MetadataHandler; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.DigestUtils; import com.google.devtools.build.lib.vfs.Dirent; import com.google.devtools.build.lib.vfs.FileStatus; import com.google.devtools.build.lib.vfs.FileStatusWithDigest; import com.google.devtools.build.lib.vfs.FileStatusWithDigestAdapter; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.Symlinks; import java.io.FileNotFoundException; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import javax.annotation.Nullable; /** * Handler provided by {@link ActionExecutionFunction} which allows the execution engine to obtain * {@linkplain FileArtifactValue metadata} about inputs and outputs and to store metadata about an * action's outputs for purposes of creating the final {@link ActionExecutionValue}. * * <p>The handler can be in one of two modes. After construction, it acts as a cache for input and * output metadata while {@link ActionCacheChecker} determines whether the action needs to be * executed. If the action needs to be executed (i.e. no action cache hit), {@link * #prepareForActionExecution} is called. This call switches the handler to a mode where it accepts * {@linkplain MetadataInjector injected output data}, or otherwise obtains metadata from the * filesystem. Freshly created output files are set read-only and executable <em>before</em> * statting them to ensure that the stat's ctime is up to date. * * <p>After action execution, {@link #getMetadata} should be called on each of the action's outputs * (except those that were {@linkplain #artifactOmitted omitted}) to ensure that declared outputs * were in fact created and are valid. 
 */
final class ActionMetadataHandler implements MetadataHandler {

  private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();

  /**
   * Creates a new metadata handler.
   *
   * <p>If the handler is for use during input discovery, calling {@link #getMetadata} with an
   * artifact which is neither in {@code inputArtifactData} nor {@code outputs} is tolerated and
   * will return {@code null}. To subsequently transform the handler for regular action execution
   * (where such a call is not permitted), use {@link #transformAfterInputDiscovery}.
   */
  static ActionMetadataHandler create(
      ActionInputMap inputArtifactData,
      boolean forInputDiscovery,
      boolean archivedTreeArtifactsEnabled,
      ImmutableSet<Artifact> outputs,
      TimestampGranularityMonitor tsgm,
      ArtifactPathResolver artifactPathResolver,
      PathFragment execRoot,
      PathFragment derivedPathPrefix,
      Map<Artifact, ImmutableList<FilesetOutputSymlink>> expandedFilesets) {
    return new ActionMetadataHandler(
        inputArtifactData,
        forInputDiscovery,
        archivedTreeArtifactsEnabled,
        outputs,
        tsgm,
        artifactPathResolver,
        execRoot,
        derivedPathPrefix,
        // Pre-resolve fileset symlinks into a path -> metadata map once,
        // so getMetadata can answer fileset lookups without further I/O.
        createFilesetMapping(expandedFilesets, execRoot),
        new OutputStore());
  }

  private final ActionInputMap inputArtifactData;
  private final boolean forInputDiscovery;
  private final boolean archivedTreeArtifactsEnabled;
  private final ImmutableMap<PathFragment, FileArtifactValue> filesetMapping;
  // Concurrent set: outputs may be marked omitted from multiple threads.
  private final Set<Artifact> omittedOutputs = Sets.newConcurrentHashSet();
  private final ImmutableSet<Artifact> outputs;
  private final TimestampGranularityMonitor tsgm;
  private final ArtifactPathResolver artifactPathResolver;
  private final PathFragment execRoot;
  private final PathFragment derivedPathPrefix;
  // False while action-cache checking; flipped to true (once) by
  // prepareForActionExecution.
  private final AtomicBoolean executionMode = new AtomicBoolean(false);
  private final OutputStore store;

  private ActionMetadataHandler(
      ActionInputMap inputArtifactData,
      boolean forInputDiscovery,
      boolean archivedTreeArtifactsEnabled,
      ImmutableSet<Artifact> outputs,
      TimestampGranularityMonitor tsgm,
      ArtifactPathResolver artifactPathResolver,
      PathFragment execRoot,
      PathFragment derivedPathPrefix,
      ImmutableMap<PathFragment, FileArtifactValue> filesetMapping,
      OutputStore store) {
    this.inputArtifactData = checkNotNull(inputArtifactData);
    this.forInputDiscovery = forInputDiscovery;
    this.archivedTreeArtifactsEnabled = archivedTreeArtifactsEnabled;
    this.outputs = checkNotNull(outputs);
    this.tsgm = checkNotNull(tsgm);
    this.artifactPathResolver = checkNotNull(artifactPathResolver);
    this.execRoot = checkNotNull(execRoot);
    this.derivedPathPrefix = checkNotNull(derivedPathPrefix);
    this.filesetMapping = checkNotNull(filesetMapping);
    this.store = checkNotNull(store);
  }

  /**
   * Returns a new handler mostly identical to this one, except uses the given {@code store} and
   * does not permit {@link #getMetadata} to be called with an artifact which is neither in inputs
   * nor outputs.
   *
   * <p>The returned handler will be in the mode for action cache checking. To prepare it for action
   * execution, call {@link #prepareForActionExecution}.
   *
   * <p>This method is designed to be called after input discovery when a fresh handler is needed
   * but all of the parameters in {@link #create} would be the same as the original handler.
   */
  ActionMetadataHandler transformAfterInputDiscovery(OutputStore store) {
    return new ActionMetadataHandler(
        inputArtifactData,
        /*forInputDiscovery=*/ false,
        archivedTreeArtifactsEnabled,
        outputs,
        tsgm,
        artifactPathResolver,
        execRoot,
        derivedPathPrefix,
        filesetMapping,
        store);
  }

  /**
   * If {@code value} represents an existing file, returns it as is, otherwise throws {@link
   * FileNotFoundException}.
 */
  private static FileArtifactValue checkExists(FileArtifactValue value, Artifact artifact)
      throws FileNotFoundException {
    // Both marker values indicate a known-absent file; surface that as
    // the standard exception callers expect.
    if (FileArtifactValue.MISSING_FILE_MARKER.equals(value)
        || FileArtifactValue.OMITTED_FILE_MARKER.equals(value)) {
      throw new FileNotFoundException(artifact + " does not exist");
    }
    return checkNotNull(value, artifact);
  }

  /**
   * If {@code value} represents an existing tree artifact, returns it as is, otherwise throws
   * {@link FileNotFoundException}.
   */
  private static TreeArtifactValue checkExists(TreeArtifactValue value, Artifact artifact)
      throws FileNotFoundException {
    if (TreeArtifactValue.MISSING_TREE_ARTIFACT.equals(value)
        || TreeArtifactValue.OMITTED_TREE_MARKER.equals(value)) {
      throw new FileNotFoundException(artifact + " does not exist");
    }
    return checkNotNull(value, artifact);
  }

  /**
   * Flattens fileset symlink trees into a single exec-path -> metadata map.
   * Entries without a digest are skipped (their metadata can be recomputed
   * from the filesystem later).
   */
  private static ImmutableMap<PathFragment, FileArtifactValue> createFilesetMapping(
      Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesets, PathFragment execRoot) {
    Map<PathFragment, FileArtifactValue> filesetMap = new HashMap<>();
    for (Map.Entry<Artifact, ImmutableList<FilesetOutputSymlink>> entry : filesets.entrySet()) {
      try {
        FilesetManifest fileset =
            FilesetManifest.constructFilesetManifest(
                entry.getValue(), execRoot, RelativeSymlinkBehavior.RESOLVE);
        for (Map.Entry<String, FileArtifactValue> favEntry :
            fileset.getArtifactValues().entrySet()) {
          if (favEntry.getValue().getDigest() != null) {
            filesetMap.put(PathFragment.create(favEntry.getKey()), favEntry.getValue());
          }
        }
      } catch (IOException e) {
        // If we cannot get the FileArtifactValue, then we will make a FileSystem call to get the
        // digest, so it is okay to skip and continue here.
        logger.atWarning().withCause(e).log(
            "Could not properly get digest for %s", entry.getKey().getExecPath());
      }
    }
    return ImmutableMap.copyOf(filesetMap);
  }

  /** True if {@code artifact} is a declared output or a child of a declared tree artifact. */
  private boolean isKnownOutput(Artifact artifact) {
    return outputs.contains(artifact)
        || (artifact.hasParent() && outputs.contains(artifact.getParent()));
  }

  @Override
  @Nullable
  public FileArtifactValue getMetadata(ActionInput actionInput) throws IOException {
    // Non-Artifact inputs can only be fileset entries; answer from the
    // precomputed fileset mapping (keyed by exec-root-relative path).
    if (!(actionInput instanceof Artifact)) {
      PathFragment inputPath = actionInput.getExecPath();
      PathFragment filesetKeyPath =
          inputPath.startsWith(execRoot) ? inputPath.relativeTo(execRoot) : inputPath;
      return filesetMapping.get(filesetKeyPath);
    }

    Artifact artifact = (Artifact) actionInput;
    FileArtifactValue value;

    // Dispatch by artifact kind; the order of these checks is significant.
    if (!isKnownOutput(artifact)) {
      value = inputArtifactData.getMetadata(artifact);
      if (value != null) {
        return checkExists(value, artifact);
      }
      // Only input discovery may ask about artifacts outside inputs/outputs.
      checkState(forInputDiscovery, "%s is not present in declared outputs: %s", artifact, outputs);
      return null;
    }

    if (artifact.isMiddlemanArtifact()) {
      // A middleman artifact's data was either already injected from the action cache checker using
      // #setDigestForVirtualArtifact, or it has the default middleman value.
      value = store.getArtifactData(artifact);
      if (value != null) {
        return checkExists(value, artifact);
      }
      store.putArtifactData(artifact, FileArtifactValue.DEFAULT_MIDDLEMAN);
      return FileArtifactValue.DEFAULT_MIDDLEMAN;
    }

    if (artifact.isTreeArtifact()) {
      TreeArtifactValue tree = getTreeArtifactValue((SpecialArtifact) artifact);
      return tree.getMetadata();
    }

    if (artifact.isChildOfDeclaredDirectory()) {
      // Child metadata lives inside the parent tree's value.
      TreeArtifactValue tree = getTreeArtifactValue(artifact.getParent());
      value = tree.getChildValues().getOrDefault(artifact, FileArtifactValue.MISSING_FILE_MARKER);
      return checkExists(value, artifact);
    }

    value = store.getArtifactData(artifact);
    if (value != null) {
      return checkExists(value, artifact);
    }

    // No existing metadata; this can happen if the output metadata is not injected after a spawn
    // is executed. SkyframeActionExecutor.checkOutputs calls this method for every output file of
    // the action, which hits this code path. Another possibility is that an action runs multiple
    // spawns, and a subsequent spawn requests the metadata of an output of a previous spawn.
    //
    // If necessary, we first call chmod the output file. The FileArtifactValue may use a
    // FileContentsProxy, which is based on ctime (affected by chmod).
    if (executionMode.get()) {
      setPathReadOnlyAndExecutableIfFile(artifactPathResolver.toPath(artifact));
    }

    value = constructFileArtifactValueFromFilesystem(artifact);
    store.putArtifactData(artifact, value);
    return checkExists(value, artifact);
  }

  @Override
  public ActionInput getInput(String execPath) {
    return inputArtifactData.getInput(execPath);
  }

  @Override
  public void setDigestForVirtualArtifact(Artifact artifact, byte[] digest) {
    // Only middleman (virtual) artifacts may have a digest injected this way.
    checkArgument(artifact.isMiddlemanArtifact(), artifact);
    checkNotNull(digest, artifact);
    store.putArtifactData(artifact, FileArtifactValue.createProxy(digest));
  }

  /**
   * Returns the cached tree value for {@code artifact}, computing it from the
   * filesystem (and caching it) on a miss. Throws FileNotFoundException for
   * missing/omitted trees via checkExists.
   */
  private TreeArtifactValue getTreeArtifactValue(SpecialArtifact artifact) throws IOException {
    checkState(artifact.isTreeArtifact(), "%s is not a tree artifact", artifact);

    TreeArtifactValue value = store.getTreeArtifactData(artifact);
    if (value != null) {
      return checkExists(value, artifact);
    }

    value = constructTreeArtifactValueFromFilesystem(artifact);
    store.putTreeArtifactData(artifact, value);
    return checkExists(value, artifact);
  }

  private TreeArtifactValue constructTreeArtifactValueFromFilesystem(SpecialArtifact parent)
      throws IOException {
    Path treeDir = artifactPathResolver.toPath(parent);
    boolean chmod = executionMode.get();

    // Make sure the tree artifact root is a regular directory. Note that this is how the action is
    // initialized, so this should hold unless the action itself has deleted the root.
    if (!treeDir.isDirectory(Symlinks.NOFOLLOW)) {
      if (chmod) {
        setPathReadOnlyAndExecutableIfFile(treeDir);
      }
      return TreeArtifactValue.MISSING_TREE_ARTIFACT;
    }

    // Freshly created outputs are chmodded before statting so the stat's
    // ctime is up to date (see class javadoc).
    if (chmod) {
      setPathReadOnlyAndExecutable(treeDir);
    }

    TreeArtifactValue.Builder tree = TreeArtifactValue.newBuilder(parent);

    TreeArtifactValue.visitTree(
        treeDir,
        (parentRelativePath, type) -> {
          if (chmod && type != Dirent.Type.SYMLINK) {
            setPathReadOnlyAndExecutable(treeDir.getRelative(parentRelativePath));
          }
          if (type == Dirent.Type.DIRECTORY) {
            return; // The final TreeArtifactValue does not contain child directories.
          }
          TreeFileArtifact child = TreeFileArtifact.createTreeOutput(parent, parentRelativePath);
          FileArtifactValue metadata;
          try {
            metadata = constructFileArtifactValueFromFilesystem(child);
          } catch (FileNotFoundException e) {
            String errorMessage =
                String.format(
                    "Failed to resolve relative path %s inside TreeArtifact %s. "
                        + "The associated file is either missing or is an invalid symlink.",
                    parentRelativePath, treeDir);
            throw new IOException(errorMessage, e);
          }
          tree.putChild(child, metadata);
        });

    if (archivedTreeArtifactsEnabled) {
      // Also record metadata for the archived (e.g. zipped) form of the tree.
      ArchivedTreeArtifact archivedTreeArtifact =
          ArchivedTreeArtifact.create(parent, derivedPathPrefix);
      tree.setArchivedRepresentation(
          archivedTreeArtifact, constructFileArtifactValueFromFilesystem(archivedTreeArtifact));
    }

    return tree.build();
  }

  @Override
  public ImmutableSet<TreeFileArtifact> getTreeArtifactChildren(SpecialArtifact treeArtifact) {
    checkArgument(treeArtifact.isTreeArtifact(), "%s is not a tree artifact", treeArtifact);
    TreeArtifactValue tree = store.getTreeArtifactData(treeArtifact);
    return tree != null ? tree.getChildren() : ImmutableSet.of();
  }

  @Override
  public FileArtifactValue constructMetadataForDigest(
      Artifact output, FileStatus statNoFollow, byte[] digest) throws IOException {
    checkArgument(!output.isSymlink(), "%s is a symlink", output);
    checkNotNull(digest, "Missing digest for %s", output);
    checkNotNull(statNoFollow, "Missing stat for %s", output);
    checkState(
        executionMode.get(), "Tried to construct metadata for %s outside of execution", output);

    // We already have a stat, so no need to call chmod.
    return constructFileArtifactValue(
        output, FileStatusWithDigestAdapter.adapt(statNoFollow), digest);
  }

  @Override
  public void injectFile(Artifact output, FileArtifactValue metadata) {
    checkArgument(isKnownOutput(output), "%s is not a declared output of this action", output);
    checkArgument(
        !output.isTreeArtifact() && !output.isChildOfDeclaredDirectory(),
        "Tree artifacts and their children must be injected via injectTree: %s",
        output);
    checkState(executionMode.get(), "Tried to inject %s outside of execution", output);
    store.putArtifactData(output, metadata);
  }

  @Override
  public void injectTree(SpecialArtifact output, TreeArtifactValue tree) {
    checkArgument(isKnownOutput(output), "%s is not a declared output of this action", output);
    checkArgument(output.isTreeArtifact(), "Output must be a tree artifact: %s", output);
    checkState(executionMode.get(), "Tried to inject %s outside of execution", output);
    // The injected tree must carry an archived representation iff the
    // feature is enabled, so later readers see a consistent shape.
    checkArgument(
        archivedTreeArtifactsEnabled == tree.getArchivedRepresentation().isPresent(),
        "Archived representation presence mismatched for: %s with archivedTreeArtifactsEnabled: %s",
        tree,
        archivedTreeArtifactsEnabled);
    store.putTreeArtifactData(output, tree);
  }

  @Override
  public void markOmitted(Artifact output) {
    checkState(executionMode.get(), "Tried to mark %s omitted outside of execution", output);
    boolean newlyOmitted = omittedOutputs.add(output);
    if (output.isTreeArtifact()) {
      // Tolerate marking a tree artifact as omitted multiple times so that callers don't have to
      // deduplicate when a tree artifact has several omitted children.
      if (newlyOmitted) {
        store.putTreeArtifactData((SpecialArtifact) output, TreeArtifactValue.OMITTED_TREE_MARKER);
      }
    } else {
      checkState(newlyOmitted, "%s marked as omitted twice", output);
      store.putArtifactData(output, FileArtifactValue.OMITTED_FILE_MARKER);
    }
  }

  @Override
  public boolean artifactOmitted(Artifact artifact) {
    return omittedOutputs.contains(artifact);
  }

  @Override
  public void resetOutputs(Iterable<? extends Artifact> outputs) {
    checkState(
        executionMode.get(), "resetOutputs() should only be called from within a running action.");
    for (Artifact output : outputs) {
      omittedOutputs.remove(output);
      store.remove(output);
    }
  }

  /**
   * Informs this handler that the action is about to be executed.
   *
   * <p>Any stale metadata cached in the underlying {@link OutputStore} from action cache checking
   * is cleared.
   */
  void prepareForActionExecution() {
    // getAndSet guards against entering execution mode twice.
    checkState(!executionMode.getAndSet(true), "Already in execution mode");
    store.clear();
  }

  /**
   * Returns the underlying {@link OutputStore} containing metadata cached during the lifetime of
   * this handler.
   *
   * <p>The store may be passed to {@link ActionExecutionValue#createFromOutputStore}.
   */
  OutputStore getOutputStore() {
    return store;
  }

  /** Constructs a new {@link FileArtifactValue} by reading from the file system. */
  private FileArtifactValue constructFileArtifactValueFromFilesystem(Artifact artifact)
      throws IOException {
    return constructFileArtifactValue(artifact, /*statNoFollow=*/ null, /*injectedDigest=*/ null);
  }

  /** Constructs a new {@link FileArtifactValue}, optionally taking a known stat and digest.
*/
  private FileArtifactValue constructFileArtifactValue(
      Artifact artifact,
      @Nullable FileStatusWithDigest statNoFollow,
      @Nullable byte[] injectedDigest)
      throws IOException {
    checkState(!artifact.isTreeArtifact(), "%s is a tree artifact", artifact);

    FileArtifactValue value =
        fileArtifactValueFromArtifact(
            artifact,
            artifactPathResolver,
            statNoFollow,
            injectedDigest != null,
            // Prevent constant metadata artifacts from notifying the timestamp granularity monitor
            // and potentially delaying the build for no reason.
            artifact.isConstantMetadata() ? null : tsgm);

    // Ensure that we don't have both an injected digest and a digest from the filesystem.
    byte[] fileDigest = value.getDigest();
    if (fileDigest != null && injectedDigest != null) {
      throw new IllegalStateException(
          String.format(
              "Digest %s was injected for artifact %s, but got %s from the filesystem (%s)",
              BaseEncoding.base16().encode(injectedDigest),
              artifact,
              BaseEncoding.base16().encode(fileDigest),
              value));
    }

    FileStateType type = value.getType();

    if (!type.exists()) {
      // Nonexistent files should only occur before executing an action.
      throw new FileNotFoundException(artifact.prettyPrint() + " does not exist");
    }

    if (type.isSymlink()) {
      // We always create a FileArtifactValue for an unresolved symlink with a digest (calling
      // readlink() is easy, unlike checksumming a potentially huge file).
      checkNotNull(fileDigest, "%s missing digest", value);
      return value;
    }

    if (type.isFile() && fileDigest != null) {
      // The digest is in the file value and that is all that is needed for this file's metadata.
      return value;
    }

    if (type.isDirectory()) {
      // This branch is taken when the output of an action is a directory:
      //   - A Fileset (in this case, Blaze is correct)
      //   - A directory someone created in a local action (in this case, changes under the
      //     directory may not be detected since we use the mtime of the directory for
      //     up-to-dateness checks)
      //   - A symlink to a source directory due to Filesets
      return FileArtifactValue.createForDirectoryWithMtime(
          artifactPathResolver.toPath(artifact).getLastModifiedTime());
    }

    if (injectedDigest == null && type.isFile()) {
      // We don't have an injected digest and there is no digest in the file value (which attempts a
      // fast digest). Manually compute the digest instead.
      injectedDigest =
          DigestUtils.manuallyComputeDigest(artifactPathResolver.toPath(artifact), value.getSize());
    }
    return FileArtifactValue.createFromInjectedDigest(
        value, injectedDigest, /*isShareable=*/ !artifact.isConstantMetadata());
  }

  /**
   * Constructs a {@link FileArtifactValue} for a regular (non-tree, non-middleman) artifact for the
   * purpose of determining whether an existing {@link FileArtifactValue} is still valid.
   *
   * <p>The returned metadata may be compared with metadata present in an {@link
   * ActionExecutionValue} using {@link FileArtifactValue#couldBeModifiedSince} to check for
   * inter-build modifications.
*/
  static FileArtifactValue fileArtifactValueFromArtifact(
      Artifact artifact,
      @Nullable FileStatusWithDigest statNoFollow,
      @Nullable TimestampGranularityMonitor tsgm)
      throws IOException {
    return fileArtifactValueFromArtifact(
        artifact, ArtifactPathResolver.IDENTITY, statNoFollow, /*digestWillBeInjected=*/ false, tsgm);
  }

  private static FileArtifactValue fileArtifactValueFromArtifact(
      Artifact artifact,
      ArtifactPathResolver artifactPathResolver,
      @Nullable FileStatusWithDigest statNoFollow,
      boolean digestWillBeInjected,
      @Nullable TimestampGranularityMonitor tsgm)
      throws IOException {
    checkState(!artifact.isTreeArtifact() && !artifact.isMiddlemanArtifact(), artifact);

    Path pathNoFollow = artifactPathResolver.toPath(artifact);
    RootedPath rootedPathNoFollow =
        RootedPath.toRootedPath(
            artifactPathResolver.transformRoot(artifact.getRoot().getRoot()),
            artifact.getRootRelativePath());

    if (statNoFollow == null) {
      // Stat the file. All output artifacts of an action are deleted before execution, so if a file
      // exists, it was most likely created by the current action. There is a race condition here if
      // an external process creates (or modifies) the file between the deletion and this stat,
      // which we cannot solve.
      statNoFollow = FileStatusWithDigestAdapter.adapt(pathNoFollow.statIfFound(Symlinks.NOFOLLOW));
    }

    if (statNoFollow == null || !statNoFollow.isSymbolicLink()) {
      return fileArtifactValueFromStat(
          rootedPathNoFollow, statNoFollow, digestWillBeInjected, artifact.isConstantMetadata(), tsgm);
    }

    if (artifact.isSymlink()) {
      // Declared symlink outputs are captured unresolved.
      return FileArtifactValue.createForUnresolvedSymlink(pathNoFollow);
    }

    // We use FileStatus#isSymbolicLink over Path#isSymbolicLink to avoid the unnecessary stat
    // done by the latter. We need to protect against symlink cycles since
    // ArtifactFileMetadata#value assumes it's dealing with a file that's not in a symlink cycle.
    Path realPath = pathNoFollow.resolveSymbolicLinks();
    if (realPath.equals(pathNoFollow)) {
      throw new IOException("symlink cycle");
    }

    RootedPath realRootedPath =
        RootedPath.toRootedPathMaybeUnderRoot(
            realPath,
            ImmutableList.of(artifactPathResolver.transformRoot(artifact.getRoot().getRoot())));

    // TODO(bazel-team): consider avoiding a 'stat' here when the symlink target hasn't changed
    // and is a source file (since changes to those are checked separately).
    FileStatus realStat = realRootedPath.asPath().statIfFound(Symlinks.NOFOLLOW);
    FileStatusWithDigest realStatWithDigest = FileStatusWithDigestAdapter.adapt(realStat);
    return fileArtifactValueFromStat(
        realRootedPath, realStatWithDigest, digestWillBeInjected, artifact.isConstantMetadata(), tsgm);
  }

  private static FileArtifactValue fileArtifactValueFromStat(
      RootedPath rootedPath,
      FileStatusWithDigest stat,
      boolean digestWillBeInjected,
      boolean isConstantMetadata,
      @Nullable TimestampGranularityMonitor tsgm)
      throws IOException {
    if (stat == null) {
      // A null stat means the file does not exist on disk.
      return FileArtifactValue.MISSING_FILE_MARKER;
    }

    FileStateValue fileStateValue =
        FileStateValue.createWithStatNoFollow(rootedPath, stat, digestWillBeInjected, tsgm);

    return stat.isDirectory()
        ? FileArtifactValue.createForDirectoryWithMtime(stat.getLastModifiedTime())
        : FileArtifactValue.createForNormalFile(
            fileStateValue.getDigest(),
            fileStateValue.getContentsProxy(),
            stat.getSize(),
            /*isShareable=*/ !isConstantMetadata);
  }

  /** Applies {@link #setPathReadOnlyAndExecutable} only when the path denotes a regular file. */
  private static void setPathReadOnlyAndExecutableIfFile(Path path) throws IOException {
    if (path.isFile(Symlinks.NOFOLLOW)) {
      setPathReadOnlyAndExecutable(path);
    }
  }

  /** Sets permission bits to r-xr-xr-x (0555) on the given path. */
  private static void setPathReadOnlyAndExecutable(Path path) throws IOException {
    path.chmod(0555);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.common.json; import org.apache.dubbo.common.utils.Stack; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; /** * JSON Writer. * <p> * w.objectBegin().objectItem("name").valueString("qianlei").objectEnd() = {name:"qianlei"}. 
*/ @Deprecated public class JSONWriter { private static final byte UNKNOWN = 0, ARRAY = 1, OBJECT = 2, OBJECT_VALUE = 3; private static final String[] CONTROL_CHAR_MAP = new String[]{ "\\u0000", "\\u0001", "\\u0002", "\\u0003", "\\u0004", "\\u0005", "\\u0006", "\\u0007", "\\b", "\\t", "\\n", "\\u000b", "\\f", "\\r", "\\u000e", "\\u000f", "\\u0010", "\\u0011", "\\u0012", "\\u0013", "\\u0014", "\\u0015", "\\u0016", "\\u0017", "\\u0018", "\\u0019", "\\u001a", "\\u001b", "\\u001c", "\\u001d", "\\u001e", "\\u001f" }; private Writer mWriter; private State mState = new State(UNKNOWN); private Stack<State> mStack = new Stack<State>(); public JSONWriter(Writer writer) { mWriter = writer; } public JSONWriter(OutputStream is, String charset) throws UnsupportedEncodingException { mWriter = new OutputStreamWriter(is, charset); } private static String escape(String str) { if (str == null) { return str; } int len = str.length(); if (len == 0) { return str; } char c; StringBuilder sb = null; for (int i = 0; i < len; i++) { c = str.charAt(i); if (c < ' ') // control char. { if (sb == null) { sb = new StringBuilder(len << 1); sb.append(str, 0, i); } sb.append(CONTROL_CHAR_MAP[c]); } else { switch (c) { case '\\': case '/': case '"': if (sb == null) { sb = new StringBuilder(len << 1); sb.append(str, 0, i); } sb.append('\\').append(c); break; default: if (sb != null) { sb.append(c); } } } } return sb == null ? str : sb.toString(); } /** * object begin. * * @return this. * @throws IOException */ public JSONWriter objectBegin() throws IOException { beforeValue(); mWriter.write(JSON.LBRACE); mStack.push(mState); mState = new State(OBJECT); return this; } /** * object end. * * @return this. * @throws IOException */ public JSONWriter objectEnd() throws IOException { mWriter.write(JSON.RBRACE); mState = mStack.pop(); return this; } /** * object item. * * @param name name. * @return this. 
* @throws IOException */ public JSONWriter objectItem(String name) throws IOException { beforeObjectItem(); mWriter.write(JSON.QUOTE); mWriter.write(escape(name)); mWriter.write(JSON.QUOTE); mWriter.write(JSON.COLON); return this; } /** * array begin. * * @return this. * @throws IOException */ public JSONWriter arrayBegin() throws IOException { beforeValue(); mWriter.write(JSON.LSQUARE); mStack.push(mState); mState = new State(ARRAY); return this; } /** * array end, return array value. * * @return this. * @throws IOException */ public JSONWriter arrayEnd() throws IOException { mWriter.write(JSON.RSQUARE); mState = mStack.pop(); return this; } /** * value. * * @return this. * @throws IOException */ public JSONWriter valueNull() throws IOException { beforeValue(); mWriter.write(JSON.NULL); return this; } /** * value. * * @param value value. * @return this. * @throws IOException */ public JSONWriter valueString(String value) throws IOException { beforeValue(); mWriter.write(JSON.QUOTE); mWriter.write(escape(value)); mWriter.write(JSON.QUOTE); return this; } /** * value. * * @param value value. * @return this. * @throws IOException */ public JSONWriter valueBoolean(boolean value) throws IOException { beforeValue(); mWriter.write(value ? "true" : "false"); return this; } /** * value. * * @param value value. * @return this. * @throws IOException */ public JSONWriter valueInt(int value) throws IOException { beforeValue(); mWriter.write(String.valueOf(value)); return this; } /** * value. * * @param value value. * @return this. * @throws IOException */ public JSONWriter valueLong(long value) throws IOException { beforeValue(); mWriter.write(String.valueOf(value)); return this; } /** * value. * * @param value value. * @return this. * @throws IOException */ public JSONWriter valueFloat(float value) throws IOException { beforeValue(); mWriter.write(String.valueOf(value)); return this; } /** * value. * * @param value value. * @return this. 
* @throws IOException */ public JSONWriter valueDouble(double value) throws IOException { beforeValue(); mWriter.write(String.valueOf(value)); return this; } private void beforeValue() throws IOException { switch (mState.type) { case ARRAY: if (mState.itemCount++ > 0) { mWriter.write(JSON.COMMA); } return; case OBJECT: throw new IOException("Must call objectItem first."); case OBJECT_VALUE: mState.type = OBJECT; return; default: } } private void beforeObjectItem() throws IOException { switch (mState.type) { case OBJECT_VALUE: mWriter.write(JSON.NULL); case OBJECT: mState.type = OBJECT_VALUE; if (mState.itemCount++ > 0) { mWriter.write(JSON.COMMA); } return; default: throw new IOException("Must call objectBegin first."); } } private static class State { private byte type; private int itemCount = 0; State(byte t) { type = t; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.job; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.compute.ComputeExecutionRejectedException; import org.apache.ignite.compute.ComputeJob; import org.apache.ignite.compute.ComputeJobContext; import org.apache.ignite.compute.ComputeJobMasterLeaveAware; import org.apache.ignite.compute.ComputeUserUndeclaredException; import org.apache.ignite.events.JobEvent; import org.apache.ignite.failure.FailureContext; import org.apache.ignite.failure.FailureType; import org.apache.ignite.igfs.IgfsOutOfSpaceException; import org.apache.ignite.internal.GridInternalException; import org.apache.ignite.internal.GridJobContextImpl; import org.apache.ignite.internal.GridJobExecuteResponse; import 
org.apache.ignite.internal.GridJobSessionImpl; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.NodeStoppingException; import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException; import org.apache.ignite.internal.managers.deployment.GridDeployment; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.distributed.dht.GridReservable; import org.apache.ignite.internal.processors.query.GridQueryProcessor; import org.apache.ignite.internal.processors.security.OperationSecurityContext; import org.apache.ignite.internal.processors.security.SecurityContext; import org.apache.ignite.internal.processors.security.SecurityUtils; import org.apache.ignite.internal.processors.service.GridServiceNotFoundException; import org.apache.ignite.internal.processors.task.GridInternal; import org.apache.ignite.internal.processors.timeout.GridTimeoutObject; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.LT; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.internal.util.worker.GridWorker; import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.lang.IgniteRunnable; import org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.marshaller.Marshaller; import org.apache.ignite.marshaller.MarshallerUtils; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.events.EventType.EVT_JOB_CANCELLED; import static org.apache.ignite.events.EventType.EVT_JOB_FAILED; import static org.apache.ignite.events.EventType.EVT_JOB_FINISHED; import static org.apache.ignite.events.EventType.EVT_JOB_QUEUED; import static org.apache.ignite.events.EventType.EVT_JOB_REJECTED; import 
static org.apache.ignite.events.EventType.EVT_JOB_STARTED;
import static org.apache.ignite.events.EventType.EVT_JOB_TIMEDOUT;
import static org.apache.ignite.internal.GridTopic.TOPIC_JOB;
import static org.apache.ignite.internal.GridTopic.TOPIC_TASK;
import static org.apache.ignite.internal.managers.communication.GridIoPolicy.MANAGEMENT_POOL;
import static org.apache.ignite.internal.managers.communication.GridIoPolicy.SYSTEM_POOL;

/**
 * Job worker.
 */
public class GridJobWorker extends GridWorker implements GridTimeoutObject {
    /** Per-thread held flag. */
    private static final ThreadLocal<Boolean> HOLD = new ThreadLocal<Boolean>() {
        @Override protected Boolean initialValue() {
            return false;
        }
    };

    /** Static logger to avoid re-creation. */
    private static final AtomicReference<IgniteLogger> logRef = new AtomicReference<>();

    /** Job creation timestamp. */
    private final long createTime;

    /** Job execution start timestamp (0 until {@link #body()} runs). */
    private volatile long startTime;

    /** Job finish timestamp (0 until the job is finished). */
    private volatile long finishTime;

    /** Kernal context. */
    private final GridKernalContext ctx;

    /** Per-job communication topic on the local node. */
    private final Object jobTopic;

    /** Per-task communication topic on the local node. */
    private final Object taskTopic;

    /** Serialized job; nulled out after deserialization in {@code initialize()}. */
    private byte[] jobBytes;

    /** Task originating node. */
    private final ClusterNode taskNode;

    /** Flag set when visor or internal task is running. */
    private final boolean internal;

    /** Logger. */
    private final IgniteLogger log;

    /** Marshaller. */
    private final Marshaller marsh;

    /** Job session. */
    private final GridJobSessionImpl ses;

    /** Job context. */
    private final GridJobContextImpl jobCtx;

    /** Job event listener. */
    private final GridJobEventListener evtLsnr;

    /** Deployment. */
    private final GridDeployment dep;

    /** Guard ensuring the job is finished at most once. */
    private final AtomicBoolean finishing = new AtomicBoolean();

    /** Guard ensuring that master-leave callback is not executed more than once. */
    private final AtomicBoolean masterLeaveGuard = new AtomicBoolean();

    /** {@code True} if the job timed out (set from {@link #onTimeout()}). */
    private volatile boolean timedOut;

    /** {@code True} if the job was cancelled by the system. */
    private volatile boolean sysCancelled;

    /** {@code True} if the node is stopping. */
    private volatile boolean sysStopping;

    /** {@code True} once job execution has started. */
    private volatile boolean isStarted;

    /** Deployed job.
*/
    GridJobWorker(
        GridKernalContext ctx,
        GridDeployment dep,
        long createTime,
        GridJobSessionImpl ses,
        GridJobContextImpl jobCtx,
        byte[] jobBytes,
        ComputeJob job,
        ClusterNode taskNode,
        boolean internal,
        GridJobEventListener evtLsnr,
        GridJobHoldListener holdLsnr,
        GridReservable partsReservation,
        AffinityTopologyVersion reqTopVer,
        String execName) {
        super(ctx.igniteInstanceName(), "grid-job-worker", ctx.log(GridJobWorker.class));

        assert ctx != null;
        assert ses != null;
        assert jobCtx != null;
        assert taskNode != null;
        assert evtLsnr != null;
        assert dep != null;
        assert holdLsnr != null;

        this.ctx = ctx;
        this.createTime = createTime;
        this.evtLsnr = evtLsnr;
        this.dep = dep;
        this.ses = ses;
        this.jobCtx = jobCtx;
        this.jobBytes = jobBytes;
        this.taskNode = taskNode;
        this.internal = internal;
        this.holdLsnr = holdLsnr;
        this.partsReservation = partsReservation;
        this.reqTopVer = reqTopVer;
        this.execName = execName;

        // Job may be null here; in that case it is deserialized from jobBytes in initialize().
        if (job != null)
            this.job = job;

        log = U.logger(ctx, logRef, this);

        marsh = ctx.config().getMarshaller();

        UUID locNodeId = ctx.discovery().localNode().id();

        // Per-job and per-task communication topics on the local node.
        jobTopic = TOPIC_JOB.topic(ses.getJobId(), locNodeId);
        taskTopic = TOPIC_TASK.topic(ses.getJobId(), locNodeId);

        secCtx = ctx.security().securityContext();
    }

    /**
     * Gets deployed job or {@code null} if job could not be deployed.
     *
     * @return Deployed job.
     */
    @Nullable public ComputeJob getJob() {
        return job;
    }

    /**
     * @return Deployed task.
     */
    public GridDeployment getDeployment() {
        return dep;
    }

    /**
     * Returns {@code True} if job was cancelled by the system.
     *
     * @return {@code True} if job was cancelled by the system.
     */
    boolean isSystemCanceled() {
        return sysCancelled;
    }

    /**
     * @return Create time.
     */
    public long getCreateTime() {
        return createTime;
    }

    /** @return Start time. */
    public long getStartTime() {
        return startTime;
    }

    /** @return Finish time. */
    public long getFinishTime() {
        return finishTime;
    }

    /** @return Is started. */
    public boolean isStarted() {
        return isStarted;
    }

    /** @return Grid reservable resource. */
    public GridReservable getPartsReservation() {
        return partsReservation;
    }

    /**
     * @return Unique job ID.
     */
    public IgniteUuid getJobId() {
        IgniteUuid jobId = ses.getJobId();

        assert jobId != null;

        return jobId;
    }

    /**
     * @return Job context.
     */
    public ComputeJobContext getJobContext() {
        return jobCtx;
    }

    /**
     * @return Job communication topic.
     */
    Object getJobTopic() {
        return jobTopic;
    }

    /**
     * @return Task communication topic.
     */
    Object getTaskTopic() {
        return taskTopic;
    }

    /**
     * @return Session.
     */
    public GridJobSessionImpl getSession() {
        return ses;
    }

    /**
     * Gets job finishing state.
     *
     * @return {@code true} if job is being finished after execution
     *      and {@code false} otherwise.
     */
    public boolean isFinishing() {
        return finishing.get();
    }

    /**
     * @return Parent task node ID.
     */
    public ClusterNode getTaskNode() {
        return taskNode;
    }

    /**
     * @return Job execution time.
     */
    long getExecuteTime() {
        // Snapshot volatile fields once so the computation is consistent.
        long startTime0 = startTime;
        long finishTime0 = finishTime;

        return startTime0 == 0 ? 0 : finishTime0 == 0 ?
            U.currentTimeMillis() - startTime0 : finishTime0 - startTime0;
    }

    /**
     * @return Time job spent on waiting queue.
     */
    long getQueuedTime() {
        long startTime0 = startTime;

        return startTime0 == 0 ? U.currentTimeMillis() - createTime : startTime0 - createTime;
    }

    /** {@inheritDoc} */
    @Override public long endTime() {
        return ses.getEndTime();
    }

    /** {@inheritDoc} */
    @Override public IgniteUuid timeoutId() {
        IgniteUuid jobId = ses.getJobId();

        assert jobId != null;

        return jobId;
    }

    /**
     * @return {@code True} if job is timed out.
     */
    public boolean isTimedOut() {
        return timedOut;
    }

    /**
     * @return {@code True} if parent task is internal or Visor-related.
*/
    public boolean isInternal() {
        return internal;
    }

    /** {@inheritDoc} */
    @Override public void onTimeout() {
        // Ignore timeouts that race with normal completion.
        if (finishing.get())
            return;

        timedOut = true;

        U.warn(log, "Job has timed out: " + ses);

        cancel();

        if (!internal && ctx.event().isRecordable(EVT_JOB_TIMEDOUT))
            recordEvent(EVT_JOB_TIMEDOUT, "Job has timed out: " + job);
    }

    /**
     * Callback for whenever grid is stopping.
     */
    public void onStopping() {
        sysStopping = true;
    }

    /**
     * @return {@code True} if job was halted.
     */
    public boolean held() {
        return held.get() > 0;
    }

    /**
     * Sets halt flags.
     */
    public boolean hold() {
        HOLD.set(true);

        boolean res;

        // Only count the hold if the listener accepted it.
        if (res = holdLsnr.onHeld(this))
            held.incrementAndGet();

        return res;
    }

    /**
     * Initializes job. Handles deployments and event recording.
     *
     * @param dep Job deployed task.
     * @param taskCls Task class.
     * @return {@code True} if job was successfully initialized.
     */
    boolean initialize(GridDeployment dep, Class<?> taskCls) {
        assert dep != null;

        IgniteException ex = null;

        try {
            if (job == null) {
                // Deserialize the job with the sender's marshalling version in effect.
                MarshallerUtils.jobSenderVersion(taskNode.version());

                try {
                    job = U.unmarshal(marsh, jobBytes, U.resolveClassLoader(dep.classLoader(), ctx.config()));
                }
                finally {
                    MarshallerUtils.jobSenderVersion(null);
                }

                // No need to hold reference any more.
                jobBytes = null;
            }

            // Inject resources.
            ctx.resource().inject(dep, taskCls, job, ses, jobCtx);

            if (!internal && ctx.event().isRecordable(EVT_JOB_QUEUED))
                recordEvent(EVT_JOB_QUEUED, "Job got queued for computation.");

            job = SecurityUtils.sandboxedProxy(ctx, ComputeJob.class, job);
        }
        catch (IgniteCheckedException e) {
            U.error(log, "Failed to initialize job [jobId=" + ses.getJobId() + ", ses=" + ses + ']', e);

            ex = new IgniteException(e);
        }
        catch (Throwable e) {
            ex = handleThrowable(e);

            assert ex != null;

            if (e instanceof Error)
                throw e;
        }
        finally {
            // Any initialization failure finishes the job immediately.
            if (ex != null)
                finishJob(null, ex, true);
        }

        return ex == null;
    }

    /** {@inheritDoc} */
    @Override protected void body() {
        assert job != null;

        startTime = U.currentTimeMillis();

        isStarted = true;

        // Event notification.
        evtLsnr.onJobStarted(this);

        if (!internal && ctx.event().isRecordable(EVT_JOB_STARTED))
            recordEvent(EVT_JOB_STARTED, /*no message for success*/null);

        execute0(true);
    }

    /**
     * Executes the job.
     */
    public void execute() {
        execute0(false);
    }

    /**
     * @param skipNtf {@code True} to skip job processor {@code onUnheld()}
     *      notification (only from {@link #body()}).
     */
    private void execute0(boolean skipNtf) {
        // Make sure flag is not set for current thread.
        HOLD.set(false);

        try {
            if (partsReservation != null) {
                try {
                    if (!partsReservation.reserve()) {
                        // Reservation refused: finish with a retry response.
                        finishJob(null, null, true, true);

                        return;
                    }
                }
                catch (Exception e) {
                    IgniteException ex = new IgniteException("Failed to lock partitions " +
                        "[jobId=" + ses.getJobId() + ", ses=" + ses + ']', e);

                    // NOTE(review): stray double semicolon below — harmless empty statement, left as-is.
                    U.error(log, "Failed to lock partitions [jobId=" + ses.getJobId() + ", ses=" + ses + ']', e);;

                    finishJob(null, ex, true);

                    return;
                }
            }

            if (isCancelled())
                // If job was cancelled prior to assigning runner to it?
super.cancel();

            if (!skipNtf) {
                if (holdLsnr.onUnheld(this))
                    held.decrementAndGet();
                else {
                    if (log.isDebugEnabled())
                        log.debug("Ignoring job execution (job was not held).");

                    return;
                }
            }

            boolean sndRes = true;

            Object res = null;

            IgniteException ex = null;

            try {
                ctx.job().currentTaskSession(ses);

                if (reqTopVer != null)
                    GridQueryProcessor.setRequestAffinityTopologyVersion(reqTopVer);

                // If job has timed out, then
                // avoid computation altogether.
                if (isTimedOut())
                    sndRes = false;
                else {
                    // Run the user job under the deployment class loader.
                    res = U.wrapThreadLoader(dep.classLoader(), new Callable<Object>() {
                        @Nullable @Override public Object call() {
                            try {
                                if (internal && ctx.config().isPeerClassLoadingEnabled())
                                    ctx.job().internal(true);

                                return job.execute();
                            }
                            finally {
                                if (internal && ctx.config().isPeerClassLoadingEnabled())
                                    ctx.job().internal(false);
                            }
                        }
                    });

                    if (log.isDebugEnabled()) {
                        log.debug(S.toString("Job execution has successfully finished",
                            "job", job, false,
                            "res", res, true));
                    }
                }
            }
            catch (IgniteException e) {
                if (sysStopping && e.hasCause(IgniteInterruptedCheckedException.class, InterruptedException.class)) {
                    ex = handleThrowable(e);

                    assert ex != null;
                }
                else {
                    if (X.hasCause(e, GridInternalException.class) || X.hasCause(e, IgfsOutOfSpaceException.class)) {
                        // Print exception for internal errors only if debug is enabled.
                        if (log.isDebugEnabled())
                            U.error(log, "Failed to execute job [jobId=" + ses.getJobId() + ", ses=" + ses + ']', e);
                    }
                    else if (X.hasCause(e, InterruptedException.class)) {
                        String msg = "Job was cancelled [jobId=" + ses.getJobId() + ", ses=" + ses + ']';

                        if (log.isDebugEnabled())
                            U.error(log, msg, e);
                        else
                            U.warn(log, msg);
                    }
                    else if (X.hasCause(e, GridServiceNotFoundException.class) ||
                        X.hasCause(e, ClusterTopologyCheckedException.class))
                        // Should be throttled, because GridServiceProxy continuously retry getting service.
                        LT.error(log, e, "Failed to execute job [jobId=" + ses.getJobId() + ", ses=" + ses + ']');
                    else {
                        U.error(log, "Failed to execute job [jobId=" + ses.getJobId() + ", ses=" + ses + ']', e);

                        if (X.hasCause(e, OutOfMemoryError.class))
                            ctx.failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e));
                    }

                    ex = e;
                }
            }
            // Catch Throwable to protect against bad user code except
            // InterruptedException if job is being cancelled.
            catch (Throwable e) {
                ex = handleThrowable(e);

                assert ex != null;

                if (e instanceof Error)
                    throw (Error)e;
            }
            finally {
                // Finish here only if not held by this thread.
                if (!HOLD.get())
                    finishJob(res, ex, sndRes);
                else
                    // Make sure flag is not set for current thread.
                    // This may happen in case of nested internal task call with continuation.
                    HOLD.set(false);

                ctx.job().currentTaskSession(null);

                if (reqTopVer != null)
                    GridQueryProcessor.setRequestAffinityTopologyVersion(null);
            }
        }
        finally {
            // Always release reserved partitions, even on failure.
            if (partsReservation != null)
                partsReservation.release();
        }
    }

    /**
     * Handles {@link Throwable} generic exception for task
     * deployment and execution.
     *
     * @param e Exception.
     * @return Wrapped exception.
     */
    private IgniteException handleThrowable(Throwable e) {
        String msg = null;

        IgniteException ex = null;

        // Special handling for weird interrupted exception which
        // happens due to JDk 1.5 bug.
        if (e instanceof InterruptedException && !sysStopping) {
            msg = "Failed to execute job due to interrupted exception.";

            // Turn interrupted exception into checked exception.
            ex = new IgniteException(msg, e);
        }
        // Special 'NoClassDefFoundError' handling if P2P is on. We had many questions
        // about this exception and decided to change error message.
        else if ((e instanceof NoClassDefFoundError || e instanceof ClassNotFoundException) &&
            ctx.config().isPeerClassLoadingEnabled()) {
            msg = "Failed to execute job due to class or resource loading exception (make sure that task " +
                "originating node is still in grid and requested class is in the task class path) [jobId=" +
                ses.getJobId() + ", ses=" + ses + ']';

            ex = new ComputeUserUndeclaredException(msg, e);
        }
        else if (sysStopping && X.hasCause(e, InterruptedException.class, IgniteInterruptedCheckedException.class)) {
            msg = "Job got interrupted due to system stop (will attempt failover).";

            ex = new ComputeExecutionRejectedException(e);
        }

        // Fallback for all other throwables.
        if (msg == null) {
            msg = "Failed to execute job due to unexpected runtime exception [jobId=" + ses.getJobId() +
                ", ses=" + ses + ", err=" + e.getMessage() + ']';

            ex = new ComputeUserUndeclaredException(msg, e);
        }

        assert msg != null;
        assert ex != null;

        U.error(log, msg, e);

        return ex;
    }

    /** {@inheritDoc} */
    @Override public void cancel() {
        cancel(false);
    }

    /**
     * @param sys System flag.
     */
    public void cancel(boolean sys) {
        try {
            super.cancel();

            final ComputeJob job0 = job;

            if (sys)
                sysCancelled = true;

            if (job0 != null) {
                if (log.isDebugEnabled())
                    log.debug("Cancelling job: " + ses);

                // Invoke user cancel logic under the deployment class loader and
                // the security context captured at job creation.
                U.wrapThreadLoader(dep.classLoader(), new IgniteRunnable() {
                    @Override public void run() {
                        try (OperationSecurityContext c = ctx.security().withContext(secCtx)) {
                            job0.cancel();
                        }
                    }
                });
            }

            if (!internal && ctx.event().isRecordable(EVT_JOB_CANCELLED))
                recordEvent(EVT_JOB_CANCELLED, "Job was cancelled: " + job0);
        }
        // Catch throwable to protect against bad user code.
        catch (Throwable e) {
            U.error(log, "Failed to cancel job due to undeclared user exception [jobId=" + ses.getJobId() +
                ", ses=" + ses + ']', e);

            if (e instanceof Error)
                throw e;
        }
    }

    /**
     * @return Custom executor name.
     */
    public String executorName() {
        return execName;
    }

    /**
     * @param evtType Event type.
     * @param msg Message.
     */
    private void recordEvent(int evtType, @Nullable String msg) {
        assert ctx.event().isRecordable(evtType);
        assert !internal;

        // Build and record a job lifecycle event carrying full task/session identity.
        JobEvent evt = new JobEvent();

        evt.jobId(ses.getJobId());
        evt.message(msg);
        evt.node(ctx.discovery().localNode());
        evt.taskName(ses.getTaskName());
        evt.taskClassName(ses.getTaskClassName());
        evt.taskSessionId(ses.getId());
        evt.type(evtType);
        evt.taskNode(taskNode);
        evt.taskSubjectId(ses.subjectId());

        ctx.event().record(evt);
    }

    /**
     * @param res Result.
     * @param ex Error.
     * @param sndReply If {@code true}, reply will be sent.
     */
    void finishJob(@Nullable Object res, @Nullable IgniteException ex, boolean sndReply) {
        finishJob(res, ex, sndReply, false);
    }

    /**
     * Completes the job: optionally marshals and sends the execution response back to the
     * task (master) node, records the appropriate job events, and invokes the finish
     * listener callback. Safe to call from multiple threads; only the first call wins.
     *
     * @param res Result.
     * @param ex Exception.
     * @param sndReply If {@code true}, reply will be sent.
     * @param retry If {@code true}, retry response will be sent.
     */
    void finishJob(@Nullable Object res, @Nullable IgniteException ex, boolean sndReply, boolean retry) {
        // Avoid finishing a job more than once from different threads.
        if (!finishing.compareAndSet(false, true))
            return;

        // Do not send reply if job has been cancelled from system.
        if (sndReply)
            sndReply = !sysCancelled;

        // We should save message ID here since listener callback will reset sequence.
        ClusterNode sndNode = ctx.discovery().node(taskNode.id());

        finishTime = U.currentTimeMillis();

        // Events are accumulated here and recorded in the finally block, after the
        // response has been sent (or failed to send).
        Collection<IgniteBiTuple<Integer, String>> evts = null;

        try {
            if (ses.isFullSupport())
                evtLsnr.onBeforeJobResponseSent(this);

            // Send response back only if job has not timed out.
            if (!isTimedOut()) {
                if (sndReply) {
                    if (sndNode == null) {
                        // Master node left the grid: fire the master-leave callback instead of replying.
                        onMasterNodeLeft();

                        U.warn(log, "Failed to reply to sender node because it left grid [nodeId=" + taskNode.id() + ", ses=" + ses + ", jobId=" + ses.getJobId() + ", job=" + job + ']');

                        // Record job reply failure.
                        if (!internal && ctx.event().isRecordable(EVT_JOB_FAILED))
                            evts = addEvent(evts, EVT_JOB_FAILED, "Job reply failed (task node left grid): " + job);
                    }
                    else {
                        try {
                            byte[] resBytes = null;
                            byte[] exBytes = null;
                            byte[] attrBytes = null;

                            // Local responses skip marshalling unless explicitly configured otherwise.
                            boolean loc = ctx.localNodeId().equals(sndNode.id()) && !ctx.config().isMarshalLocalJobs();

                            Map<Object, Object> attrs = jobCtx.getAttributes();

                            // Try serialize response, and if exception - return to client.
                            if (!loc) {
                                try {
                                    resBytes = U.marshal(marsh, res);
                                }
                                catch (IgniteCheckedException e) {
                                    // Fall back to a null result and surface the marshalling failure to the caller.
                                    resBytes = U.marshal(marsh, null);

                                    if (ex != null)
                                        ex.addSuppressed(e);
                                    else
                                        ex = U.convertException(e);

                                    logError("Failed to serialize job response [nodeId=" + taskNode.id() + ", ses=" + ses + ", jobId=" + ses.getJobId() + ", job=" + job + ", resCls=" + (res == null ? null : res.getClass()) + ']', e);
                                }

                                try {
                                    attrBytes = U.marshal(marsh, attrs);
                                }
                                catch (IgniteCheckedException e) {
                                    // Fall back to empty attributes, again surfacing the failure.
                                    attrBytes = U.marshal(marsh, Collections.emptyMap());

                                    if (ex != null)
                                        ex.addSuppressed(e);
                                    else
                                        ex = U.convertException(e);

                                    logError("Failed to serialize job attributes [nodeId=" + taskNode.id() + ", ses=" + ses + ", jobId=" + ses.getJobId() + ", job=" + job + ", attrs=" + attrs + ']', e);
                                }

                                try {
                                    exBytes = U.marshal(marsh, ex);
                                }
                                catch (IgniteCheckedException e) {
                                    // The exception itself is unmarshallable: replace it with a plain
                                    // IgniteException describing the problem, which always marshals.
                                    String msg = "Failed to serialize job exception [nodeId=" + taskNode.id() + ", ses=" + ses + ", jobId=" + ses.getJobId() + ", job=" + job + ", msg=\"" + e.getMessage() + "\"]";

                                    ex = new IgniteException(msg);

                                    logError(msg, e);

                                    exBytes = U.marshal(marsh, ex);
                                }
                            }

                            if (ex != null) {
                                if (isStarted) {
                                    // Job failed.
                                    if (!internal && ctx.event().isRecordable(EVT_JOB_FAILED))
                                        evts = addEvent(evts, EVT_JOB_FAILED, "Job failed due to exception [ex=" + ex + ", job=" + job + ']');
                                }
                                else if (!internal && ctx.event().isRecordable(EVT_JOB_REJECTED))
                                    evts = addEvent(evts, EVT_JOB_REJECTED, "Job has not been started " + "[ex=" + ex + ", job=" + job + ']');
                            }
                            else if (!internal && ctx.event().isRecordable(EVT_JOB_FINISHED))
                                evts = addEvent(evts, EVT_JOB_FINISHED, /*no message for success. */null);

                            // For local responses pass live objects; otherwise pass marshalled bytes.
                            GridJobExecuteResponse jobRes = new GridJobExecuteResponse(
                                ctx.localNodeId(),
                                ses.getId(),
                                ses.getJobId(),
                                exBytes,
                                loc ? ex : null,
                                resBytes,
                                loc ? res : null,
                                attrBytes,
                                loc ? attrs : null,
                                isCancelled(),
                                retry ? ctx.cache().context().exchange().readyAffinityVersion() : null);

                            long timeout = ses.getEndTime() - U.currentTimeMillis();

                            if (timeout <= 0)
                                // Ignore the actual timeout and send response anyway.
                                timeout = 1;

                            if (ses.isFullSupport()) {
                                // Send response to designated job topic.
                                // Always go through communication to preserve order,
                                // if attributes are enabled.
                                ctx.io().sendOrderedMessage(
                                    sndNode,
                                    taskTopic,
                                    jobRes,
                                    internal ? MANAGEMENT_POOL : SYSTEM_POOL,
                                    timeout,
                                    false);
                            }
                            else if (ctx.localNodeId().equals(sndNode.id()))
                                // Local shortcut: hand the response directly to the task processor.
                                ctx.task().processJobExecuteResponse(ctx.localNodeId(), jobRes);
                            else
                                // Send response to common topic as unordered message.
                                ctx.io().sendToGridTopic(sndNode, TOPIC_TASK, jobRes, internal ? MANAGEMENT_POOL : SYSTEM_POOL);
                        }
                        catch (IgniteCheckedException e) {
                            // Log and invoke the master-leave callback.
                            if ((e instanceof ClusterTopologyCheckedException) || isDeadNode(taskNode.id())) {
                                onMasterNodeLeft();

                                // Avoid stack trace for left nodes.
                                U.warn(log, "Failed to reply to sender node because it left grid " + "[nodeId=" + taskNode.id() + ", jobId=" + ses.getJobId() + ", ses=" + ses + ", job=" + job + ']');
                            }
                            else
                                logError("Error sending reply for job [nodeId=" + sndNode.id() + ", jobId=" + ses.getJobId() + ", ses=" + ses + ", job=" + job + ']', e);

                            if (!internal && ctx.event().isRecordable(EVT_JOB_FAILED))
                                evts = addEvent(evts, EVT_JOB_FAILED, "Failed to send reply for job [nodeId=" + taskNode.id() + ", job=" + job + ']');
                        }
                        // Catching interrupted exception because
                        // it gets thrown for some reason.
                        catch (Exception e) {
                            String msg = "Failed to send reply for job [nodeId=" + taskNode.id() + ", job=" + job + ']';

                            logError(msg, e);

                            if (!internal && ctx.event().isRecordable(EVT_JOB_FAILED))
                                evts = addEvent(evts, EVT_JOB_FAILED, msg);
                        }
                    }
                }
                else {
                    // No reply requested: only record the terminal event.
                    if (ex != null) {
                        if (isStarted) {
                            if (!internal && ctx.event().isRecordable(EVT_JOB_FAILED))
                                evts = addEvent(evts, EVT_JOB_FAILED, "Job failed due to exception [ex=" + ex + ", job=" + job + ']');
                        }
                        else if (!internal && ctx.event().isRecordable(EVT_JOB_REJECTED))
                            evts = addEvent(evts, EVT_JOB_REJECTED, "Job has not been started [ex=" + ex + ", job=" + job + ']');
                    }
                    else if (!internal && ctx.event().isRecordable(EVT_JOB_FINISHED))
                        evts = addEvent(evts, EVT_JOB_FINISHED, /*no message for success. */null);
                }
            }
            // Job timed out.
            else if (!internal && ctx.event().isRecordable(EVT_JOB_FAILED))
                evts = addEvent(evts, EVT_JOB_FAILED, "Job failed due to timeout: " + job);
        }
        finally {
            // Record accumulated events only after the response attempt completed.
            if (evts != null) {
                for (IgniteBiTuple<Integer, String> t : evts)
                    recordEvent(t.get1(), t.get2());
            }

            // Listener callback.
            evtLsnr.onJobFinished(this);
        }
    }

    /**
     * This method wraps U.error invocations to check node stopping.
     * Log message will be skipped if node is stopping and debug is disabled.
     *
     * @param msg Message to log using quiet logger.
     * @param e Optional exception.
     */
    private void logError(String msg, @Nullable Throwable e) {
        // Suppress noisy errors caused by local node shutdown unless debug logging is on.
        if (e != null && (log.isDebugEnabled() || !X.hasCause(e, NodeStoppingException.class)))
            U.error(log, msg, e);
    }

    /**
     * If the job implements {@link org.apache.ignite.compute.ComputeJobMasterLeaveAware#onMasterNodeLeft} interface then invoke
     * {@link org.apache.ignite.compute.ComputeJobMasterLeaveAware#onMasterNodeLeft(org.apache.ignite.compute.ComputeTaskSession)} method.
     *
     * @return {@code True} if master leave has been handled (either by this call or before).
     */
    boolean onMasterNodeLeft() {
        if (job instanceof ComputeJobMasterLeaveAware) {
            // Guard ensures the user callback is invoked at most once per job.
            if (masterLeaveGuard.compareAndSet(false, true)) {
                try {
                    ((ComputeJobMasterLeaveAware)job).onMasterNodeLeft(ses.session());

                    if (log.isDebugEnabled())
                        log.debug("Successfully executed ComputeJobMasterLeaveAware.onMasterNodeLeft() callback " + "[nodeId=" + taskNode.id() + ", jobId=" + ses.getJobId() + ", job=" + job + ']');
                }
                catch (Exception e) {
                    // User callback failure must not break job finishing.
                    U.error(log, "Failed to execute ComputeJobMasterLeaveAware.onMasterNodeLeft() callback " + "[nodeId=" + taskNode.id() + ", jobId=" + ses.getJobId() + ", job=" + job + ']', e);
                }
            }

            return true;
        }

        return false;
    }

    /**
     * Lazily accumulates a (type, message) event tuple for later recording.
     *
     * @param evts Collection (created if {@code null}).
     * @param evt Event.
     * @param msg Message (optional).
     * @return Collection with event added.
     */
    Collection<IgniteBiTuple<Integer, String>> addEvent(@Nullable Collection<IgniteBiTuple<Integer, String>> evts, Integer evt, @Nullable String msg) {
        assert ctx.event().isRecordable(evt);
        assert !internal;

        if (evts == null)
            evts = new ArrayList<>();

        evts.add(F.t(evt, msg));

        return evts;
    }

    /**
     * Checks whether node is alive or dead.
     *
     * @param uid UID of node to check.
     * @return {@code true} if node is dead, {@code false} if node is alive.
*/ private boolean isDeadNode(UUID uid) { return ctx.discovery().node(uid) == null || !ctx.discovery().pingNodeNoError(uid); } /** {@inheritDoc} */ @Override public int hashCode() { IgniteUuid jobId = ses.getJobId(); assert jobId != null; return jobId.hashCode(); } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridJobWorker.class, this); } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
// NOTE: generated code — do not hand-edit logic; regenerate from the API spec instead.

package com.azure.resourcemanager.postgresql.implementation;

import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.postgresql.fluent.RecoverableServersClient;
import com.azure.resourcemanager.postgresql.fluent.models.RecoverableServerResourceInner;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in RecoverableServersClient. */
public final class RecoverableServersClientImpl implements RecoverableServersClient {
    // Not referenced by the visible methods; emitted by the generator for diagnostics.
    private final ClientLogger logger = new ClientLogger(RecoverableServersClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final RecoverableServersService service;

    /** The service client containing this operation class. */
    private final PostgreSqlManagementClientImpl client;

    /**
     * Initializes an instance of RecoverableServersClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    RecoverableServersClientImpl(PostgreSqlManagementClientImpl client) {
        this.service =
            RestProxy.create(RecoverableServersService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for PostgreSqlManagementClientRecoverableServers to be used by the proxy
     * service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "PostgreSqlManagement")
    private interface RecoverableServersService {
        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforPostgreSQL"
                + "/servers/{serverName}/recoverableServers")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<RecoverableServerResourceInner>> get(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("serverName") String serverName,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Gets a recoverable PostgreSQL Server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a recoverable PostgreSQL Server along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<RecoverableServerResourceInner>> getWithResponseAsync(
        String resourceGroupName, String serverName) {
        // Validation errors are surfaced as error Monos rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (serverName == null) {
            return Mono.error(new IllegalArgumentException("Parameter serverName is required and cannot be null."));
        }
        // api-version is pinned by the generator for this operation.
        final String apiVersion = "2017-12-01";
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .get(
                            this.client.getEndpoint(),
                            apiVersion,
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            serverName,
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Gets a recoverable PostgreSQL Server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a recoverable PostgreSQL Server along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<RecoverableServerResourceInner>> getWithResponseAsync(
        String resourceGroupName, String serverName, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (serverName == null) {
            return Mono.error(new IllegalArgumentException("Parameter serverName is required and cannot be null."));
        }
        final String apiVersion = "2017-12-01";
        final String accept = "application/json";
        // Caller-supplied context is merged with the client's default context.
        context = this.client.mergeContext(context);
        return service
            .get(
                this.client.getEndpoint(),
                apiVersion,
                this.client.getSubscriptionId(),
                resourceGroupName,
                serverName,
                accept,
                context);
    }

    /**
     * Gets a recoverable PostgreSQL Server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a recoverable PostgreSQL Server on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<RecoverableServerResourceInner> getAsync(String resourceGroupName, String serverName) {
        // A null body completes empty rather than emitting null (Mono forbids null values).
        return getWithResponseAsync(resourceGroupName, serverName)
            .flatMap(
                (Response<RecoverableServerResourceInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Gets a recoverable PostgreSQL Server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a recoverable PostgreSQL Server.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public RecoverableServerResourceInner get(String resourceGroupName, String serverName) {
        return getAsync(resourceGroupName, serverName).block();
    }

    /**
     * Gets a recoverable PostgreSQL Server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a recoverable PostgreSQL Server along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<RecoverableServerResourceInner> getWithResponse(
        String resourceGroupName, String serverName, Context context) {
        return getWithResponseAsync(resourceGroupName, serverName, context).block();
    }
}
/**
 * Copyright 2014 Grafos.ml
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ml.grafos.okapi.graphs;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.giraph.aggregators.LongSumAggregator;
import org.apache.giraph.edge.Edge;
import org.apache.giraph.graph.AbstractComputation;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.io.formats.TextVertexValueInputFormat;
import org.apache.giraph.master.DefaultMasterCompute;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * This is an implementation of the SybilRank algorithm. In fact, this is an
 * extension of the original SybilRank algorithm published by Cao et al at
 * NSDI'12. This version of the algorithm assumes a weighted graph. The modified
 * algorithm has been developed by Boshmaf et al.
 *
 * @author dl
 *
 */
public class SybilRank {
  /**
   * Property name for the total trust.
   */
  public static final String TOTAL_TRUST = "sybilrank.total.trust";

  /**
   * Property name for the iteration multiplier.
   */
  public static final String ITERATION_MULTIPLIER =
      "sybilrank.iteration.multiplier";

  /**
   * Default multiplier for the iterations.
   */
  public static final int ITERATION_MULTIPLIER_DEFAULT = 1;

  /**
   * Name of aggregator used to calculate the total number of trusted nodes.
   */
  public static final String AGGREGATOR_NUM_TRUSTED = "AGG_NUM_TRUSTED";

  // Shared constant used when incrementing the trusted-node aggregator.
  public static final LongWritable ONE = new LongWritable(1);

  /**
   * This method computes the degree of a vertex as the sum of its edge weights.
   *
   * @param v vertex whose weighted degree to compute.
   * @return sum of the weights of all outgoing edges of {@code v}.
   */
  public static double computeDegree(
      Vertex<LongWritable,VertexValue,DoubleWritable> v) {
    // NOTE(review): a vertex with no edges yields degree 0.0; callers divide
    // by this value, which would produce NaN ranks — confirm inputs have no
    // isolated vertices.
    double degree = 0.0;
    for (Edge<LongWritable, DoubleWritable> edge : v.getEdges()) {
      degree += edge.getValue().get();
    }
    return degree;
  }

  /**
   * This computation class is used to calculate the aggregate number of
   * trusted nodes. This value is necessary to initialize the rank of the nodes
   * before the power iterations starts.
   *
   * @author dl
   *
   */
  public static class TrustAggregation extends AbstractComputation<LongWritable,
    VertexValue, DoubleWritable, DoubleWritable, DoubleWritable> {
    @Override
    public void compute(
        Vertex<LongWritable, VertexValue, DoubleWritable> vertex,
        Iterable<DoubleWritable> messages) throws IOException {
      // Each trusted vertex contributes 1 to the AGGREGATOR_NUM_TRUSTED sum.
      if (vertex.getValue().isTrusted()) {
        aggregate(AGGREGATOR_NUM_TRUSTED, ONE);
      }
    }
  }

  /**
   * This class is used only to initialize the rank of the vertices. It assumes
   * that the trust aggregation computations has occurred in the previous step.
   *
   * After the initialization it also distributes the rank of every vertex to
   * its friends, so that the power iterations start.
   *
   * @author dl
   *
   */
  public static class Initializer extends AbstractComputation<LongWritable,
    VertexValue, DoubleWritable, DoubleWritable, DoubleWritable> {

    // Total trust distributed across trusted vertices; read per-superstep in
    // preSuperstep() from configuration (falls back to the vertex count).
    private double totalTrust;

    @Override
    public void compute(
        Vertex<LongWritable, VertexValue, DoubleWritable> vertex,
        Iterable<DoubleWritable> messages) throws IOException {
      // Trusted vertices share the total trust equally; all others start at 0.
      if (vertex.getValue().isTrusted()) {
        vertex.getValue().setRank(
            totalTrust/(double)((LongWritable)getAggregatedValue(
                AGGREGATOR_NUM_TRUSTED)).get());
      } else {
        vertex.getValue().setRank(0.0);
      }
      double degree = computeDegree(vertex);
      // Distribute rank to edges proportionally to the edge weights
      for (Edge<LongWritable, DoubleWritable> edge : vertex.getEdges()) {
        double distRank =
            vertex.getValue().getRank()*(edge.getValue().get()/degree);
        sendMessage(edge.getTargetVertexId(), new DoubleWritable(distRank));
      }
    }

    @Override
    public void preSuperstep() {
      String s_totalTrust = getContext().getConfiguration().get(TOTAL_TRUST);
      if (s_totalTrust != null) {
        totalTrust = Double.parseDouble(s_totalTrust);
      } else {
        // The default value of the total trust is equal to the number of
        // vertices in the graph.
        totalTrust = getTotalNumVertices();
      }
    }
  }

  /**
   * This class implements the main part of the SybilRank algorithms, that is,
   * the power iterations.
   *
   * @author dl
   *
   */
  public static class SybilRankComputation extends AbstractComputation<LongWritable,
    VertexValue, DoubleWritable, DoubleWritable, DoubleWritable> {

    @Override
    public void compute(
        Vertex<LongWritable, VertexValue, DoubleWritable> vertex,
        Iterable<DoubleWritable> messages) throws IOException {
      // Aggregate rank from friends.
      double newRank = 0.0;
      for (DoubleWritable message : messages) {
        newRank += message.get();
      }
      // NOTE(review): degree of 0 (no edges) makes both the distributed and
      // normalized rank NaN — confirm isolated vertices cannot occur here.
      double degree = computeDegree(vertex);
      // Distribute rank to edges proportionally to the edge weights
      for (Edge<LongWritable, DoubleWritable> edge : vertex.getEdges()) {
        double distRank = newRank*(edge.getValue().get()/degree);
        sendMessage(edge.getTargetVertexId(), new DoubleWritable(distRank));
      }
      // The final value of the rank is normalized by the degree of the vertex.
      vertex.getValue().setRank(newRank/degree);
    }
  }

  /**
   * This implementation coordinates the execution of the SybilRank algorithm.
   *
   * @author dl
   *
   */
  public static class SybilRankMasterCompute extends DefaultMasterCompute {

    // Multiplier c in the c*log10(N) power-iteration count; read once from
    // configuration in initialize().
    private int iterationMultiplier;

    @Override
    public void initialize() throws InstantiationException, IllegalAccessException {
      iterationMultiplier = getContext().getConfiguration().getInt(
          ITERATION_MULTIPLIER, ITERATION_MULTIPLIER_DEFAULT);

      // Register the aggregator that will be used to count the number of
      // trusted nodes.
      registerPersistentAggregator(AGGREGATOR_NUM_TRUSTED, LongSumAggregator.class);
    }

    @Override
    public void compute() {
      // Superstep 0 counts trusted vertices, superstep 1 seeds the ranks,
      // every later superstep runs one power iteration.
      long superstep = getSuperstep();
      if (superstep == 0) {
        setComputation(TrustAggregation.class);
      } else if (superstep == 1) {
        setComputation(Initializer.class);
      } else {
        setComputation(SybilRankComputation.class);
      }

      // The number of power iterations we execute is equal to c*log10(N), where
      // N is the number of vertices in the graph and c is the iteration
      // multiplier.
      if (superstep>0) {
        int maxPowerIterations = (int)Math.ceil(
            iterationMultiplier*Math.log10((double)getTotalNumVertices()));
        // Before the power iterations, we execute 2 initial supersteps, so we
        // count those in when deciding to stop.
        if (superstep >= 2+maxPowerIterations) {
          haltComputation();
        }
      }
    }
  }

  /**
   * Represents the state of a vertex for this algorithm. This state indicates
   * the current rank of the vertex and whether this vertex is considered
   * trusted or not.
* * Unless explicitly set, a vertex is initialized to be untrusted. * * @author dl * */ public static class VertexValue implements Writable { // Indicates whether this vertex is considered trusted. private boolean isTrusted; // This holds the current rank of the vertex. private double rank; public VertexValue() { isTrusted = false; } public VertexValue(double rank, boolean isTrusted) { this.rank = rank; this.isTrusted = isTrusted; } public void setRank(double rank) { this.rank = rank; } public double getRank() { return rank; } public void setTrusted(boolean isTrusted) { this.isTrusted = isTrusted; } public boolean isTrusted() { return isTrusted; } @Override public void readFields(DataInput in) throws IOException { rank = in.readDouble(); isTrusted = in.readBoolean(); } @Override public void write(DataOutput out) throws IOException { out.writeDouble(rank); out.writeBoolean(isTrusted); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } VertexValue that = (VertexValue) o; if (rank != that.rank || isTrusted != that.isTrusted) { return false; } return true; } @Override public String toString() { return String.valueOf(rank); } } /** * This InputFormat class is used to read the set of vertices that are * considered trusted. The actual input is expected to contain one vertex * ID per line. 
* @author dl * */ public static class SybilRankVertexValueInputFormat extends TextVertexValueInputFormat<LongWritable, VertexValue, DoubleWritable> { @Override public SybilRankVertexValueReader createVertexValueReader( InputSplit split, TaskAttemptContext context) throws IOException { return new SybilRankVertexValueReader(); } public class SybilRankVertexValueReader extends TextVertexValueReaderFromEachLineProcessed<String> { @Override protected String preprocessLine(Text line) throws IOException { return line.toString(); } @Override protected LongWritable getId(String data) throws IOException { return new LongWritable(Long.parseLong(data)); } @Override protected VertexValue getValue(String data) throws IOException { VertexValue value = new VertexValue(); value.setTrusted(true); return value; } } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.AlreadyExpiredException; import org.elasticsearch.index.analysis.NumericLongAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static 
org.elasticsearch.common.xcontent.support.XContentMapValues.nodeTimeValue;

/**
 * Metadata mapper for the {@code _ttl} field: stores a per-document expiration
 * timestamp (indexing timestamp + ttl) as a long field.
 */
public class TTLFieldMapper extends MetadataFieldMapper {

    public static final String NAME = "_ttl";
    public static final String CONTENT_TYPE = "_ttl";

    public static class Defaults extends LongFieldMapper.Defaults {
        public static final String NAME = TTLFieldMapper.CONTENT_TYPE;

        public static final TTLFieldType TTL_FIELD_TYPE = new TTLFieldType();

        static {
            // _ttl is indexed (docs only), stored, untokenized, and frozen after setup.
            TTL_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            TTL_FIELD_TYPE.setStored(true);
            TTL_FIELD_TYPE.setTokenized(false);
            TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
            TTL_FIELD_TYPE.setIndexAnalyzer(NumericLongAnalyzer.buildNamedAnalyzer(Defaults.PRECISION_STEP_64_BIT));
            TTL_FIELD_TYPE.setSearchAnalyzer(NumericLongAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE));
            TTL_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
            TTL_FIELD_TYPE.freeze();
        }

        public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED;
        // -1 means "no default ttl configured".
        public static final long DEFAULT = -1;
    }

    public static class Builder extends MetadataFieldMapper.Builder<Builder, TTLFieldMapper> {

        private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
        private long defaultTTL = Defaults.DEFAULT;

        public Builder() {
            super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.FIELD_TYPE);
        }

        public Builder enabled(EnabledAttributeMapper enabled) {
            this.enabledState = enabled;
            return builder;
        }

        public Builder defaultTTL(long defaultTTL) {
            this.defaultTTL = defaultTTL;
            return builder;
        }

        @Override
        public TTLFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            // _ttl never uses doc values.
            fieldType.setHasDocValues(false);
            return new TTLFieldMapper(fieldType, enabledState, defaultTTL, fieldDataSettings, context.indexSettings());
        }
    }

    /** Parses the {@code _ttl} section of a mapping ({@code enabled}, {@code default}). */
    public static class TypeParser implements MetadataFieldMapper.TypeParser {
        @Override
        public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            Builder builder = new Builder();
            // Consumed entries are removed so unrecognized keys can be detected upstream.
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
                    builder.enabled(enabledState);
                    iterator.remove();
                } else if (fieldName.equals("default")) {
                    TimeValue ttlTimeValue = nodeTimeValue(fieldNode, null);
                    if (ttlTimeValue != null) {
                        builder.defaultTTL(ttlTimeValue.millis());
                    }
                    iterator.remove();
                }
            }
            return builder;
        }

        @Override
        public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) {
            return new TTLFieldMapper(indexSettings);
        }
    }

    public static final class TTLFieldType extends LongFieldMapper.LongFieldType {

        public TTLFieldType() {
        }

        protected TTLFieldType(TTLFieldType ref) {
            super(ref);
        }

        @Override
        public TTLFieldType clone() {
            return new TTLFieldType(this);
        }

        // Overrides valueForSearch to display live value of remaining ttl
        @Override
        public Object valueForSearch(Object value) {
            long now;
            // Prefer the search context clock so all hits in one request share "now".
            SearchContext searchContext = SearchContext.current();
            if (searchContext != null) {
                now = searchContext.nowInMillis();
            } else {
                now = System.currentTimeMillis();
            }
            long val = value(value);
            // Stored value is the absolute expiration time; report time remaining.
            return val - now;
        }
    }

    private EnabledAttributeMapper enabledState;
    private long defaultTTL;

    private TTLFieldMapper(Settings indexSettings) {
        this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, null, indexSettings);
    }

    private TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, @Nullable Settings fieldDataSettings, Settings indexSettings) {
        super(NAME, fieldType, Defaults.TTL_FIELD_TYPE, indexSettings);
        this.enabledState = enabled;
        this.defaultTTL = defaultTTL;
    }
public boolean enabled() { return this.enabledState.enabled; } public long defaultTTL() { return this.defaultTTL; } // Other implementation for realtime get display public Object valueForSearch(long expirationTime) { return expirationTime - System.currentTimeMillis(); } @Override public void preParse(ParseContext context) throws IOException { } @Override public void postParse(ParseContext context) throws IOException { super.parse(context); } @Override public Mapper parse(ParseContext context) throws IOException, MapperParsingException { if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally long ttl; if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) { ttl = TimeValue.parseTimeValue(context.parser().text(), null, "ttl").millis(); } else { ttl = context.parser().longValue(true); } if (ttl <= 0) { throw new MapperParsingException("TTL value must be > 0. Illegal value provided [" + ttl + "]"); } context.sourceToParse().ttl(ttl); } return null; } @Override protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException, AlreadyExpiredException { if (enabledState.enabled && !context.sourceToParse().flyweight()) { long ttl = context.sourceToParse().ttl(); if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value ttl = defaultTTL; context.sourceToParse().ttl(ttl); } if (ttl > 0) { // a ttl has been provided either externally or in the _source long timestamp = context.sourceToParse().timestamp(); long expire = new Date(timestamp + ttl).getTime(); long now = System.currentTimeMillis(); // there is not point indexing already expired doc if (context.sourceToParse().origin() == SourceToParse.Origin.PRIMARY && now >= expire) { throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now); } // the expiration timestamp (timestamp + ttl) is set as field fields.add(new LongFieldMapper.CustomLongNumericField(expire, fieldType())); } } } 
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); // if all are defaults, no sense to write it at all if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && defaultTTL == Defaults.DEFAULT) { return builder; } builder.startObject(CONTENT_TYPE); if (includeDefaults || enabledState != Defaults.ENABLED_STATE) { builder.field("enabled", enabledState.enabled); } if (includeDefaults || defaultTTL != Defaults.DEFAULT && enabledState.enabled) { builder.field("default", defaultTTL); } builder.endObject(); return builder; } @Override protected String contentType() { return NAME; } @Override protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith; if (ttlMergeWith.enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with if (this.enabledState == EnabledAttributeMapper.ENABLED && ttlMergeWith.enabledState == EnabledAttributeMapper.DISABLED) { throw new IllegalArgumentException("_ttl cannot be disabled once it was enabled."); } else { this.enabledState = ttlMergeWith.enabledState; } } if (ttlMergeWith.defaultTTL != -1) { // we never build the default when the field is disabled so we should also not set it // (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster) if (enabledState == EnabledAttributeMapper.ENABLED) { this.defaultTTL = ttlMergeWith.defaultTTL; } } } }
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.palette.impl; import com.intellij.ide.dnd.DnDEvent; import com.intellij.ide.dnd.DnDManager; import com.intellij.ide.dnd.DnDTarget; import com.intellij.ide.palette.PaletteGroup; import com.intellij.ide.palette.PaletteItem; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.project.Project; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.ui.PopupHandler; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.CompoundBorder; import java.awt.*; import java.awt.event.*; /** * @author yole */ public class PaletteGroupHeader extends JCheckBox implements DataProvider { private final PaletteWindow myPaletteWindow; private PaletteComponentList myComponentList; private final PaletteGroup myGroup; public PaletteGroupHeader(PaletteWindow paletteWindow, PaletteGroup group) { myPaletteWindow = paletteWindow; myGroup = group; if (group.getName() == null) { setVisible(false); } else { setText(group.getName()); } setSelected(true); addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (myComponentList != null) { myComponentList.setVisible(isSelected()); } } }); addMouseListener(new PopupHandler() { public void invokePopup(Component comp, int x, int y) { 
myPaletteWindow.setLastFocusedGroup(PaletteGroupHeader.this); showGroupPopupMenu(comp, x, y); } }); setIcon(UIUtil.getTreeCollapsedIcon()); setSelectedIcon(UIUtil.getTreeExpandedIcon()); setFont(getFont().deriveFont(Font.BOLD)); setFocusPainted(false); setMargin(new Insets(0, 3, 0, 3)); setOpaque(true); if (getBorder() instanceof CompoundBorder) { // from BasicLookAndFeel Dimension pref = getPreferredSize(); pref.height -= 3; setPreferredSize(pref); } DnDManager.getInstance().registerTarget(new DnDTarget() { public boolean update(DnDEvent aEvent) { setBorderPainted(true); aEvent.setDropPossible(aEvent.getAttachedObject() instanceof PaletteItem); return true; } public void drop(DnDEvent aEvent) { setBorderPainted(false); if (aEvent.getAttachedObject() instanceof PaletteItem) { myGroup.handleDrop(myPaletteWindow.getProject(), (PaletteItem) aEvent.getAttachedObject(), -1); } } public void cleanUpOnLeave() { setBorderPainted(false); } public void updateDraggedImage(Image image, Point dropPoint, Point imageOffset) { } }, this); addFocusListener(new FocusAdapter() { @Override public void focusGained(FocusEvent e) { myPaletteWindow.setLastFocusedGroup(PaletteGroupHeader.this); } }); initActions(); } public void showGroupPopupMenu(final Component comp, final int x, final int y) { ActionGroup group = myGroup.getPopupActionGroup(); if (group != null) { ActionPopupMenu popupMenu = ActionManager.getInstance().createActionPopupMenu(ActionPlaces.UNKNOWN, group); popupMenu.getComponent().show(comp, x, y); } } private void initActions() { @NonNls InputMap inputMap = getInputMap(WHEN_FOCUSED); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_DOWN, 0, false), "moveFocusDown"); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_UP, 0, false), "moveFocusUp"); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_LEFT, 0, false), "collapse"); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_RIGHT, 0, false), "expand"); @NonNls ActionMap actionMap = getActionMap(); actionMap.put("moveFocusDown", 
new MoveFocusAction(true)); actionMap.put("moveFocusUp", new MoveFocusAction(false)); actionMap.put("collapse", new ExpandAction(false)); actionMap.put("expand", new ExpandAction(true)); } @Override public Color getBackground() { if (isFocusOwner()) { return UIUtil.getListSelectionBackground(); } return super.getBackground(); } @Override public Color getForeground() { if (isFocusOwner()) { return UIUtil.getListSelectionForeground(); } return super.getForeground(); } public void setComponentList(final PaletteComponentList componentList) { myComponentList = componentList; } public PaletteComponentList getComponentList() { return myComponentList; } public PaletteGroup getGroup() { return myGroup; } @Nullable public Object getData(String dataId) { Object data = myPaletteWindow.getData(dataId); if (data != null) return data; Project project = CommonDataKeys.PROJECT.getData(myPaletteWindow); return myGroup.getData(project, dataId); } private class MoveFocusAction extends AbstractAction { private final boolean moveDown; public MoveFocusAction(boolean moveDown) { this.moveDown = moveDown; } public void actionPerformed(ActionEvent e) { KeyboardFocusManager kfm = KeyboardFocusManager.getCurrentKeyboardFocusManager(); Container container = kfm.getCurrentFocusCycleRoot(); IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> { FocusTraversalPolicy policy = container.getFocusTraversalPolicy(); if (null == policy) policy = kfm.getDefaultFocusTraversalPolicy(); Component next = moveDown ? policy.getComponentAfter(container, PaletteGroupHeader.this) : policy.getComponentBefore(container, PaletteGroupHeader.this); if (null != next && next instanceof PaletteComponentList) { final PaletteComponentList list = (PaletteComponentList)next; if (list.getModel().getSize() != 0) { list.takeFocusFrom(PaletteGroupHeader.this, list == myComponentList ? 0 : -1); return; } else { next = moveDown ? 
policy.getComponentAfter(container, next) : policy.getComponentBefore(container, next); } } if (null != next && next instanceof PaletteGroupHeader) { IdeFocusManager.getGlobalInstance().requestFocus(next, true); } }); } } private class ExpandAction extends AbstractAction { private final boolean expand; public ExpandAction(boolean expand) { this.expand = expand; } public void actionPerformed(ActionEvent e) { if (expand == isSelected()) return; setSelected(expand); if (myComponentList != null) { myComponentList.setVisible(isSelected()); } } } }
/*
 * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.identity.mgt.store;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.persistence.JDBCPersistenceManager;
import org.wso2.carbon.identity.core.util.IdentityDatabaseUtil;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.mgt.dto.UserRecoveryDataDO;

/**
 * JDBC-backed {@link UserRecoveryDataStore} over the identity metadata table.
 * Schema of the identity metadata is as follows:
 * ====================================================
 * ||UserName|TenantID|MetadataType|Metadata|Valid||
 * ====================================================
 */
public class JDBCUserRecoveryDataStore implements UserRecoveryDataStore {

    /**
     * Invalidates a single recovery-data entry (sets VALID = 'false'; the row is
     * not deleted, despite what the original comment claimed).
     *
     * @param recoveryDataDO the entry to invalidate
     * @throws IdentityException on any SQL failure
     */
    public void invalidate(UserRecoveryDataDO recoveryDataDO) throws IdentityException {
        Connection connection = JDBCPersistenceManager.getInstance().getDBConnection();
        PreparedStatement prepStmt = null;
        try {
            // Auto-commit must be disabled BEFORE executing; the original code disabled
            // it after execute(), making the explicit commit() ineffective.
            connection.setAutoCommit(false);
            prepStmt = connection.prepareStatement(SQLQuery.INVALIDATE_METADATA);
            prepStmt.setString(1, recoveryDataDO.getUserName());
            prepStmt.setInt(2, recoveryDataDO.getTenantId());
            prepStmt.setString(3, recoveryDataDO.getCode());
            // The query has four placeholders; the fourth (METADATA) was previously
            // never bound, which fails at execute time.
            // NOTE(review): assumes METADATA holds the secret — confirm against schema.
            prepStmt.setString(4, recoveryDataDO.getSecret());
            prepStmt.execute();
            connection.commit();
        } catch (SQLException e) {
            rollbackQuietly(connection);
            throw new IdentityException("Error while invalidating user identity data", e);
        } finally {
            IdentityDatabaseUtil.closeStatement(prepStmt);
            IdentityDatabaseUtil.closeConnection(connection);
        }
    }

    /**
     * Invalidates every recovery-data entry belonging to the given user.
     *
     * @param userId user name
     * @param tenant tenant id
     * @throws IdentityException on any SQL failure
     */
    public void invalidate(String userId, int tenant) throws IdentityException {
        Connection connection = JDBCPersistenceManager.getInstance().getDBConnection();
        PreparedStatement prepStmt = null;
        try {
            connection.setAutoCommit(false);
            // Use the user-scoped query: INVALIDATE_METADATA expects four parameters,
            // but only user name and tenant are available here. The original code also
            // never executed the statement, so nothing was ever invalidated.
            prepStmt = connection.prepareStatement(SQLQuery.INVALIDATE_USER_METADATA);
            prepStmt.setString(1, userId);
            prepStmt.setInt(2, tenant);
            prepStmt.execute();
            connection.commit();
        } catch (SQLException e) {
            rollbackQuietly(connection);
            throw new IdentityException("Error while invalidating user identity data", e);
        } finally {
            IdentityDatabaseUtil.closeStatement(prepStmt);
            IdentityDatabaseUtil.closeConnection(connection);
        }
    }

    /**
     * Stores a single recovery-data entry as a valid record.
     *
     * @param recoveryDataDO the entry to persist
     * @throws IdentityException on any SQL failure
     */
    public void store(UserRecoveryDataDO recoveryDataDO) throws IdentityException {
        Connection connection = JDBCPersistenceManager.getInstance().getDBConnection();
        PreparedStatement prepStmt = null;
        try {
            connection.setAutoCommit(false);
            prepStmt = connection.prepareStatement(SQLQuery.STORE_META_DATA);
            prepStmt.setString(1, recoveryDataDO.getUserName());
            prepStmt.setInt(2, recoveryDataDO.getTenantId());
            prepStmt.setString(3, recoveryDataDO.getCode());
            prepStmt.setString(4, recoveryDataDO.getSecret());
            // STORE_META_DATA has five placeholders; VALID was previously never bound,
            // which fails at execute time. New entries are stored as valid.
            // TODO(review): confirm whether UserRecoveryDataDO exposes a validity flag
            // that should be used here instead.
            prepStmt.setString(5, "true");
            prepStmt.execute();
            connection.commit();
        } catch (SQLException e) {
            rollbackQuietly(connection);
            throw new IdentityException("Error while storing user identity data", e);
        } finally {
            IdentityDatabaseUtil.closeStatement(prepStmt);
            IdentityDatabaseUtil.closeConnection(connection);
        }
    }

    /**
     * Stores a set of recovery-data entries in a single batch/transaction.
     *
     * @param recoveryDataDOs entries to persist
     * @throws IdentityException on any SQL failure
     */
    public void store(UserRecoveryDataDO[] recoveryDataDOs) throws IdentityException {
        Connection connection = JDBCPersistenceManager.getInstance().getDBConnection();
        PreparedStatement prepStmt = null;
        try {
            connection.setAutoCommit(false);
            prepStmt = connection.prepareStatement(SQLQuery.STORE_META_DATA);
            for (UserRecoveryDataDO dataDO : recoveryDataDOs) {
                prepStmt.setString(1, dataDO.getUserName());
                prepStmt.setInt(2, dataDO.getTenantId());
                prepStmt.setString(3, dataDO.getCode());
                prepStmt.setString(4, dataDO.getSecret());
                // See single-entry store(): the fifth (VALID) placeholder must be bound.
                prepStmt.setString(5, "true");
                prepStmt.addBatch();
            }
            prepStmt.executeBatch();
            connection.commit();
        } catch (SQLException e) {
            rollbackQuietly(connection);
            throw new IdentityException("Error while storing user identity data", e);
        } finally {
            IdentityDatabaseUtil.closeStatement(prepStmt);
            IdentityDatabaseUtil.closeConnection(connection);
        }
    }

    /**
     * Loads all recovery-data entries of a user in the given tenant.
     *
     * @param userName user name
     * @param tenantId tenant id
     * @return all matching entries; empty array when none exist
     * @throws IdentityException on any SQL failure
     */
    public UserRecoveryDataDO[] load(String userName, int tenantId) throws IdentityException {
        Connection connection = JDBCPersistenceManager.getInstance().getDBConnection();
        PreparedStatement prepStmt = null;
        ResultSet results = null;
        try {
            prepStmt = connection.prepareStatement(SQLQuery.LOAD_USER_METADATA);
            prepStmt.setString(1, userName);
            // Use the tenantId parameter directly; the original re-derived it via
            // IdentityUtil.getTenantIdOFUser(userName) and silently ignored the argument.
            prepStmt.setInt(2, tenantId);
            results = prepStmt.executeQuery();
            List<UserRecoveryDataDO> metada = new ArrayList<UserRecoveryDataDO>();
            while (results.next()) {
                metada.add(new UserRecoveryDataDO(results.getString(1), results.getInt(2),
                        results.getString(3), results.getString(4)));
            }
            UserRecoveryDataDO[] resultMetadata = new UserRecoveryDataDO[metada.size()];
            return metada.toArray(resultMetadata);
        } catch (SQLException e) {
            throw new IdentityException("Error while reading user identity data", e);
        } finally {
            IdentityDatabaseUtil.closeResultSet(results);
            IdentityDatabaseUtil.closeStatement(prepStmt);
            IdentityDatabaseUtil.closeConnection(connection);
        }
    }

    /**
     * Lookup by confirmation code is not supported by this store.
     *
     * @return always {@code null}
     */
    @Override
    public UserRecoveryDataDO load(String code) throws IdentityException {
        return null;
    }

    /** Best-effort rollback; secondary failures are intentionally ignored. */
    private static void rollbackQuietly(Connection connection) {
        try {
            connection.rollback();
        } catch (SQLException ignored) {
            // Nothing sensible to do here; the original exception is propagated instead.
        }
    }

    /**
     * This class contains the SQL queries.
     */
    private static class SQLQuery {

        /**
         * Query to load temporary passwords and confirmation codes
         */
        public static final String LOAD_META_DATA = "SELECT "
                + "USER_NAME, TENANT_ID, METADATA_TYPE, METADATA, VALID "
                + "FROM IDN_IDENTITY_META_DATA "
                + "WHERE USER_NAME = ? AND TENANT_ID = ? AND METADATA_TYPE = ? AND METADATA = ?";

        /**
         * Query to load user metadata
         */
        public static final String LOAD_USER_METADATA = "SELECT "
                + "USER_NAME, TENANT_ID, METADATA_TYPE, METADATA, VALID "
                + "FROM IDN_IDENTITY_META_DATA "
                + "WHERE USER_NAME = ? AND TENANT_ID = ? ";

        /**
         * Query to load security questions
         */
        public static final String LOAD_TENANT_METADATA = "SELECT "
                + "USER_NAME, TENANT_ID, METADATA_TYPE, METADATA, VALID "
                + "FROM IDN_IDENTITY_META_DATA "
                + "WHERE TENANT_ID = ? AND METADATA_TYPE = ?";

        public static final String STORE_META_DATA = "INSERT "
                + "INTO IDN_IDENTITY_META_DATA "
                + "(USER_NAME, TENANT_ID, METADATA_TYPE, METADATA, VALID)"
                + "VALUES (?,?,?,?,?)";

        /** Invalidates one specific entry. */
        public static final String INVALIDATE_METADATA = "UPDATE "
                + "IDN_IDENTITY_META_DATA "
                + "SET VALID = 'false' "
                + "WHERE USER_NAME = ? AND TENANT_ID = ? AND METADATA_TYPE = ? AND METADATA = ?";

        /** Invalidates every entry belonging to a user (no type/metadata filter). */
        public static final String INVALIDATE_USER_METADATA = "UPDATE "
                + "IDN_IDENTITY_META_DATA "
                + "SET VALID = 'false' "
                + "WHERE USER_NAME = ? AND TENANT_ID = ?";
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.test.integration.functions.compress;

import org.apache.sysml.runtime.compress.BitmapEncoder;
import org.apache.sysml.runtime.compress.CompressedMatrixBlock;
import org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.operators.AggregateUnaryOperator;
import org.apache.sysml.runtime.util.DataConverter;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.utils.TestUtils;
import org.junit.Test;

/**
 * Hand-unrolled test matrix for unary aggregate operations on compressed
 * matrix blocks: each test delegates to runUnaryAggregateTest (defined later
 * in this file) with one combination of sparsity type, value distribution,
 * aggregate kind, and a flag selecting the compressed vs. uncompressed path.
 */
public class LargeParUnaryAggregateTest extends AutomatedTestBase
{
    // "Large": several bitmap blocks of rows, so multi-block paths are exercised.
    private static final int rows = 5*BitmapEncoder.BITMAP_BLOCK_SZ;
    private static final int cols = 20;
    private static final double sparsity1 = 0.9; // presumably the DENSE case — generator not visible here
    private static final double sparsity2 = 0.1; // presumably the SPARSE case
    private static final double sparsity3 = 0.0; // presumably the EMPTY case

    public enum SparsityType {
        DENSE,
        SPARSE,
        EMPTY,
    }

    public enum ValueType {
        RAND, //UC
        CONST, //RLE
        RAND_ROUND_OLE, //OLE
        RAND_ROUND_DDC, //RLE
    }

    // Aggregates under test; ROWMAXS..MIN members exist but their tests are not
    // visible in this portion of the file.
    public enum AggType {
        ROWSUMS,
        COLSUMS,
        SUM,
        ROWSUMSSQ,
        COLSUMSSQ,
        SUMSQ,
        ROWMAXS,
        COLMAXS,
        MAX,
        ROWMINS,
        COLMINS,
        MIN,
    }

    @Override
    public void setUp() {
        // No shared fixture; every test builds its own input.
    }

    // --- ROWSUMS ---
    @Test public void testRowSumsDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.ROWSUMS, true); }
    @Test public void testRowSumsSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.ROWSUMS, true); }
    @Test public void testRowSumsEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.ROWSUMS, true); }
    @Test public void testRowSumsDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.ROWSUMS, true); }
    @Test public void testRowSumsSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.ROWSUMS, true); }
    @Test public void testRowSumsDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.ROWSUMS, true); }
    @Test public void testRowSumsSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.ROWSUMS, true); }
    @Test public void testRowSumsDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.ROWSUMS, true); }
    @Test public void testRowSumsSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.ROWSUMS, true); }
    @Test public void testRowSumsDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.ROWSUMS, false); }
    @Test public void testRowSumsSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.ROWSUMS, false); }
    @Test public void testRowSumsEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.ROWSUMS, false); }
    @Test public void testRowSumsDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.ROWSUMS, false); }
    @Test public void testRowSumsSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.ROWSUMS, false); }
    @Test public void testRowSumsDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.ROWSUMS, false); }
    @Test public void testRowSumsSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.ROWSUMS, false); }

    // --- COLSUMS ---
    @Test public void testColSumsDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.COLSUMS, true); }
    @Test public void testColSumsSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.COLSUMS, true); }
    @Test public void testColSumsEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.COLSUMS, true); }
    @Test public void testColSumsDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.COLSUMS, true); }
    @Test public void testColSumsSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.COLSUMS, true); }
    @Test public void testColSumsDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.COLSUMS, true); }
    @Test public void testColSumsSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.COLSUMS, true); }
    @Test public void testColSumsDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.COLSUMS, true); }
    @Test public void testColSumsSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.COLSUMS, true); }
    @Test public void testColSumsDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.COLSUMS, false); }
    @Test public void testColSumsSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.COLSUMS, false); }
    @Test public void testColSumsEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.COLSUMS, false); }
    @Test public void testColSumsDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.COLSUMS, false); }
    @Test public void testColSumsSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.COLSUMS, false); }
    @Test public void testColSumsDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.COLSUMS, false); }
    @Test public void testColSumsSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.COLSUMS, false); }

    // --- SUM ---
    @Test public void testSumDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.SUM, true); }
    @Test public void testSumSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.SUM, true); }
    @Test public void testSumEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.SUM, true); }
    @Test public void testSumDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.SUM, true); }
    @Test public void testSumSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.SUM, true); }
    @Test public void testSumDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.SUM, true); }
    @Test public void testSumSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.SUM, true); }
    @Test public void testSumDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.SUM, true); }
    @Test public void testSumSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.SUM, true); }
    @Test public void testSumDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.SUM, false); }
    @Test public void testSumSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.SUM, false); }
    @Test public void testSumEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.SUM, false); }
    @Test public void testSumDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.SUM, false); }
    @Test public void testSumSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.SUM, false); }
    @Test public void testSumDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.SUM, false); }
    @Test public void testSumSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.SUM, false); }

    // --- ROWSUMSSQ ---
    @Test public void testRowSumsSqDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.ROWSUMSSQ, true); }
    @Test public void testRowSumsSqDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.ROWSUMSSQ, false); }
    @Test public void testRowSumsSqSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.ROWSUMSSQ, false); }
    @Test public void testRowSumsSqEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.ROWSUMSSQ, false); }
    @Test public void testRowSumsSqDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.ROWSUMSSQ, false); }
    @Test public void testRowSumsSqSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.ROWSUMSSQ, false); }
    @Test public void testRowSumsSqDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.ROWSUMSSQ, false); }
    @Test public void testRowSumsSqSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.ROWSUMSSQ, false); }

    // --- COLSUMSSQ ---
    @Test public void testColSumsSqDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.COLSUMSSQ, true); }
    @Test public void testColSumsSqDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.COLSUMSSQ, false); }
    @Test public void testColSumsSqSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.COLSUMSSQ, false); }
    @Test public void testColSumsSqEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.COLSUMSSQ, false); }
    @Test public void testColSumsSqDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.COLSUMSSQ, false); }
    @Test public void testColSumsSqSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.COLSUMSSQ, false); }
    @Test public void testColSumsSqDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.COLSUMSSQ, false); }
    @Test public void testColSumsSqSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.COLSUMSSQ, false); }

    // --- SUMSQ ---
    @Test public void testSumSqDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.SUMSQ, true); }
    @Test public void testSumSqSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.SUMSQ, true); }
    @Test public void testSumSqEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.SUMSQ, true); }
    @Test public void testSumSqDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.SUMSQ, true); }
    @Test public void testSumSqSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.SUMSQ, true); }
    @Test public void testSumSqDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.SUMSQ, true); }
    @Test public void testSumSqSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.SUMSQ, true); }
    @Test public void testSumSqDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.SUMSQ, true); }
    @Test public void testSumSqSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.SUMSQ, true); }
    @Test public void testSumSqDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.SUMSQ, false); }
    @Test public void testSumSqSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.SUMSQ, false); }
    @Test public void testSumSqEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.SUMSQ, false); }
    @Test public void testSumSqDenseRoundRandDataOLENoCompression() {
runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.SUMSQ, false); } @Test public void testSumSqSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.SUMSQ, false); } @Test public void testSumSqDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.SUMSQ, false); } @Test public void testSumSqSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.SUMSQ, false); } @Test public void testRowMaxsDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.ROWMAXS, true); } @Test public void testRowMaxsSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.ROWMAXS, true); } @Test public void testRowMaxsEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.ROWMAXS, true); } @Test public void testRowMaxsDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.ROWMAXS, true); } @Test public void testRowMaxsSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.ROWMAXS, true); } @Test public void testRowMaxsDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.ROWMAXS, true); } @Test public void testRowMaxsSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.ROWMAXS, true); } @Test public void testRowMaxsDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.ROWMAXS, true); } @Test public void testRowMaxsSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.ROWMAXS, true); } @Test public void testRowMaxsDenseRandDataNoCompression() { 
runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.ROWMAXS, false); } @Test public void testRowMaxsSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.ROWMAXS, false); } @Test public void testRowMaxsEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.ROWMAXS, false); } @Test public void testRowMaxsDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.ROWMAXS, false); } @Test public void testRowMaxsSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.ROWMAXS, false); } @Test public void testRowMaxsDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.ROWMAXS, false); } @Test public void testRowMaxsSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.ROWMAXS, false); } @Test public void testColMaxsDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.COLMAXS, true); } @Test public void testColMaxsSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.COLMAXS, true); } @Test public void testColMaxsEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.COLMAXS, true); } @Test public void testColMaxsDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.COLMAXS, true); } @Test public void testColMaxsSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.COLMAXS, true); } @Test public void testColMaxsDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.COLMAXS, true); } @Test public void testColMaxsSparseRoundRandDataDDCCompression() { 
runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.COLMAXS, true); } @Test public void testColMaxsDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.COLMAXS, true); } @Test public void testColMaxsSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.COLMAXS, true); } @Test public void testColMaxsDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.COLMAXS, false); } @Test public void testColMaxsSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.COLMAXS, false); } @Test public void testColMaxsEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.COLMAXS, false); } @Test public void testColMaxsDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.COLMAXS, false); } @Test public void testColMaxsSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.COLMAXS, false); } @Test public void testColMaxsDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.COLMAXS, false); } @Test public void testColMaxsSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.COLMAXS, false); } @Test public void testMaxDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.MAX, true); } @Test public void testMaxSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.MAX, true); } @Test public void testMaxEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.MAX, true); } @Test public void testMaxDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.MAX, true); } @Test 
public void testMaxSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.MAX, true); } @Test public void testMaxDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.MAX, true); } @Test public void testMaxSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.MAX, true); } @Test public void testMaxDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.MAX, true); } @Test public void testMaxSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.MAX, true); } @Test public void testMaxDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.MAX, false); } @Test public void testMaxSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.MAX, false); } @Test public void testMaxEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.MAX, false); } @Test public void testMaxDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.MAX, false); } @Test public void testMaxSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.MAX, false); } @Test public void testMaxDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.MAX, false); } @Test public void testMaxSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.MAX, false); } @Test public void testRowMinsDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.ROWMINS, true); } @Test public void testRowMinsSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, 
AggType.ROWMINS, true); } @Test public void testRowMinsEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.ROWMINS, true); } @Test public void testRowMinsDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.ROWMINS, true); } @Test public void testRowMinsSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.ROWMINS, true); } @Test public void testRowMinsDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.ROWMINS, true); } @Test public void testRowMinsSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.ROWMINS, true); } @Test public void testRowMinsDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.ROWMINS, true); } @Test public void testRowMinsSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.ROWMINS, true); } @Test public void testRowMinsDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.ROWMINS, false); } @Test public void testRowMinsSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.ROWMINS, false); } @Test public void testRowMinsEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.ROWMINS, false); } @Test public void testRowMinsDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.ROWMINS, false); } @Test public void testRowMinsSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.ROWMINS, false); } @Test public void testRowMinsDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.ROWMINS, false); 
} @Test public void testRowMinsSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.ROWMINS, false); } @Test public void testColMinsDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.COLMINS, true); } @Test public void testColMinsSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.COLMINS, true); } @Test public void testColMinsEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.COLMINS, true); } @Test public void testColMinsDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.COLMINS, true); } @Test public void testColMinsSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.COLMINS, true); } @Test public void testColMinsDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.COLMINS, true); } @Test public void testColMinsSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.COLMINS, true); } @Test public void testColMinsDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.COLMINS, true); } @Test public void testColMinsSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.COLMINS, true); } @Test public void testColMinsDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.COLMINS, false); } @Test public void testColMinsSparseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.COLMINS, false); } @Test public void testColMinsEmptyNoCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.COLMINS, false); } @Test public void 
testColMinsDenseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.COLMINS, false); } @Test public void testColMinsSparseRoundRandDataOLENoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.COLMINS, false); } @Test public void testColMinsDenseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.COLMINS, false); } @Test public void testColMinsSparseConstDataNoCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.COLMINS, false); } @Test public void testMinDenseRandDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND, AggType.MIN, true); } @Test public void testMinSparseRandDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.MIN, true); } @Test public void testMinEmptyCompression() { runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.MIN, true); } @Test public void testMinDenseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.MIN, true); } @Test public void testMinSparseRoundRandDataOLECompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.MIN, true); } @Test public void testMinDenseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_DDC, AggType.MIN, true); } @Test public void testMinSparseRoundRandDataDDCCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_DDC, AggType.MIN, true); } @Test public void testMinDenseConstantDataCompression() { runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.MIN, true); } @Test public void testMinSparseConstDataCompression() { runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.MIN, true); } @Test public void testMinDenseRandDataNoCompression() { runUnaryAggregateTest(SparsityType.DENSE, 
ValueType.RAND, AggType.MIN, false); }

@Test
public void testMinSparseRandDataNoCompression() {
	runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND, AggType.MIN, false);
}

@Test
public void testMinEmptyNoCompression() {
	runUnaryAggregateTest(SparsityType.EMPTY, ValueType.RAND, AggType.MIN, false);
}

@Test
public void testMinDenseRoundRandDataOLENoCompression() {
	runUnaryAggregateTest(SparsityType.DENSE, ValueType.RAND_ROUND_OLE, AggType.MIN, false);
}

@Test
public void testMinSparseRoundRandDataOLENoCompression() {
	runUnaryAggregateTest(SparsityType.SPARSE, ValueType.RAND_ROUND_OLE, AggType.MIN, false);
}

@Test
public void testMinDenseConstDataNoCompression() {
	runUnaryAggregateTest(SparsityType.DENSE, ValueType.CONST, AggType.MIN, false);
}

@Test
public void testMinSparseConstDataNoCompression() {
	runUnaryAggregateTest(SparsityType.SPARSE, ValueType.CONST, AggType.MIN, false);
}

/**
 * Shared driver for all unary-aggregate tests: generates a test matrix for the
 * requested sparsity/value profile, appends a sequence column (forces an
 * uncompressed column group), runs the aggregate on both the plain and the
 * (optionally) compressed matrix block, and compares the results.
 *
 * @param sptype   sparsity profile (DENSE/SPARSE/EMPTY -> sparsity1/2/3)
 * @param vtype    value profile; *_ROUND_* variants round inputs and toggle DDC encoding
 * @param aggtype  aggregate to execute (sum/sumsq/max/min, full/row/col variants)
 * @param compress whether to actually compress the CompressedMatrixBlock
 */
private void runUnaryAggregateTest(SparsityType sptype, ValueType vtype, AggType aggtype, boolean compress)
{
	if( shouldSkipTest() )
		return;

	try
	{
		//prepare sparsity for input data
		double sparsity = -1;
		switch( sptype ) {
			case DENSE: sparsity = sparsity1; break;
			case SPARSE: sparsity = sparsity2; break;
			case EMPTY: sparsity = sparsity3; break;
		}

		//generate input data (CONST uses min==max==10 so all values are constant)
		double min = (vtype==ValueType.CONST) ? 10 : -10;
		double[][] input = TestUtils.generateTestMatrix(rows, cols, min, 10, sparsity, 7);
		if( vtype==ValueType.RAND_ROUND_OLE || vtype==ValueType.RAND_ROUND_DDC ) {
			CompressedMatrixBlock.ALLOW_DDC_ENCODING = (vtype==ValueType.RAND_ROUND_DDC);
			input = TestUtils.round(input);
		}
		MatrixBlock mb = DataConverter.convertToMatrixBlock(input);
		mb = mb.append(MatrixBlock.seqOperations(0.1, rows-0.1, 1), new MatrixBlock()); //uc group

		//prepare unary aggregate operator
		AggregateUnaryOperator auop = null;
		int k = InfrastructureAnalyzer.getLocalParallelism();
		switch( aggtype ) {
			case SUM: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uak+",k); break;
			case ROWSUMS: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uark+",k); break;
			case COLSUMS: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uack+",k); break;
			case SUMSQ: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uasqk+",k); break;
			case ROWSUMSSQ: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uarsqk+",k); break;
			case COLSUMSSQ: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uacsqk+",k); break;
			case MAX: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uamax",k); break;
			case ROWMAXS: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uarmax",k); break;
			case COLMAXS: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uacmax",k); break;
			case MIN: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uamin",k); break;
			case ROWMINS: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uarmin",k); break;
			case COLMINS: auop = InstructionUtils.parseBasicAggregateUnaryOperator("uacmin",k); break;
		}

		//compress given matrix block
		CompressedMatrixBlock cmb = new CompressedMatrixBlock(mb);
		if( compress )
			cmb.compress();

		//matrix-vector uncompressed
		MatrixBlock ret1 = (MatrixBlock)mb.aggregateUnaryOperations(auop, new MatrixBlock(), 1000, 1000, null, true);

		//matrix-vector compressed
		MatrixBlock ret2 = (MatrixBlock)cmb.aggregateUnaryOperations(auop, new MatrixBlock(), 1000, 1000, null, true);

		//compare result with input
		double[][] d1 = DataConverter.convertToDoubleMatrix(ret1);
		double[][] d2 = DataConverter.convertToDoubleMatrix(ret2);
		//BUGFIX: the row-aggregate check previously tested ROWMINS twice and omitted
		//ROWMAXS, so for ROWMAXS only a 1x1 prefix of the result was ever compared
		int dim1 = (aggtype == AggType.ROWSUMS || aggtype == AggType.ROWSUMSSQ
			|| aggtype == AggType.ROWMAXS || aggtype == AggType.ROWMINS) ? rows : 1;
		//col aggregates span cols+1 due to the appended sequence column
		int dim2 = (aggtype == AggType.COLSUMS || aggtype == AggType.COLSUMSSQ
			|| aggtype == AggType.COLMAXS || aggtype == AggType.COLMINS) ? cols+1 : 1;
		TestUtils.compareMatrices(d1, d2, dim1, dim2, 0.000000001);
	}
	catch(Exception ex) {
		throw new RuntimeException(ex);
	}
	finally {
		CompressedMatrixBlock.ALLOW_DDC_ENCODING = true;
	}
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.hyracks.algebricks.core.algebra.prettyprint; import java.util.List; import org.apache.commons.lang3.mutable.Mutable; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.algebricks.common.utils.Pair; import org.apache.hyracks.algebricks.common.utils.Triple; import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression; import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator; import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan; import org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator; import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable; import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans; import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestMapOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator; import 
org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.ForwardOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator.Kind; import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator; import 
org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.WindowOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteOperator; import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator; import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor; public class LogicalOperatorPrettyPrintVisitor extends AbstractLogicalOperatorPrettyPrintVisitor { public LogicalOperatorPrettyPrintVisitor() { super(); } public LogicalOperatorPrettyPrintVisitor(AlgebricksAppendable buffer, ILogicalExpressionVisitor<String, Integer> exprVisitor) { super(buffer, exprVisitor); } public LogicalOperatorPrettyPrintVisitor(AlgebricksAppendable buffer) { super(buffer); } public LogicalOperatorPrettyPrintVisitor(Appendable app) { super(app); } @Override public void printOperator(AbstractLogicalOperator op, int indent) throws AlgebricksException { final AlgebricksAppendable out = this.get(); op.accept(this, indent); IPhysicalOperator pOp = op.getPhysicalOperator(); if (pOp != null) { out.append("\n"); pad(out, indent); appendln(out, "-- " + pOp.toString() + " |" + 
op.getExecutionMode() + "|"); } else { appendln(out, " -- |" + op.getExecutionMode() + "|"); } for (Mutable<ILogicalOperator> i : op.getInputs()) { printOperator((AbstractLogicalOperator) i.getValue(), indent + 2); } } @Override public Void visitAggregateOperator(AggregateOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("aggregate ").append(str(op.getVariables())).append(" <- "); pprintExprList(op.getExpressions(), indent); return null; } @Override public Void visitRunningAggregateOperator(RunningAggregateOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("running-aggregate ").append(str(op.getVariables())).append(" <- "); pprintExprList(op.getExpressions(), indent); return null; } @Override public Void visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("empty-tuple-source"); return null; } @Override public Void visitGroupByOperator(GroupByOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("group by" + (op.isGroupAll() ? 
" (all)" : "") + " ("); pprintVeList(op.getGroupByList(), indent); buffer.append(") decor ("); pprintVeList(op.getDecorList(), indent); buffer.append(") {"); printNestedPlans(op, indent); return null; } @Override public Void visitDistinctOperator(DistinctOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("distinct ("); pprintExprList(op.getExpressions(), indent); buffer.append(")"); return null; } @Override public Void visitInnerJoinOperator(InnerJoinOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("join (").append(op.getCondition().getValue().accept(exprVisitor, indent)).append(")"); return null; } @Override public Void visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("left outer join (").append(op.getCondition().getValue().accept(exprVisitor, indent)) .append(")"); return null; } @Override public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("nested tuple source"); return null; } @Override public Void visitOrderOperator(OrderOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("order "); if (op.getTopK() != -1) { buffer.append("(topK: " + op.getTopK() + ") "); } pprintOrderList(op.getOrderExpressions(), indent); return null; } private String getOrderString(OrderOperator.IOrder first) { switch (first.getKind()) { case ASC: return "ASC"; case DESC: return "DESC"; default: return first.getExpressionRef().toString(); } } @Override public Void visitAssignOperator(AssignOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("assign ").append(str(op.getVariables())).append(" <- "); pprintExprList(op.getExpressions(), indent); return null; } @Override public Void visitWriteOperator(WriteOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("write "); 
// Each visit* method below renders one logical operator as a single text line (plus nested plans where applicable).
pprintExprList(op.getExpressions(), indent); return null; } @Override public Void visitDistributeResultOperator(DistributeResultOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("distribute result "); pprintExprList(op.getExpressions(), indent); return null; } @Override public Void visitWriteResultOperator(WriteResultOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("load ").append(str(op.getDataSource())).append(" from ") .append(op.getPayloadExpression().getValue().accept(exprVisitor, indent)).append(" partitioned by "); pprintExprList(op.getKeyExpressions(), indent); return null; } @Override public Void visitSelectOperator(SelectOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("select (").append(op.getCondition().getValue().accept(exprVisitor, indent)) .append(")"); return null; } @Override public Void visitProjectOperator(ProjectOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("project " + "(" + op.getVariables() + ")"); return null; } @Override public Void visitSubplanOperator(SubplanOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("subplan {"); printNestedPlans(op, indent); return null; } @Override public Void visitUnionOperator(UnionAllOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("union"); for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> v : op.getVariableMappings()) { buffer.append(" (" + v.first + ", " + v.second + ", " + v.third + ")"); } return null; } @Override public Void visitIntersectOperator(IntersectOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("intersect ("); buffer.append('['); for (int i = 0; i < op.getOutputVars().size(); i++) { if (i > 0) { buffer.append(", "); } buffer.append(str(op.getOutputVars().get(i))); } buffer.append("] <- ["); for (int i = 0; i < op.getNumInput(); i++) { if (i > 0) 
{ buffer.append(", "); } buffer.append('['); for (int j = 0; j < op.getInputVariables(i).size(); j++) { if (j > 0) { buffer.append(", "); } buffer.append(str(op.getInputVariables(i).get(j))); } buffer.append(']'); } buffer.append("])"); return null; } @Override public Void visitUnnestOperator(UnnestOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("unnest " + op.getVariable()); if (op.getPositionalVariable() != null) { buffer.append(" at " + op.getPositionalVariable()); } buffer.append(" <- " + op.getExpressionRef().getValue().accept(exprVisitor, indent)); return null; } @Override public Void visitLeftOuterUnnestOperator(LeftOuterUnnestOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("outer-unnest " + op.getVariable()); if (op.getPositionalVariable() != null) { buffer.append(" at " + op.getPositionalVariable()); } buffer.append(" <- " + op.getExpressionRef().getValue().accept(exprVisitor, indent)); return null; } @Override public Void visitUnnestMapOperator(UnnestMapOperator op, Integer indent) throws AlgebricksException { AlgebricksAppendable plan = printAbstractUnnestMapOperator(op, indent, "unnest-map"); appendSelectConditionInformation(plan, op.getSelectCondition(), indent); appendLimitInformation(plan, op.getOutputLimit()); return null; } @Override public Void visitLeftOuterUnnestMapOperator(LeftOuterUnnestMapOperator op, Integer indent) throws AlgebricksException { printAbstractUnnestMapOperator(op, indent, "left-outer-unnest-map"); return null; } private AlgebricksAppendable printAbstractUnnestMapOperator(AbstractUnnestMapOperator op, Integer indent, String opSignature) throws AlgebricksException { AlgebricksAppendable plan = addIndent(indent).append(opSignature + " " + op.getVariables() + " <- " + op.getExpressionRef().getValue().accept(exprVisitor, indent)); appendFilterInformation(plan, op.getMinFilterVars(), op.getMaxFilterVars()); return plan; } @Override public Void 
visitDataScanOperator(DataSourceScanOperator op, Integer indent) throws AlgebricksException { AlgebricksAppendable plan = addIndent(indent).append( "data-scan " + op.getProjectVariables() + "<-" + op.getVariables() + " <- " + op.getDataSource()); appendFilterInformation(plan, op.getMinFilterVars(), op.getMaxFilterVars()); appendSelectConditionInformation(plan, op.getSelectCondition(), indent); appendLimitInformation(plan, op.getOutputLimit()); return null; } private Void appendSelectConditionInformation(AlgebricksAppendable plan, Mutable<ILogicalExpression> selectCondition, Integer indent) throws AlgebricksException { if (selectCondition != null) { plan.append(" condition (").append(selectCondition.getValue().accept(exprVisitor, indent)).append(")"); } return null; } private Void appendLimitInformation(AlgebricksAppendable plan, long outputLimit) throws AlgebricksException { if (outputLimit >= 0) { plan.append(" limit ").append(String.valueOf(outputLimit)); } return null; } private Void appendFilterInformation(AlgebricksAppendable plan, List<LogicalVariable> minFilterVars, List<LogicalVariable> maxFilterVars) throws AlgebricksException { if (minFilterVars != null || maxFilterVars != null) { plan.append(" with filter on"); } if (minFilterVars != null) { plan.append(" min:" + minFilterVars); } if (maxFilterVars != null) { plan.append(" max:" + maxFilterVars); } return null; } @Override public Void visitLimitOperator(LimitOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("limit " + op.getMaxObjects().getValue().accept(exprVisitor, indent)); ILogicalExpression offset = op.getOffset().getValue(); if (offset != null) { buffer.append(", " + offset.accept(exprVisitor, indent)); } return null; } @Override public Void visitExchangeOperator(ExchangeOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("exchange"); return null; } @Override public Void visitScriptOperator(ScriptOperator op, Integer indent) throws 
AlgebricksException { addIndent(indent).append("script (in: " + op.getInputVariables() + ") (out: " + op.getOutputVariables() + ")"); return null; } @Override public Void visitReplicateOperator(ReplicateOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("replicate"); return null; } @Override public Void visitSplitOperator(SplitOperator op, Integer indent) throws AlgebricksException { Mutable<ILogicalExpression> branchingExpression = op.getBranchingExpression(); addIndent(indent).append("split (" + branchingExpression.getValue().accept(exprVisitor, indent) + ")"); return null; } @Override public Void visitMaterializeOperator(MaterializeOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("materialize"); return null; } @Override public Void visitInsertDeleteUpsertOperator(InsertDeleteUpsertOperator op, Integer indent) throws AlgebricksException { String header = getIndexOpString(op.getOperation()); addIndent(indent).append(header).append(str(op.getDataSource())).append(" from record: ") .append(op.getPayloadExpression().getValue().accept(exprVisitor, indent)); if (op.getAdditionalNonFilteringExpressions() != null) { buffer.append(", meta: "); pprintExprList(op.getAdditionalNonFilteringExpressions(), indent); } buffer.append(" partitioned by "); pprintExprList(op.getPrimaryKeyExpressions(), indent); if (op.getOperation() == Kind.UPSERT) { buffer.append(" out: ([record-before-upsert:" + op.getBeforeOpRecordVar() + ((op.getBeforeOpAdditionalNonFilteringVars() != null) ? 
(", additional-before-upsert: " + op.getBeforeOpAdditionalNonFilteringVars()) : "") + "]) "); } if (op.isBulkload()) { buffer.append(" [bulkload]"); } return null; } @Override public Void visitIndexInsertDeleteUpsertOperator(IndexInsertDeleteUpsertOperator op, Integer indent) throws AlgebricksException { String header = getIndexOpString(op.getOperation()); addIndent(indent).append(header).append(op.getIndexName()).append(" on ") .append(str(op.getDataSourceIndex().getDataSource())).append(" from "); if (op.getOperation() == Kind.UPSERT) { buffer.append(" replace:"); pprintExprList(op.getPrevSecondaryKeyExprs(), indent); buffer.append(" with:"); pprintExprList(op.getSecondaryKeyExpressions(), indent); } else { pprintExprList(op.getSecondaryKeyExpressions(), indent); } if (op.isBulkload()) { buffer.append(" [bulkload]"); } return null; } public String getIndexOpString(Kind opKind) { switch (opKind) { case DELETE: return "delete from "; case INSERT: return "insert into "; case UPSERT: return "upsert into "; } return null; } @Override public Void visitTokenizeOperator(TokenizeOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("tokenize ").append(str(op.getTokenizeVars())).append(" <- "); pprintExprList(op.getSecondaryKeyExpressions(), indent); return null; } @Override public Void visitForwardOperator(ForwardOperator op, Integer indent) throws AlgebricksException { addIndent(indent) .append("forward: range-map = " + op.getRangeMapExpression().getValue().accept(exprVisitor, indent)); return null; } @Override public Void visitSinkOperator(SinkOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append("sink"); return null; } @Override public Void visitDelegateOperator(DelegateOperator op, Integer indent) throws AlgebricksException { addIndent(indent).append(op.toString()); return null; } @Override public Void visitWindowOperator(WindowOperator op, Integer indent) throws AlgebricksException { 
addIndent(indent).append("window ").append(str(op.getVariables())).append(" <- "); pprintExprList(op.getExpressions(), indent); if (!op.getPartitionExpressions().isEmpty()) { buffer.append(" partition "); pprintExprList(op.getPartitionExpressions(), indent); } if (!op.getOrderExpressions().isEmpty()) { buffer.append(" order "); pprintOrderList(op.getOrderExpressions(), indent); } if (op.hasNestedPlans()) { buffer.append(" frame on "); pprintOrderList(op.getFrameValueExpressions(), indent); buffer.append("start "); List<Mutable<ILogicalExpression>> frameStartExpressions = op.getFrameStartExpressions(); if (!frameStartExpressions.isEmpty()) { pprintExprList(frameStartExpressions, indent); } else { buffer.append("unbounded"); } buffer.append(" end "); List<Mutable<ILogicalExpression>> frameEndExpressions = op.getFrameEndExpressions(); if (!frameEndExpressions.isEmpty()) { pprintExprList(frameEndExpressions, indent); } else { buffer.append("unbounded"); } List<Mutable<ILogicalExpression>> frameExcludeExpressions = op.getFrameExcludeExpressions(); if (!frameExcludeExpressions.isEmpty()) { buffer.append(" exclude "); int negStartIdx = op.getFrameExcludeNegationStartIdx(); if (negStartIdx >= 0 && op.getFrameExcludeNegationStartIdx() < frameExcludeExpressions.size()) { pprintExprList(frameExcludeExpressions.subList(0, negStartIdx), indent); buffer.append(" and not "); pprintExprList(frameExcludeExpressions.subList(negStartIdx, frameExcludeExpressions.size()), indent); } else { pprintExprList(frameExcludeExpressions, indent); } } Mutable<ILogicalExpression> frameOffset = op.getFrameOffset(); if (frameOffset.getValue() != null) { buffer.append(" offset "); buffer.append(frameOffset.getValue().accept(exprVisitor, indent)); } int frameMaxObjects = op.getFrameMaxObjects(); if (frameMaxObjects != -1) { buffer.append(" maxObjects " + frameMaxObjects); } buffer.append(" {"); printNestedPlans(op, indent); } return null; } protected void 
printNestedPlans(AbstractOperatorWithNestedPlans op, Integer indent) throws AlgebricksException { boolean first = true; if (op.getNestedPlans().isEmpty()) { buffer.append("}"); } else { for (ILogicalPlan p : op.getNestedPlans()) { // PrettyPrintUtil.indent(buffer, level + 10).append("var " + // p.first + ":\n"); buffer.append("\n"); if (first) { first = false; } else { addIndent(indent).append(" {\n"); } printPlan(p, indent + 10); addIndent(indent).append(" }"); } } } protected void pprintExprList(List<Mutable<ILogicalExpression>> expressions, Integer indent) throws AlgebricksException { buffer.append("["); boolean first = true; for (Mutable<ILogicalExpression> exprRef : expressions) { if (first) { first = false; } else { buffer.append(", "); } buffer.append(exprRef.getValue().accept(exprVisitor, indent)); } buffer.append("]"); } protected void pprintVeList(List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> vePairList, Integer indent) throws AlgebricksException { buffer.append("["); boolean fst = true; for (Pair<LogicalVariable, Mutable<ILogicalExpression>> ve : vePairList) { if (fst) { fst = false; } else { buffer.append("; "); } if (ve.first != null) { buffer.append(ve.first + " := " + ve.second); } else { buffer.append(ve.second.getValue().accept(exprVisitor, indent)); } } buffer.append("]"); } protected void pprintOrderList(List<Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>>> orderList, Integer indent) throws AlgebricksException { for (Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>> p : orderList) { String fst = getOrderString(p.first); buffer.append("(" + fst + ", " + p.second.getValue().accept(exprVisitor, indent) + ") "); } } }
/* * Copyright (c) 2013, EMC Corporation ("EMC"). * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. * */ /* * @author Vijayanand Bharadwaj * @author John P. 
Field */ package com.emc.cto.ridagent.rid; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; import java.util.Map; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.xml.namespace.QName; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.apache.log4j.Logger; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; import org.w3c.dom.Document; import com.emc.cto.ridagent.rid.jaxb.*; import com.emc.cto.ridagent.rid.jaxb.RegistryKeyModified.Key; import com.emc.cto.ridagent.rid.util.HTTPSender; import com.emc.cto.ridagent.rid.webform.DNSRecord; import com.emc.cto.ridagent.rid.webform.DigitalSig; import com.emc.cto.ridagent.rid.webform.EmailInfo; import com.emc.cto.ridagent.rid.webform.Event; import com.emc.cto.ridagent.rid.webform.FileData; import com.emc.cto.ridagent.rid.webform.Hash; import com.emc.cto.ridagent.rid.webform.HashData; import com.emc.cto.ridagent.rid.webform.IncidentData; import com.emc.cto.ridagent.rid.webform.NetworkInfo; import com.emc.cto.ridagent.rid.webform.PhishingData; import com.emc.cto.ridagent.rid.webform.RegistryValues; import com.emc.cto.ridagent.rid.webform.SystemData; import com.emc.cto.xproc.PipelineInputCache; import com.emc.cto.xproc.XProcXMLProcessingContext; import com.emc.documentum.xml.xproc.XProcException; import com.emc.documentum.xml.xproc.pipeline.model.PipelineOutput; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import 
javax.xml.bind.Unmarshaller; // NOTE(review): completes the "import " begun on the previous line
import javax.xml.datatype.DatatypeConfigurationException;

/**
 * Spring MVC controller for sending RID (Real-time Inter-network Defense)
 * reports, queries and watch lists. Each handler either renders a web form
 * or feeds form data through an injected XProc pipeline, optionally POSTing
 * the pipeline output to a destination via {@link HTTPSender}.
 */
@Controller
@RequestMapping("/RIDSender")
public class RIDSender {

    // XProc pipelines injected through the setters below.
    // NOTE(review): the fields are static but set via instance setters — the
    // last-configured instance wins for the whole class; confirm this is intended.
    private static XProcXMLProcessingContext m_createReport = null; // POST
    private static XProcXMLProcessingContext m_getReport = null; // POST
    private static XProcXMLProcessingContext m_createQuery = null; // POST
    private static XProcXMLProcessingContext m_getQuery = null; // POST
    private static XProcXMLProcessingContext m_createWatchList = null;
    private static XProcXMLProcessingContext m_getWatchList = null;

    // log4j handler for this controller
    private static final Logger logger = Logger.getLogger(RIDSender.class);

    public RIDSender() {
        super();
        // TODO Auto-generated constructor stub
    }

    public void setCreateReport (XProcXMLProcessingContext val) {
        m_createReport = val;
    }

    public void setGetReport (XProcXMLProcessingContext val) {
        m_getReport = val;
    }

    public void setCreateQuery (XProcXMLProcessingContext val) {
        m_createQuery = val;
    }

    public void setGetQuery (XProcXMLProcessingContext val) {
        m_getQuery = val;
    }

    public void setCreateWatchList (XProcXMLProcessingContext val) {
        m_createWatchList = val;
    }

    public void setGetWatchList (XProcXMLProcessingContext val) {
        m_getWatchList = val;
    }

    // Renders the landing page.
    @RequestMapping(method = RequestMethod.GET)
    public ModelAndView displayMainPage(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displayMainPage");
            }
            return new ModelAndView("index");
        } finally {
            ; //TODO add finally handler
        }
    }

    /*************************************************************************
     *
     * RID REPORTS
     *
     *************************************************************************/

    // Renders the "create report" form backed by an empty RIDReport.
    @RequestMapping(method = RequestMethod.GET, value="/createReportForm")
    public ModelAndView displayCreateReportForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displayCreateReportForm");
            }
            return new ModelAndView("createReportForm", "report", new RIDReport());
        } finally {
            ; //TODO add finally handler
        }
    }

    // Runs the submitted report text through the createReport XProc pipeline.
    @RequestMapping(method = RequestMethod.POST, value="/create/report")
    public String createReport(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("report")RIDReport report, BindingResult result) throws XProcException, IOException, URISyntaxException, TransformerException, ServletException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In createReport");
                logger.debug("The Report text= " + report.getReportText());
            }
            //request.getPart("reportText");
            PipelineInputCache pi = new PipelineInputCache();
            // supply the source for the resource Create pipeline as an InputStream
            // NOTE(review): getBytes() uses the platform default charset — confirm UTF-8 is intended.
            pi.setInputPort("source", new ByteArrayInputStream(report.getReportText().getBytes()));
            PipelineOutput output = m_createReport.executeOn(pi);
            return "createReportSuccess";
        } finally {
            ; //TODO add finally handler
        }
    }

    @RequestMapping(method = RequestMethod.GET, value="/sendReportForm")
    public ModelAndView displaySendReportForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displaySendReportForm");
            }
            return new ModelAndView("sendReportForm", "params", new SendReportParams());
        } finally {
            ; //TODO add finally handler
        }
    }

    // Looks a report up by id through the getReport pipeline and POSTs it to
    // the requested destination; the HTTP response details populate the view.
    @RequestMapping(method = RequestMethod.POST, value="/send/report")
    public ModelAndView sendReport(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("params")SendReportParams params, BindingResult result) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            Map<String,Object> postResponse = null;
            if(logger.isDebugEnabled()){
                logger.debug("In sendReport");
                logger.debug("Report id = "+params.getId());
                logger.debug("Report destination = "+params.getDestination());
            }
            /* Get a report based on the id */
            PipelineInputCache pi = new PipelineInputCache();
            // pass the report ID into the pipeline for use in the xQuery (to look up the right report)
            pi.addParameter("xqueryParameters", new QName("id"), params.getId());
            // supply current resource URL as the base URL to craft hyperlinks
            //pi.addParameter("stylesheetParameters", new QName("baseURL"), request.getRequestURL().toString());
            // SessionedPipelineOutput output = new SessionedPipelineOutput(m_getReport.executeOn(pi));
            PipelineOutput output = m_getReport.executeOn(pi);
            /* POST the report */
            postResponse = HTTPSender.httpSend(output,params.getDestination());
            return new ModelAndView("sendReportStatus","postResponse",postResponse);
        } finally {
            ; //TODO add finally handler
        }
    }

    /*************************************************************************
     *
     * RID QUERIES
     *
     *************************************************************************/

    @RequestMapping(method = RequestMethod.GET, value="/createQueryForm")
    public ModelAndView displayCreateQueryForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displayCreateQueryForm");
            }
            return new ModelAndView("createQueryForm", "query", new RIDQuery());
        } finally {
            ; //TODO add finally handler
        }
    }

    // Runs the submitted query text through the createQuery XProc pipeline.
    @RequestMapping(method = RequestMethod.POST, value="/create/query")
    public String createQuery(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("query")RIDQuery query, BindingResult result) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In createQuery");
                logger.debug("Query text = " + query.getQueryText());
            }
            PipelineInputCache pi = new PipelineInputCache();
            // supply the source for the resource Create pipeline as an InputStream
            pi.setInputPort("source", new ByteArrayInputStream(query.getQueryText().getBytes()));
            PipelineOutput output = m_createQuery.executeOn(pi);
            return "createQuerySuccess";
        } finally {
            ; //TODO add finally handler
        }
    }

    @RequestMapping(method = RequestMethod.GET, value="/sendQueryForm")
    public ModelAndView displaySendQueryForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displaySendQueryForm");
            }
            return new ModelAndView("sendQueryForm", "params", new SendQueryParams());
        } finally {
            ; //TODO add finally handler
        }
    }

    // Looks a query up by id through the getQuery pipeline and POSTs it to the destination.
    @RequestMapping(method = RequestMethod.POST, value="/send/query")
    public ModelAndView sendQuery(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("params")SendQueryParams params, BindingResult result) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            Map<String,Object> postResponse = null;
            if(logger.isDebugEnabled()){
                logger.debug("In sendQuery");
                logger.debug("Id = "+params.getId());
                logger.debug("Destination = "+params.getDestination());
            }
            /* Get a report based on the id */
            PipelineInputCache pi = new PipelineInputCache();
            // pass the report ID into the pipeline for use in the xQuery (to look up the right report)
            pi.addParameter("xqueryParameters", new QName("id"), params.getId());
            // supply current resource URL as the base URL to craft hyperlinks
            //pi.addParameter("stylesheetParameters", new QName("baseURL"), request.getRequestURL().toString());
            PipelineOutput output = m_getQuery.executeOn(pi);
            /* POST the query */
            postResponse = HTTPSender.httpSend(output,params.getDestination());
            return new ModelAndView("sendQueryStatus","postResponse",postResponse);
        } finally {
            ; //TODO add finally handler
        }
    }

    // Renders the malware watch-list creation form.
    @RequestMapping(method = RequestMethod.GET, value="/createWatchListForm")
    public ModelAndView displaycreateWatchListForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displayCreateWatchListForm");
            }
            IncidentData ed=new IncidentData();
            return new ModelAndView("createMalwareWatchListForm", "classCommand", ed);
        } finally {
            ; //TODO add finally handler
        }
    }

    // Binds the form to IncidentData, marshals it to XML via JAXBBind, and
    // feeds the document into the createWatchList pipeline.
    @RequestMapping(method = RequestMethod.POST, value="/create/WatchList")
    public String createWatchList(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("classCommand")IncidentData watchList, BindingResult result) throws IOException, DatatypeConfigurationException, ParserConfigurationException, JAXBException {
        try {
            if(logger.isDebugEnabled()){
                //ipv4address a=(ipv4address)watchList.getIpv4address().get(0);
                logger.debug("In createWatchList ");
                logger.debug("Size of the Nodes "+ watchList.getNode().size());
            }
            Document doc=JAXBBind.createWatchList(watchList);
            PipelineInputCache pi = new PipelineInputCache();
            pi.setInputPort("source",doc);
            PipelineOutput output = m_createWatchList.executeOn(pi);
        } finally {
            ; //TODO add finally handler
        }
        return "createWatchListSuccess";
    }

    @RequestMapping(method = RequestMethod.GET, value="/createIndicatorsForm")
    public ModelAndView displaycreateIndicatorsForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displayCreateIndicatorsForm");
            }
            IncidentData ed=new IncidentData();
            return new ModelAndView("createWatchListForm", "classCommand", ed);
        } finally {
            ; //TODO add finally handler
        }
    }

    // Same flow as createWatchList but marshals via JAXBBind.createIndicators.
    @RequestMapping(method = RequestMethod.POST, value="/create/Indicators")
    public String createIndicators(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("classCommand")IncidentData watchList, BindingResult result) throws IOException, DatatypeConfigurationException, ParserConfigurationException, JAXBException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In createIndicators");
                logger.debug("Size of the Nodes "+ watchList.getNode().size());
            }
            Document doc=JAXBBind.createIndicators(watchList);
            PipelineInputCache pi = new PipelineInputCache();
            pi.setInputPort("source",doc);
            PipelineOutput output = m_createWatchList.executeOn(pi);
        } finally {
            ; //TODO add finally handler
        }
        return "createWatchListSuccess";
    }

    @RequestMapping(method = RequestMethod.GET, value="/sendWatchListForm")
    public ModelAndView displaySendWatchListForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displaySendWatchListForm");
            }
            return new ModelAndView("sendWatchList", "params", new sendWatchList());
        } finally {
            ; //TODO add finally handler
        }
    }

    // Looks a watch list up by id through the getWatchList pipeline and POSTs it.
    // NOTE(review): binds SendQueryParams (not a watch-list-specific form) — confirm intended.
    @RequestMapping(method = RequestMethod.POST, value="/send/WatchList")
    public ModelAndView sendWatchList(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("params")SendQueryParams params, BindingResult result) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            Map<String,Object> postResponse = null;
            if(logger.isDebugEnabled()){
                logger.debug("In sendWatchList");
                logger.debug("Id = "+params.getId());
                logger.debug("Destination = "+params.getDestination());
            }
            /* Get a report based on the id */
            PipelineInputCache pi = new PipelineInputCache();
            // pass the report ID into the pipeline for use in the xQuery (to look up the right report)
            pi.addParameter("xqueryParameters", new QName("id"), params.getId());
            // supply current resource URL as the base URL to craft hyperlinks
            //pi.addParameter("stylesheetParameters", new QName("baseURL"), request.getRequestURL().toString());
            PipelineOutput output = m_getWatchList.executeOn(pi);
            /* POST the query */
            postResponse = HTTPSender.httpSend(output,params.getDestination());
            return new ModelAndView("sendWatchListStatus","postResponse",postResponse);
        } finally {
            ; //TODO add finally handler
        }
    }

    @RequestMapping(method = RequestMethod.GET, value="/createPhishingForm")
    public ModelAndView displaycreatePhishingForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displayCreateIndicatorsForm");
            }
            IncidentData ed=new IncidentData();
            return new ModelAndView("createPhishingForm", "classCommand", ed);
        } finally {
            ; //TODO add finally handler
        }
    }

    // Marshals phishing incident data via JAXBBind.createPhishing and runs it
    // through the (shared) createWatchList pipeline.
    @RequestMapping(method = RequestMethod.POST, value="/create/Phishing")
    public String createPhishing(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("classCommand")IncidentData watchList, BindingResult result) throws XProcException, IOException, URISyntaxException, TransformerException, DatatypeConfigurationException, ParserConfigurationException, JAXBException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In createPhishing");
                logger.debug("Size of the nodes "+ watchList.getNode().size());
            }
            Document doc=JAXBBind.createPhishing(watchList);
            PipelineInputCache pi = new PipelineInputCache();
            pi.setInputPort("source",doc);
            PipelineOutput output = m_createWatchList.executeOn(pi);
        } finally {
            ; //TODO add finally handler
        }
        return "createPhishingSuccess";
    }

    @RequestMapping(method = RequestMethod.GET, value="/createDDOSForm")
    public ModelAndView displaycreateDDOSForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displayCreateIndicatorsForm");
            }
            IncidentData ed=new IncidentData();
            return new ModelAndView("createDDOSForm", "classCommand", ed);
        } finally {
            ; //TODO add finally handler
        }
    }

    // Marshals DDoS incident data via JAXBBind.createDDOS and runs it through
    // the (shared) createWatchList pipeline.
    @RequestMapping(method = RequestMethod.POST, value="/create/DDOS")
    public String createDDOS(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("classCommand")IncidentData watchList, BindingResult result) throws XProcException, IOException, URISyntaxException, TransformerException, DatatypeConfigurationException, ParserConfigurationException, JAXBException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In createDDOS");
                logger.debug("Size of the nodes "+ watchList.getNode().size());
            }
            Document doc=JAXBBind.createDDOS(watchList);
            PipelineInputCache pi = new PipelineInputCache();
            pi.setInputPort("source",doc);
            PipelineOutput output = m_createWatchList.executeOn(pi);
        } finally {
            ; //TODO add finally handler
        }
        return "createDDOSSuccess";
    }

    @RequestMapping(method = RequestMethod.GET, value="/viewWatchListForm")
    public ModelAndView displayViewWatchListForm(HttpServletRequest request, HttpServletResponse response, Model model) throws XProcException, IOException, URISyntaxException, TransformerException {
        //not implemented yet
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In displaySendWatchListForm");
            }
            return new ModelAndView("viewReport", "params", new sendWatchList());
        } finally {
            ; //TODO add finally handler
        }
    }

    // Fetches a watch list by id via the getWatchList pipeline and unmarshals
    // the returned RID/IODEF document into IncidentData for display.
    // NOTE(review): this method continues past this section of the file.
    @RequestMapping(method = RequestMethod.POST, value="/listWatchList")
    public ModelAndView viewWatchList(HttpServletRequest request, HttpServletResponse response, Model model, @ModelAttribute("params")SendQueryParams params, BindingResult result) throws XProcException, IOException, URISyntaxException, TransformerException, JAXBException {
        try {
            if(logger.isDebugEnabled()){
                logger.debug("In viewWatchList");
                logger.debug("Id = "+params.getId());
            }
            /* Get a report based on the id */
            PipelineInputCache pi = new PipelineInputCache();
            pi.addParameter("xqueryParameters", new QName("id"), params.getId());
            PipelineOutput output = m_getWatchList.executeOn(pi);
            IncidentData eventdata=new IncidentData();
            List<com.emc.documentum.xml.xproc.io.Source> sources =
output.getSources(output.getPrimaryOutputPort()); if (sources != null && !sources.isEmpty()) { // pipeline should only return a single value - we return the first as the output org.w3c.dom.Node node = sources.get(0).getNode(); JAXBContext jc = JAXBContext.newInstance( "com.emc.cto.ridagent.rid.jaxb" ); Unmarshaller u = jc.createUnmarshaller(); JAXBElement element = (JAXBElement) u.unmarshal(node); RIDType d= (RIDType)element.getValue(); RIDPolicyType policy=d.getRIDPolicy(); if(policy.getReportSchema()!=null){ ReportSchemaType rst=policy.getReportSchema(); if(rst.getXMLDocument()!=null){ ExtensionType et=rst.getXMLDocument(); IODEFDocument document=(IODEFDocument)et.getContent().get(0); Incident incident=document.getIncident().get(0); if(incident.getDescription().size()!=0){ MLStringType des=incident.getDescription().get(0); eventdata.setDescription(des.getValue()); } if(incident.getReportTime()!=null) eventdata.setReporttime(incident.getReportTime().toString()); if(incident.getStartTime()!=null) eventdata.setStarttime(incident.getStartTime().toString()); if(incident.getEndTime()!=null) eventdata.setStoptime(incident.getEndTime().toString()); if(incident.getDetectTime()!=null) eventdata.setDetecttime(incident.getDetectTime().toString()); for (int eventCount=0;eventCount<incident.getEventData().size();eventCount++){ EventData one=incident.getEventData().get(eventCount); Event nodeobj= new Event(); eventdata.getNode().add(nodeobj); if(one.getMethod().size()!=0){ Method methodobj=one.getMethod().get(0); Reference ref=(Reference)methodobj.getReferenceOrDescription().get(0); nodeobj.setRefName(ref.getReferenceName().getValue()); nodeobj.setRefURL(ref.getURL().get(0)); } boolean domainPresent=false; for(int flowindex=0;flowindex<one.getFlow().size();flowindex++){ Flow flowobject=one.getFlow().get(flowindex); if(flowobject.getSystem().size()>0){ com.emc.cto.ridagent.rid.jaxb.System sysobject=flowobject.getSystem().get(0); com.emc.cto.ridagent.rid.jaxb.Node 
nodes=sysobject.getNode().get(0); for(int addressindex=0;addressindex<nodes.getNodeNameOrDomainDataOrAddress().size();addressindex++){ Object obj=nodes.getNodeNameOrDomainDataOrAddress().get(addressindex); if(obj instanceof DomainData ){ domainPresent=true; break; } } } if(domainPresent){ //domaindata in flow element Flow flowobj=one.getFlow().get(flowindex); for(int systemindex=0;systemindex<flowobj.getSystem().size();systemindex++){ com.emc.cto.ridagent.rid.jaxb.System sysobj=flowobj.getSystem().get(systemindex); PhishingData ip=new PhishingData(); nodeobj.getPhishing().add(ip); ip.setSystemcategory(sysobj.getCategory()); com.emc.cto.ridagent.rid.jaxb.Node nodes=sysobj.getNode().get(0); //get the first node for(int addressindex=0;addressindex<nodes.getNodeNameOrDomainDataOrAddress().size();addressindex++){ Object obj=nodes.getNodeNameOrDomainDataOrAddress().get(addressindex); if(obj instanceof Address){ SystemData address=new SystemData(); ip.getSystem().add(address); Address addressobj=(Address)obj; address.setType(addressobj.getCategory()); address.setValue(addressobj.getValue()); } else if (obj instanceof MLStringType){ SystemData address=new SystemData(); ip.getSystem().add(address); MLStringType nodename=(MLStringType)obj; address.setType("Name"); address.setValue(nodename.getValue()); } else if(obj instanceof DomainData ){ EmailInfo emailobj=new EmailInfo(); ip.getEmailinfo().add(emailobj); DomainData domainobj=(DomainData)obj; emailobj.setDomain(domainobj.getName().getValue()); emailobj.setDomaindate(domainobj.getDateDomainWasChecked().toString()); for(int dnsindex=0;dnsindex<domainobj.getRelatedDNS().size();dnsindex++){ RelatedDNSEntryType dnsobj=domainobj.getRelatedDNS().get(dnsindex); DNSRecord dnsrecordobj=new DNSRecord(); emailobj.getDns().add(dnsrecordobj); dnsrecordobj.setType(dnsobj.getRecordType()); dnsrecordobj.setValue(dnsobj.getValue()); } } } for(int serviceindex=0;serviceindex<sysobj.getService().size();serviceindex++){ Service 
serviceobj=sysobj.getService().get(serviceindex); if(serviceobj.getEmailInfo()!=null){ EmailInfo emailinfoobj=ip.getEmailinfo().get(serviceindex); emailinfoobj.setEmailid(serviceobj.getEmailInfo().getEmail().getValue()); emailinfoobj.setSubject(serviceobj.getEmailInfo().getEmailSubject().getValue()); emailinfoobj.setMailerid(serviceobj.getEmailInfo().getXMailer().getValue()); } else{ SystemData address=ip.getSystem().get(serviceindex); address.setProtocolno(address.getProtocolno()); address.setPortno(address.getPortno()); if(serviceobj.getApplication()!=null){ SoftwareType useragent=serviceobj.getApplication(); address.setUseragent(useragent.getUserAgent()); } } } NodeRole noderole=nodes.getNodeRole().get(0); //get the only node role ip.setCategory(noderole.getCategory()); if(noderole.getAttacktype()!=null) ip.setRole(noderole.getAttacktype().name()); } } else{ Flow flowobj=one.getFlow().get(flowindex); for(int systemindex=0;systemindex<flowobj.getSystem().size();systemindex++){ com.emc.cto.ridagent.rid.jaxb.System sysobj=flowobj.getSystem().get(systemindex); NetworkInfo ip=new NetworkInfo(); nodeobj.getAddress().add(ip); ip.setSystemcategory(sysobj.getCategory()); for (int nodeindex=0;nodeindex<sysobj.getNode().size();nodeindex++){ com.emc.cto.ridagent.rid.jaxb.Node nodes=sysobj.getNode().get(nodeindex); if(nodes.getNodeRole().size()!=0){ NodeRole noderole=nodes.getNodeRole().get(0); //get the only node role ip.setCategory(noderole.getCategory()); if(noderole.getAttacktype()!=null) ip.setRole(noderole.getAttacktype().name()); } for(int addressindex=0;addressindex<nodes.getNodeNameOrDomainDataOrAddress().size();addressindex++){ SystemData address=new SystemData(); ip.getSystem().add(address); Object obj=nodes.getNodeNameOrDomainDataOrAddress().get(addressindex); if(obj instanceof Address){ Address addressobj=(Address)obj; address.setType(addressobj.getCategory()); address.setValue(addressobj.getValue()); } else if (obj instanceof MLStringType){ MLStringType 
nodename=(MLStringType)obj; address.setType("Name"); address.setValue(nodename.getValue()); } } } for(int serviceindex=0;serviceindex<sysobj.getService().size();serviceindex++){ Service serviceobj=sysobj.getService().get(serviceindex); SystemData address=ip.getSystem().get(serviceindex); address.setProtocolno(serviceobj.getIpProtocol()); address.setPortno(serviceobj.getPort()); if(serviceobj.getApplication()!=null){ SoftwareType useragent=serviceobj.getApplication(); address.setUseragent(useragent.getUserAgent()); } } } } } Record recordobj=one.getRecord(); if(recordobj!=null){ for(int recorddataindex=0;recorddataindex<recordobj.getRecordData().size();recorddataindex++){ RecordData recorddataobj=recordobj.getRecordData().get(recorddataindex); for(int hashindex=0;hashindex<recorddataobj.getHashInformation().size();hashindex++){ HashSigDetails hashobj=recorddataobj.getHashInformation().get(hashindex); if(hashobj.getSignature().size()!=0){ DigitalSig signatureobj=new DigitalSig(); nodeobj.getDsig().add(signatureobj); signatureobj.setType(hashobj.getType()); signatureobj.setValidity(hashobj.isValid().toString()); SignatureType signature=hashobj.getSignature().get(0); //get the signature SignedInfoType signedinfo= signature.getSignedInfo(); signatureobj.setCan_method(signedinfo.getCanonicalizationMethod().getAlgorithm()); signatureobj.setSignature_method(signedinfo.getSignatureMethod().getAlgorithm()); ReferenceType reference=signedinfo.getReference().get(0); signatureobj.setHash_type(reference.getDigestMethod().getAlgorithm()); signatureobj.setHash_value(reference.getDigestValue()); signatureobj.setSignature_value(signature.getSignatureValue().getValue()); } else{ Hash hash=new Hash(); nodeobj.getHash().add(hash); hash.setType(hashobj.getType()); for(int fileindex=0;fileindex<hashobj.getFileName().size();fileindex++){ MLStringType fileinfo=hashobj.getFileName().get(fileindex); FileData filedataobj=new FileData(); hash.getFile().add(filedataobj); 
filedataobj.setFilename(fileinfo.getValue()); } for(int anotherhashindex=0;anotherhashindex<hashobj.getReference().size();anotherhashindex++){ ReferenceType referenceobj=hashobj.getReference().get(anotherhashindex); HashData hashvalueobj=new HashData(); hash.getValue().add(hashvalueobj); DigestMethodType digestobj=referenceobj.getDigestMethod(); hashvalueobj.setHash_type(digestobj.getAlgorithm()); hashvalueobj.setValue(referenceobj.getDigestValue()); } } } for(int registryindex=0;registryindex<recorddataobj.getWindowsRegistryKeysModified().size();registryindex++){ RegistryKeyModified rkmobj=recorddataobj.getWindowsRegistryKeysModified().get(registryindex); for(int keyindex=0;keyindex<rkmobj.getKey().size();keyindex++){ Key key=rkmobj.getKey().get(keyindex); RegistryValues registryvaluesobj=new RegistryValues(); nodeobj.getRegistry().add(registryvaluesobj); registryvaluesobj.setAction(key.getRegistryaction()); registryvaluesobj.setKey(key.getKeyName()); registryvaluesobj.setValue(key.getValue()); } } } } } } } } return new ModelAndView("viewWebReport","eventdata",eventdata); } finally { ; //TODO add finally handler } } }
/* * Copyright 2017-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.gcp.data.spanner.core; import java.util.function.Supplier; import javax.annotation.Nullable; import com.google.api.core.ApiFuture; import com.google.cloud.spanner.AbortedException; import com.google.cloud.spanner.AsyncResultSet; import com.google.cloud.spanner.DatabaseClient; import com.google.cloud.spanner.Key; import com.google.cloud.spanner.KeySet; import com.google.cloud.spanner.Mutation; import com.google.cloud.spanner.Options; import com.google.cloud.spanner.Options.QueryOption; import com.google.cloud.spanner.Options.ReadOption; import com.google.cloud.spanner.ReadContext; import com.google.cloud.spanner.ResultSet; import com.google.cloud.spanner.SpannerException; import com.google.cloud.spanner.Statement; import com.google.cloud.spanner.Struct; import com.google.cloud.spanner.TransactionContext; import com.google.cloud.spanner.TransactionManager; import org.springframework.dao.DuplicateKeyException; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionException; import org.springframework.transaction.UnexpectedRollbackException; import org.springframework.transaction.support.AbstractPlatformTransactionManager; import org.springframework.transaction.support.DefaultTransactionStatus; import org.springframework.transaction.support.TransactionSynchronizationManager; /** * 
Spanner transaction manager. * * @author Alexander Khimich * @author Chengyuan Zhao * @author Mike Eltsufin * * @since 1.1 */ public class SpannerTransactionManager extends AbstractPlatformTransactionManager { private final Supplier<DatabaseClient> databaseClientProvider; public SpannerTransactionManager(final Supplier databaseClientProvider) { this.databaseClientProvider = databaseClientProvider; } @Override protected Object doGetTransaction() throws TransactionException { Tx tx = (Tx) TransactionSynchronizationManager.getResource(databaseClientProvider.get()); if (tx != null && tx.getTransactionContext() != null && (tx.getTransactionManager() != null && tx.getTransactionManager().getState() == TransactionManager.TransactionState.STARTED || tx.isReadOnly())) { return tx; } return new Tx(databaseClientProvider.get()); } @Override protected void doBegin(Object transactionObject, TransactionDefinition transactionDefinition) throws TransactionException { if (transactionDefinition.getIsolationLevel() != TransactionDefinition.ISOLATION_DEFAULT) { throw new IllegalStateException( "SpannerTransactionManager supports only isolation level TransactionDefinition.ISOLATION_DEFAULT"); } if (transactionDefinition.getPropagationBehavior() != TransactionDefinition.PROPAGATION_REQUIRED) { throw new IllegalStateException( "SpannerTransactionManager supports only propagation behavior " + "TransactionDefinition.PROPAGATION_REQUIRED"); } Tx tx = (Tx) transactionObject; if (transactionDefinition.isReadOnly()) { final ReadContext targetTransactionContext = this.databaseClientProvider.get() .readOnlyTransaction(); tx.isReadOnly = true; tx.transactionManager = null; tx.transactionContext = new TransactionContext() { @Override public void buffer(Mutation mutation) { throw new IllegalStateException("Spanner transaction cannot apply" + " mutation because it is in readonly mode"); } @Override public void buffer(Iterable<Mutation> iterable) { throw new IllegalStateException("Spanner transaction 
cannot apply" + " mutations because it is in readonly mode"); } @Override public long executeUpdate(Statement statement) { throw new IllegalStateException("Spanner transaction cannot execute DML " + "because it is in readonly mode"); } @Override public ApiFuture<Long> executeUpdateAsync(Statement statement) { throw new IllegalStateException("Spanner transaction cannot execute DML " + "because it is in readonly mode"); } @Override public long[] batchUpdate(Iterable<Statement> iterable) { throw new IllegalStateException("Spanner transaction cannot execute DML " + "because it is in readonly mode"); } @Override public ApiFuture<long[]> batchUpdateAsync(Iterable<Statement> iterable) { throw new IllegalStateException("Spanner transaction cannot execute DML " + "because it is in readonly mode"); } @Override public ResultSet read( String s, KeySet keySet, Iterable<String> iterable, Options.ReadOption... readOptions) { return targetTransactionContext.read(s, keySet, iterable, readOptions); } @Override public AsyncResultSet readAsync(String s, KeySet keySet, Iterable<String> iterable, ReadOption... readOptions) { return targetTransactionContext.readAsync(s, keySet, iterable, readOptions); } @Override public ResultSet readUsingIndex( String s, String s1, KeySet keySet, Iterable<String> iterable, Options.ReadOption... readOptions) { return targetTransactionContext.readUsingIndex(s, s1, keySet, iterable, readOptions); } @Override public AsyncResultSet readUsingIndexAsync( String s, String s1, KeySet keySet, Iterable<String> iterable, Options.ReadOption... 
readOptions) { return targetTransactionContext.readUsingIndexAsync(s, s1, keySet, iterable, readOptions); } @Nullable @Override public Struct readRow(String s, Key key, Iterable<String> iterable) { return targetTransactionContext.readRow(s, key, iterable); } @Override public ApiFuture<Struct> readRowAsync(String s, Key key, Iterable<String> iterable) { return targetTransactionContext.readRowAsync(s, key, iterable); } @Nullable @Override public Struct readRowUsingIndex( String s, String s1, Key key, Iterable<String> iterable) { return targetTransactionContext.readRowUsingIndex(s, s1, key, iterable); } @Override public ApiFuture<Struct> readRowUsingIndexAsync(String s, String s1, Key key, Iterable<String> iterable) { return targetTransactionContext.readRowUsingIndexAsync(s, s1, key, iterable); } @Override public ResultSet executeQuery( Statement statement, Options.QueryOption... queryOptions) { return targetTransactionContext.executeQuery(statement, queryOptions); } @Override public AsyncResultSet executeQueryAsync(Statement statement, QueryOption... 
queryOptions) { return targetTransactionContext.executeQueryAsync(statement, queryOptions); } @Override public ResultSet analyzeQuery(Statement statement, QueryAnalyzeMode queryAnalyzeMode) { return targetTransactionContext.analyzeQuery(statement, queryAnalyzeMode); } @Override public void close() { targetTransactionContext.close(); } }; } else { tx.transactionManager = tx.databaseClient.transactionManager(); tx.transactionContext = tx.getTransactionManager().begin(); tx.isReadOnly = false; } TransactionSynchronizationManager.bindResource(tx.getDatabaseClient(), tx); } @Override protected void doCommit(DefaultTransactionStatus defaultTransactionStatus) throws TransactionException { Tx tx = (Tx) defaultTransactionStatus.getTransaction(); try { if (tx.getTransactionManager() != null && tx.getTransactionManager().getState() == TransactionManager.TransactionState.STARTED) { tx.getTransactionManager().commit(); } if (tx.isReadOnly()) { tx.getTransactionContext().close(); } } catch (AbortedException ex) { // The client library will not close transaction resources if state == ABORTED // to allow for retries, but we do not retry aborted transactions. 
// See: SessionPool.close() if (tx.getTransactionManager() != null) { tx.getTransactionManager().close(); } throw new UnexpectedRollbackException("Transaction Got Rolled Back", ex); } catch (SpannerException ex) { throw makeDataIntegrityViolationException(ex); } } private RuntimeException makeDataIntegrityViolationException(SpannerException e) { switch (e.getErrorCode()) { case ALREADY_EXISTS: return new DuplicateKeyException(e.getErrorCode().toString(), e); } return e; } @Override protected void doRollback(DefaultTransactionStatus defaultTransactionStatus) throws TransactionException { Tx tx = (Tx) defaultTransactionStatus.getTransaction(); if (tx.getTransactionManager() != null && (tx.getTransactionManager().getState() == TransactionManager.TransactionState.STARTED || tx.getTransactionManager().getState() == TransactionManager.TransactionState.ABORTED)) { tx.getTransactionManager().rollback(); } if (tx.isReadOnly()) { tx.getTransactionContext().close(); } } @Override protected boolean isExistingTransaction(Object transaction) { return ((Tx) transaction).getTransactionContext() != null; } @Override protected void doCleanupAfterCompletion(Object transaction) { Tx tx = (Tx) transaction; TransactionSynchronizationManager.unbindResource(tx.getDatabaseClient()); tx.transactionManager = null; tx.transactionContext = null; tx.isReadOnly = false; } /** * A transaction object that holds the transaction context. */ public static class Tx { TransactionManager transactionManager; TransactionContext transactionContext; boolean isReadOnly; DatabaseClient databaseClient; public Tx(DatabaseClient databaseClient) { this.databaseClient = databaseClient; } public TransactionContext getTransactionContext() { return this.transactionContext; } public TransactionManager getTransactionManager() { return this.transactionManager; } public boolean isReadOnly() { return this.isReadOnly; }; public DatabaseClient getDatabaseClient() { return databaseClient; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.sparql.algebra;

import static org.junit.Assert.assertEquals ;
import static org.junit.Assert.assertFalse ;
import static org.junit.Assert.assertTrue ;

import java.util.Map ;

import org.apache.jena.atlas.lib.StrUtils ;
import org.apache.jena.query.Query ;
import org.apache.jena.query.QueryFactory ;
import org.apache.jena.query.Syntax ;
import org.apache.jena.sparql.sse.SSE ;
import org.junit.Assert ;
import org.junit.Test ;

/**
 * Tests for {@link OpAsQuery}
 */
public class TestOpAsQuery {

    // NOTE(review): "Alegbra"/"alegra" below are long-standing typos in public
    // method names/comments; renaming would change the class's public interface,
    // so they are documented rather than fixed here.

    // Basic stuff
    @Test public void testBasic01() { test_roundTripQuery("SELECT * { }") ; }

    @Test public void testBasic02() { test_roundTripQuery("SELECT * { ?s ?p ?o }") ; }

    @Test public void testBasic03() { test_roundTripQuery("SELECT * { ?s ?p ?o FILTER(?o > 5) }") ; }

    @Test public void testBasic04() { test_roundTripQuery("SELECT ?s { ?s ?p ?o FILTER(?o > 5) }") ; }

    // 01, 02: Same algebra.
    @Test public void testBind01() { test_roundTripQuery("SELECT ?s (?o + 5 AS ?B) { ?s ?p ?o }") ; }

    @Test public void testBind02() { test_roundTripAlegbra("SELECT ?o ?B { ?s ?p ?o BIND (?o + 5 AS ?B) }") ; }

    // No project
    @Test public void testBind03() { test_roundTripQuery("SELECT * { ?s ?p ?o BIND (?o + 5 AS ?B) }") ; }

    // Over nested.
    @Test public void testBind04() {
        test_roundTripQuery("SELECT * { ?s ?p ?o BIND(?o+1 AS ?a1) ?x ?q ?v BIND(?v+2 AS ?a2) }",
                            "SELECT * { { ?s ?p ?o BIND(( ?o + 1 ) AS ?a1) } ?x ?q ?v BIND(( ?v + 2 ) AS ?a2) } ");
    }

    // Over nested.
    @Test public void testBind05() {
        test_roundTripQuery("SELECT * { ?s ?p ?o BIND(?o+1 AS ?a1) ?x ?q ?v BIND(2 AS ?a2) } ORDER BY ?s",
                            "SELECT * { { { ?s ?p ?o BIND(( ?o + 1 ) AS ?a1) } ?x ?q ?v } BIND(2 AS ?a2) } ORDER BY ?s");
    }

    // https://issues.apache.org/jira/browse/JENA-1843
    @Test public void testBind06() { test_roundTripQuery("SELECT * { ?s ?p ?o BIND(?o + 1 AS ?a1) BIND(?v+2 as ?a2) }"); }

    @Test public void testBind07() { test_roundTripQuery("SELECT * { BIND(?o + 1 AS ?a1) BIND(?v+2 as ?a2) }"); }

    @Test public void testOptional01() { test_roundTripQuery("SELECT * WHERE { ?s ?p ?o OPTIONAL { ?s ?q ?z FILTER (?foo) } }") ; }

    // Double {{...}} matter here in SPARQL.
    @Test public void testOptional02() { test_roundTripQuery("SELECT * WHERE { ?s ?p ?o OPTIONAL { { ?s ?q ?z FILTER (?foo) } } }") ; }

    @Test public void testOptional03()
    // Don't currently unnest the LHS of the second optional. See testOptional03a
    { test_roundTripQuery("SELECT * WHERE { ?s ?p ?o OPTIONAL { ?s ?p1 ?o1 } OPTIONAL { ?s ?p2 ?o2 } } ") ; }

    @Test public void testOptional04() { test_roundTripQuery("SELECT * WHERE { ?s ?p ?o OPTIONAL { ?s ?p1 ?o1 } OPTIONAL { ?s ?p2 ?o2 } OPTIONAL { ?s ?p3 ?o3 }} ") ; }

    @Test public void testCountStar() { test_roundTripQuery("select (count(*) as ?cs) { ?s ?p ?o }"); }

    @Test public void testCountGroup() { test_roundTripQuery("select (count(?p) as ?cp) { ?s ?p ?o } group by ?s"); }

    @Test public void testCountGroupAs() { test_roundTripQuery("select (count(?p) as ?cp) { ?s ?p ?o }"); }

    @Test public void testDoubleCount() {
        // Also checks the projected aggregate variables survive the round trip.
        Query[] result = test_roundTripQuery("select (count(?s) as ?sc) (count(?p) as ?pc) { ?s ?p ?o }") ;
        assertEquals(2, result[1].getResultVars().size());
        assertTrue(result[1].getResultVars().contains("sc"));
        assertTrue(result[1].getResultVars().contains("pc"));
    }

    /* JENA-166 */
    @Test public void testGroupWithExpression() {
        test_roundTripQuery("SELECT (sample(?a) + 1 AS ?c) {} GROUP BY ?x");
    }

    /* Coverage developed for JENA-963 : GROUP BY*/
    @Test public void testGroupBy_01() { test_roundTripQuery("SELECT ?s { ?s ?p ?o } GROUP BY ?s"); }

    @Test public void testGroupBy_02() { test_roundTripQuery("SELECT (count(?p) as ?cp) { ?s ?p ?o } GROUP BY ?s"); }

    @Test public void testGroupBy_03() { test_roundTripQuery("SELECT ?s { ?s ?p ?o } GROUP BY ?s HAVING (count(*) > 1 )"); }

    @Test public void testGroupBy_04() { test_roundTripQuery("SELECT ?s { ?s ?p ?o } GROUP BY ?s HAVING (?s > 1 )"); }

    @Test public void testGroupBy_05() { test_roundTripQuery("SELECT (count(?p) as ?cp) { ?s ?p ?o } GROUP BY ?s HAVING (?cp > 1 )"); }

    @Test public void testGroupBy_06() { test_roundTripQuery("SELECT (count(?p) as ?cp) { ?s ?p ?o } GROUP BY (abs(?o)) HAVING (?cp > 1 )"); }

    @Test public void testGroupBy_07() { test_roundTripQuery("SELECT (?X+2 AS ?Y) (count(?p) as ?cp) ?Z (1/?X AS ?X1) { ?s ?p ?o } GROUP BY ?Z (abs(?o) AS ?X) HAVING (?cp > 1 )"); }

    @Test public void testGroupBy_08() { test_roundTripQuery("SELECT (count(?p) as ?cp) { ?s ?p ?o } GROUP BY (abs(?o)) HAVING (?cp > 1 )"); }

    @Test public void testGroupBy_09() { test_roundTripQuery("SELECT (count(?p) as ?cp) { ?s ?p ?o } GROUP BY (abs(?o)) ORDER BY (COUNT(*))"); }

    @Test public void testGroupBy_10() { test_roundTripQuery("SELECT (7+count(?p) as ?cp) { ?s ?p ?o } GROUP BY (abs(?o)) HAVING (?cp > 1 && SUM(?o) > 99 ) ORDER BY (6+COUNT(*))"); }

    @Test public void testGroupBy_11() { test_roundTripQuery("SELECT ?X { ?s ?p ?o } GROUP BY (abs(?o) AS ?X) HAVING (?cp > 1 )"); }

    @Test public void testGroupBy_12() { test_roundTripQuery("SELECT * { ?s ?q ?z {SELECT DISTINCT * { ?s ?p ?o }} }"); }

    // https://issues.apache.org/jira/browse/JENA-1844
    @Test public void testGroupBy_13() { test_roundTripQuery("SELECT * { ?s ?p ?o BIND(?o+1 AS ?a1) } ORDER BY ?s"); }

    @Test public void testSubQuery_01() { test_roundTripQuery("SELECT ?s { SELECT (count(*) as ?cp) { ?s ?p ?o } }") ; }

    @Test public void testSubQuery_02() { test_roundTripQuery("SELECT ?s { ?s ?p ?o { SELECT (count(*) as ?cp) { ?s ?p ?o } }}") ; }

    @Test public void testSubQuery_03() { test_roundTripQuery("SELECT ?s { { SELECT (count(*) as ?cp) { ?s ?p ?o } } ?s ?p ?o }") ; }

    @Test public void testSubQuery_04() { test_roundTripQuery("SELECT * WHERE { ?s ?p ?o . BIND(?o AS ?x) }") ; }

    @Test public void testSubQuery_05() { test_roundTripQuery("SELECT (?o AS ?x) WHERE { ?s ?p ?o .}") ; }

    @Test public void testProject1() { test_roundTripQuery("SELECT (?x + 1 AS ?c) {}"); }

    @Test public void testProject2() {
        Query[] result = test_roundTripQuery("SELECT (?x + 1 AS ?c) ?d {}");
        assertEquals(2, result[1].getResultVars().size());
        assertTrue(result[1].getResultVars().contains("c"));
        assertTrue(result[1].getResultVars().contains("d"));
    }

    @Test public void testNestedBind() { test_roundTripQuery("SELECT ?c { { } UNION { BIND(?x + 1 AS ?c) } }"); }

    @Test public void testNestedProject() { test_roundTripQuery("SELECT (?x + 1 AS ?c) { { } UNION { } }"); }

    @Test public void testGroupExpression() { test_roundTripQuery("SELECT ?z { } GROUP BY (?x + ?y AS ?z)"); }

    @Test public void testNestedProjectWithGroup() { test_roundTripQuery("SELECT (SAMPLE(?c) as ?s) { {} UNION {BIND(?x + 1 AS ?c)} } GROUP BY ?x"); }

    @Test public void testQuadPatternInDefaultGraph() { test_roundTripQueryQuads("SELECT * WHERE { ?s a ?type }"); }

    @Test public void testGraphClauseUri() { test_roundTripQuery("SELECT * WHERE { GRAPH <http://example> { ?s a ?type } }"); }

    @Test public void testGraphClauseComplex() { test_roundTripQuery("SELECT * WHERE { GRAPH <http://example> { ?s a ?type . OPTIONAL { ?s <http://label> ?label } } }"); }

    @Test public void testQuadPatternInGraph() { test_roundTripQueryQuads("SELECT * WHERE { GRAPH <http://example> { ?s a ?type } }"); }

    @Test public void testQuadPatternInGraphComplex01() {
        //This fails because OpQuadPattern's are converted back to individual GRAPH clauses
        Object[] result = roundTripQueryQuad("SELECT * WHERE { GRAPH <http://example> { ?s a ?type . OPTIONAL { ?s <http://label> ?label } } }");
        assertFalse(result[0].equals(result[1]));
    }

    @Test public void testQuadPatternInGraphComplex02() {
        //This succeeds since each OpQuadPattern is from a single simple GRAPH clause
        test_roundTripQueryQuads("SELECT * WHERE { GRAPH <http://example> { ?s a ?type } OPTIONAL { GRAPH <http://example> { ?s <http://label> ?label } } }");
    }

    @Test public void testExtend1() {
        // Top Level BIND should now be round trippable
        test_roundTripQuery("SELECT * WHERE { ?s ?p ?o . BIND(?o AS ?x) }");
    }

    @Test public void testExtend2() {
        // Nested BIND should always have been round trippable
        test_roundTripQuery("SELECT * WHERE { GRAPH ?g { ?s ?p ?o . BIND(?o AS ?x) } }");
    }

    @Test public void testExtend3() {
        //JENA-429
        String query = StrUtils.strjoinNL ("PREFIX : <http://www.cipe.accamargo.org.br/ontologias/h2tc.owl#>" ,
                                           "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>" ,
                                           "PREFIX mylib: <java:dateadd.lib.pkgfor.arq.>",
                                           "",
                                           "SELECT ?yearmonth ( count(?document) as ?total )",
                                           "{" ,
                                           " ?document a :Document;",
                                           " :documentDateOfCreation ?date ;",
                                           " :documentType \"exam results\" ." ,
                                           " BIND( mylib:DateFormat( xsd:string(?date), \"yyyy-MM\" ) as ?yearmonth )",
                                           "} group by ?yearmonth") ;
        test_roundTripQuery(query);
    }

    @Test public void testExtend4() {
        //Simplified repo of JENA-429
        test_roundTripQuery("SELECT ?key (COUNT(?member) AS ?total) WHERE { ?s ?p ?o . BIND(LCASE(?o) AS ?key) } GROUP BY ?key");
    }

    @Test public void testExtendInService() {
        //Original test case from JENA-422
        Query[] result = test_roundTripQuery("SELECT * WHERE { SERVICE <http://example/endpoint> { ?s ?p ?o . BIND(?o AS ?x) } }");
        assertTrue(result[1].toString().contains("BIND"));
    }

    @Test public void testSubQuery1() { test_roundTripQuery("SELECT ?s WHERE { SELECT ?s ?p WHERE { ?s ?p ?o } }"); }

    @Test public void testSubQuery2() {
        String query = "SELECT ?s ?x WHERE { { SELECT ?s ?p WHERE { ?s ?p ?o } } { SELECT ?x WHERE { ?x ?p ?o } } }";
        // The second inner sub-query is specially fixed up in OpJoin processing.
        // Not all cases of sub-query have unnecessary {} removed.
        test_roundTripQuery(query) ;
    }

    @Test public void testSubQuery3() {
        String query = "SELECT * WHERE { { SELECT ?s ?p WHERE { ?s ?p ?o } } { SELECT ?x WHERE { ?x ?p ?o } } }";
        test_roundTripQuery(query) ;
    }

    @Test public void testAggregatesInSubQuery1() {
        //Simplified form of a test case provided via the mailing list (JENA-445)
        String query = "SELECT ?key ?agg WHERE { SELECT ?key (COUNT(*) AS ?agg) { ?key ?p ?o } GROUP BY ?key }";
        test_roundTripQuery(query);
    }

    @Test public void testAggregatesInSubQuery2() {
        //Simplified form of a test case provided via the mailing list (JENA-445)
        test_roundTripAlegbra("SELECT * WHERE { { SELECT ?key (COUNT(*) AS ?agg) { ?key ?p ?o } GROUP BY ?key } }");
    }

    @Test public void testAggregatesInSubQuery3() {
        //Actual test case from JENA-445 bug report
        String queryString = "PREFIX dcterms: <http://purl.org/dc/terms/> \n" +
                "PREFIX dbpedia: <http://dbpedia.org/resource/> \n" +
                "SELECT ?num_of_holidays ?celebrate_Chinese_New_Year WHERE { \n" +
                "{" +
                "SELECT ?country_cat (COUNT(?holiday) as ?num_of_holidays) \n" +
                "WHERE {" +
                "?country_cat <http://www.w3.org/2004/02/skos/core#broader> <http://dbpedia.org/resource/Category:Public_holidays_by_country>. \n" +
                "?holiday dcterms:subject ?country_cat \n" +
                "}GROUP by ?country_cat \n" +
                "} \n" +
                "{ \n" +
                "SELECT ?country_cat (COUNT(?holiday) as ?celebrate_Chinese_New_Year) \n" +
                "WHERE { \n" +
                "?country_cat <http://www.w3.org/2004/02/skos/core#broader> <http://dbpedia.org/resource/Category:Public_holidays_by_country>. \n" +
                "?holiday dcterms:subject ?country_cat \n" +
                "FILTER(?holiday=\"http://dbpedia.org/resource/Lunar_New_Year\'s_Day\") \n" +
                "}GROUP by ?country_cat \n" +
                "} \n" +
                "}\n";
        test_roundTripQuery(queryString);
    }

    @Test public void testModifiersOnSubQuery1() {
        // From JENA-954
        String query = StrUtils.strjoinNL("SELECT (COUNT(*) as ?count) {",
                                          " SELECT DISTINCT ?uri ?graph WHERE {",
                                          " GRAPH ?graph {",
                                          " ?uri ?p ?o .",
                                          " }",
                                          " } LIMIT 1",
                                          "}");
        test_roundTripQuery(query) ;
    }

    @Test public void testModifiersOnSubQuery2() {
        // From JENA-954
        String query = StrUtils.strjoinNL("SELECT (COUNT(*) as ?count) {",
                                          " SELECT REDUCED ?uri ?graph WHERE {",
                                          " GRAPH ?graph {",
                                          " ?uri ?p ?o .",
                                          " }",
                                          " } LIMIT 1",
                                          "}");
        test_roundTripQuery(query);
    }

    @Test public void testModifiersOnSubQuery3() {
        // From JENA-954
        String query = StrUtils.strjoinNL("SELECT (COUNT(*) as ?count) {",
                                          " SELECT ?uri ?graph WHERE {",
                                          " GRAPH ?graph {",
                                          " ?uri ?p ?o .",
                                          " }",
                                          " } LIMIT 1",
                                          "}");
        test_roundTripQuery(query);
    }

    @Test public void testModifiersOnSubQuery4() {
        // From JENA-954
        String query = StrUtils.strjoinNL("SELECT (COUNT(*) as ?count) {",
                                          " SELECT ?uri ?graph WHERE {",
                                          " GRAPH ?graph {",
                                          " ?uri ?p ?o .",
                                          " }",
                                          " } OFFSET 1",
                                          "}");
        test_roundTripQuery(query);
    }

    @Test public void testPathExpressions1() {
        String query = "PREFIX : <http://example/> SELECT * { ?s :p* ?o . ?x :r 123 . }" ;
        test_roundTripQuery(query);
    }

    @Test public void testPathExpressions2() {
        String query = "PREFIX : <http://example/> SELECT * { ?s :p*/:q ?o . ?x :r 123 . }" ;
        test_roundTripQuery(query);
    }

    @Test public void testMinus1() { test_roundTripQuery("PREFIX : <http://example/> SELECT * { ?s :p ?o MINUS { ?s :q ?v .FILTER(?v<5) } }") ; }

    @Test public void testMinus2() {
        // query gains a level of {} but the meaning is the same.
        String query = "PREFIX : <http://example/> SELECT * { ?s :p ?o OPTIONAL { ?s :x ?2 } MINUS { ?s :q ?v .FILTER(?v<5) } }" ;
        test_roundTripAlegbra(query) ;
    }

    @Test public void testTable1() {
        String query = "SELECT * WHERE { ?x ?p ?z . VALUES ?y { } }" ;
        roundTripQuery(query);
    }

    @Test public void testTable2() {
        // JENA-1468 : op to string and back.
        String qs = "SELECT * WHERE { ?x ?p ?z . VALUES ?y { } }" ;
        Query query = QueryFactory.create(qs);
        Op op = Algebra.compile(query);
        String x = op.toString();
        Op op1 = SSE.parseOp(x);
        Query query2 = OpAsQuery.asQuery(op1);
        assertEquals(query, query2);
    }

    @Test public void testValues1() {
        String query = "SELECT * { VALUES ?x {1 2} ?s ?p ?x }" ;
        test_roundTripQuery(query) ;
    }

    @Test public void testValues2() {
        String query = "SELECT * { ?s ?p ?x VALUES ?x {1 2} }" ;
        test_roundTripQuery(query) ;
    }

    // Algebra to query : optimization cases OpAsQuery can handle.
    @Test public void testAlgebra01() {
        String opStr = "(sequence (bgp (?s1 ?p1 ?o1)) (bgp (?s2 ?p2 ?o2)) )" ;
        String query = "SELECT * { ?s1 ?p1 ?o1. ?s2 ?p2 ?o2}" ;
        test_AlgebraToQuery(opStr, query);
    }

    @Test public void testAlgebra02() {
        String opStr = "(sequence (bgp (?s1 ?p1 ?o1)) (path ?x (path* :p) ?z) )" ;
        String query = "PREFIX : <http://example/> SELECT * { ?s1 ?p1 ?o1. ?x :p* ?z}" ;
        test_AlgebraToQuery(opStr, query);
    }

    @Test public void testAlgebra03() {
        String opStr = "(sequence (path ?x (path* :p) ?z) (bgp (?s1 ?p1 ?o1)) )" ;
        String query = "PREFIX : <http://example/> SELECT * { ?x :p* ?z . ?s1 ?p1 ?o1. }" ;
        test_AlgebraToQuery(opStr, query);
    }

    // There 3 classes of transformations: there are 3 main test operations.
    // test_roundTripQuery: The same query is recovered from OpAsQuery
    // test_roundTripAlegbra: Different queries with the same alegra forms
    // test_equivalentQuery: Different equivalent queries - same answers, different algebra.
    // test_algebraToQuery: algebra to query (e.g. optimization shapes)
    //
    // test_roundTripQuery is test_equivalentQuery with same input and expected.
    // + quad variants.

    public static void test_equivalentQuery(String input, String expected) {
        Query orig = QueryFactory.create(input, Syntax.syntaxSPARQL_11);
        Op toReconstruct = Algebra.compile(orig);
        Query got = OpAsQuery.asQuery(toReconstruct);
        Query result = QueryFactory.create(expected, Syntax.syntaxSPARQL_11);
        assertEquals(result, got);
    }

    // Test for queries that do query->algebra->OpAsQuery->query
    // to produce an output that is .equals the input.
    /** query->algebra->OpAsQuery->query */
    public static Query[] test_roundTripQuery(String query) {
        // [original, got]
        Query[] r = roundTripQuery(query) ;
        stripNamespacesAndBase(r[0]) ;
        stripNamespacesAndBase(r[1]) ;
        assertEquals(r[0], r[1]) ;
        return r ;
    }

    public static void test_roundTripQuery(String query, String outcome) {
        Query[] r = roundTripQuery(query) ;
        Query orig = r[0];
        Query output = r[1];
        Query q2 = QueryFactory.create(outcome);
        stripNamespacesAndBase(orig) ;
        stripNamespacesAndBase(output) ;
        stripNamespacesAndBase(q2) ;
        assertEquals(q2, output) ;
    }

    // Test via quads
    public static Query[] test_roundTripQueryQuads(String query) {
        Query[] r = roundTripQueryQuad(query) ;
        assertEquals(r[0], r[1]) ;
        return r ;
    }

    // Compare A1 and A2 where
    // query[Q1]->algebra[A1]->OpAsQuery->query[Q2]->algebra[A2]
    // Sometimes Q1 and Q2 are equivalent but not .equals.
    public void test_roundTripAlegbra(String query) {
        Query[] r = roundTripQuery(query);
        // Even if the strings come out as non-equal because of the translation from algebra to query
        // the algebras should be equal
        // i.e. the queries should remain semantically equivalent
        Op a1 = Algebra.compile(r[0]);
        Op a2 = Algebra.compile(r[1]);
        Assert.assertEquals(a1, a2);
    }

    /** algebra->OpAsQuery->query */
    public static void test_AlgebraToQuery(String input, String expected) {
        Op op = SSE.parseOp(input) ;
        Query orig = QueryFactory.create(expected, Syntax.syntaxSPARQL_11);
        stripNamespacesAndBase(orig) ;
        Query got = OpAsQuery.asQuery(op);
        Assert.assertEquals(orig, got) ;
    }

    /** query->algebra->OpAsQuery->query **/
    private static Query[] roundTripQuery(String query) {
        Query orig = QueryFactory.create(query, Syntax.syntaxSPARQL_11);
        Op toReconstruct = Algebra.compile(orig);
        Query got = OpAsQuery.asQuery(toReconstruct);
        Query[] r = { orig, got };
        return r;
    }

    /** query->algebra/quads->OpAsQuery->query */
    private static Query[] roundTripQueryQuad(String query) {
        Query orig = QueryFactory.create(query, Syntax.syntaxSPARQL_11);
        Op toReconstruct = Algebra.compile(orig);
        toReconstruct = Algebra.toQuadForm(toReconstruct);
        Query got = OpAsQuery.asQuery(toReconstruct);
        Query[] r = { orig, got };
        return r;
    }

    // Removes all prefix mappings and the base URI so that query comparison
    // is not sensitive to prolog differences introduced by the round trip.
    protected static void stripNamespacesAndBase(Query q) {
        Map<String, String> prefixes = q.getPrefixMapping().getNsPrefixMap();
        for (String prefix : prefixes.keySet()) {
            q.getPrefixMapping().removeNsPrefix(prefix);
        }
        q.setBaseURI((String)null);
    }
}
/* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.google.security.zynamics.binnavi.ZyGraph.Settings; import com.google.common.base.Preconditions; import com.google.security.zynamics.binnavi.CUtilityFunctions; import com.google.security.zynamics.binnavi.config.GraphSettingsConfigItem; import com.google.security.zynamics.zylib.general.ListenerProvider; /** * Contains search-related graph settings. */ public final class ZyGraphSearchSettings { /** * Flag that determines whether search operations only operate on visible nodes. */ private boolean m_searchVisibleNodesOnly; /** * Flag that determines whether search operations only operate on selected nodes. */ private boolean m_searchSelectedNodesOnly; /** * Flag that determines whether search operations are case sensitive. */ private boolean m_searchCaseSensitive; /** * Flag that determines whether search operations search for regular expressions. */ private boolean m_searchRegEx; /** * Configuration file object that is synchronized with this settings class. */ private final GraphSettingsConfigItem m_type; /** * Listeners that are notified about changes in the graph settings. */ private final ListenerProvider<IZyGraphSearchSettingsListener> m_listeners = new ListenerProvider<IZyGraphSearchSettingsListener>(); /** * Creates a new settings object backed by graph settings from the configuration file. * * @param type Graph settings from the configuration file. 
*/ public ZyGraphSearchSettings(final GraphSettingsConfigItem type) { Preconditions.checkNotNull(type, "IE00873: Type argument can't be null"); m_type = type; } /** * Creates a new settings type by copying the settings of another settings type. * * @param settings The settings type that provides the initial settings. */ public ZyGraphSearchSettings(final ZyGraphSearchSettings settings) { m_type = null; m_searchCaseSensitive = settings.getSearchCaseSensitive(); m_searchRegEx = settings.getSearchRegEx(); m_searchSelectedNodesOnly = settings.getSearchSelectedNodesOnly(); m_searchVisibleNodesOnly = settings.getSearchVisibleNodesOnly(); } /** * Adds a listener object that is notified about changes in the search settings. * * @param listener The listener object to add. */ public void addListener(final IZyGraphSearchSettingsListener listener) { m_listeners.addListener(listener); } /** * Returns the current case sensitive search setting. * * @return The current case sensitive search setting. */ public boolean getSearchCaseSensitive() { return m_type == null ? m_searchCaseSensitive : m_type.isCaseSensitiveSearch(); } /** * Returns the current regular expression search setting. * * @return The current regular expression search setting. */ public boolean getSearchRegEx() { return m_type == null ? m_searchRegEx : m_type.isRegexSearch(); } /** * Returns the current selected nodes only search setting. * * @return The current selected nodes only search setting. */ public boolean getSearchSelectedNodesOnly() { return m_type == null ? m_searchSelectedNodesOnly : m_type.isSearchSelectedNodesOnly(); } /** * Returns the current visible nodes only search setting. * * @return The current visible nodes only search setting. */ public boolean getSearchVisibleNodesOnly() { return m_type == null ? m_searchVisibleNodesOnly : m_type.isSearchVisibleNodesOnly(); } /** * Removes a previously attached listener object. * * @param listener The listener object to remove. 
*/ public void removeListener(final IZyGraphSearchSettingsListener listener) { m_listeners.removeListener(listener); } /** * Changes the current case sensitive search setting. * * @param value The new value of the case sensitive search setting. */ public void setSearchCaseSensitive(final boolean value) { if (value == getSearchCaseSensitive()) { return; } if (m_type == null) { m_searchCaseSensitive = value; } else { m_type.setCaseSensitiveSearch(value); } for (final IZyGraphSearchSettingsListener listener : m_listeners) { try { listener.changedSearchCaseSensitive(value); } catch (final Exception exception) { CUtilityFunctions.logException(exception); } } } /** * Changes the current regular expression search setting. * * @param value The new value of the regular expression search setting. */ public void setSearchRegEx(final boolean value) { if (value == getSearchRegEx()) { return; } if (m_type == null) { m_searchRegEx = value; } else { m_type.setRegexSearch(value); } for (final IZyGraphSearchSettingsListener listener : m_listeners) { try { listener.changedSearchRegEx(value); } catch (final Exception exception) { CUtilityFunctions.logException(exception); } } } /** * Changes the current search selected nodes only setting. * * @param value The new value of the search selected nodes only setting. */ public void setSearchSelectedNodesOnly(final boolean value) { if (value == getSearchSelectedNodesOnly()) { return; } if (m_type == null) { m_searchSelectedNodesOnly = value; } else { m_type.setSearchSelectedNodesOnly(value); } for (final IZyGraphSearchSettingsListener listener : m_listeners) { try { listener.changedSearchSelectionNodesOnly(value); } catch (final Exception exception) { CUtilityFunctions.logException(exception); } } } /** * Changes the current search visible nodes only setting. * * @param value The new value of the search visible nodes only setting. 
*/ public void setSearchVisibleNodesOnly(final boolean value) { if (value == getSearchVisibleNodesOnly()) { return; } if (m_type == null) { m_searchVisibleNodesOnly = value; } else { m_type.setSearchVisibleNodesOnly(value); } for (final IZyGraphSearchSettingsListener listener : m_listeners) { try { listener.changedSearchVisibleNodesOnly(value); } catch (final Exception exception) { CUtilityFunctions.logException(exception); } } } }